diff --git a/.aztec-sync-commit b/.aztec-sync-commit index bdaabc69727..0e17b00d0d2 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -12af650f0d27c37dca06bb329bf76a5574534d78 +ed815a3713fc311056a8bd0a616945f12d9be2a8 diff --git a/Cargo.lock b/Cargo.lock index cacfc06327d..d2afe025c69 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -451,6 +451,7 @@ dependencies = [ "noirc_errors", "noirc_frontend", "regex", + "tiny-keccak", ] [[package]] @@ -1186,6 +1187,19 @@ dependencies = [ "syn 2.0.64", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core 0.9.8", +] + [[package]] name = "debugid" version = "0.8.0" @@ -1382,6 +1396,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "log", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -2017,12 +2040,17 @@ dependencies = [ [[package]] name = "inferno" -version = "0.11.15" +version = "0.11.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fb7c1b80a1dfa604bb4a649a5c5aeef3d913f7c520cb42b40e534e8a61bcdfc" +checksum = "321f0f839cd44a4686e9504b0a62b4d69a50b62072144c71c68f5873c167b8d9" dependencies = [ "ahash 0.8.11", - "indexmap 1.9.3", + "clap", + "crossbeam-channel", + "crossbeam-utils", + "dashmap", + "env_logger", + "indexmap 2.2.6", "is-terminal", "itoa", "log", @@ -2714,6 +2742,30 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "noir_profiler" +version = "0.31.0" +dependencies = [ + "acir", + "clap", + "codespan-reporting", + "color-eyre", + "const_format", + "fm", + "im", + "inferno", + "nargo", + "noirc_abi", + "noirc_artifacts", + "noirc_driver", + "noirc_errors", + "serde", + "serde_json", + "tempfile", + "tracing-appender", + "tracing-subscriber", +] + [[package]] name = "noir_wasm" version = "0.31.0" @@ -2836,6 +2888,7 @@ name = "noirc_evaluator" version = "0.31.0" dependencies = [ "acvm", + "bn254_blackbox_solver", "chrono", "fxhash", "im", diff --git a/Cargo.toml b/Cargo.toml index 129661d157d..8cd5defa121 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ members = [ "tooling/noirc_abi", "tooling/noirc_abi_wasm", "tooling/acvm_cli", + "tooling/profiler", # ACVM "acvm-repo/acir_field", "acvm-repo/acir", @@ -36,7 +37,7 @@ members = [ # Utility crates "utils/iter-extended", ] -default-members = ["tooling/nargo_cli", "tooling/acvm_cli"] +default-members = ["tooling/nargo_cli", "tooling/acvm_cli", "tooling/profiler"] resolver = "2" [workspace.package] @@ -84,7 +85,7 @@ acvm_cli = { path = "tooling/acvm_cli" } # Arkworks ark-bn254 = { version = "^0.4.0", default-features = false, features = ["curve"] } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = ["curve"] } -grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = ["std"] } +grumpkin = { version = "0.1.0", package = "noir_grumpkin", features = ["std"] } ark-ec = { version = "^0.4.0", default-features = false } ark-ff = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0", 
default-features = false } @@ -143,11 +144,11 @@ similar-asserts = "1.5.0" tempfile = "3.6.0" jsonrpc = { version = "0.16.0", features = ["minreq_http"] } flate2 = "1.0.24" +color-eyre = "0.6.2" rand = "0.8.5" proptest = "1.2.0" proptest-derive = "0.4.0" - im = { version = "15.1", features = ["serde"] } tracing = "0.1.40" tracing-web = "0.1.3" diff --git a/acvm-repo/acir/src/circuit/black_box_functions.rs b/acvm-repo/acir/src/circuit/black_box_functions.rs index 419c0266b69..fb7d9eb584c 100644 --- a/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -82,43 +82,10 @@ pub enum BlackBoxFunc { /// /// [grumpkin]: https://hackmd.io/@aztec-network/ByzgNxBfd#2-Grumpkin---A-curve-on-top-of-BN-254-for-SNARK-efficient-group-operations SchnorrVerify, - - /// Calculates a Pedersen commitment to the inputs. - /// - /// Computes a Pedersen commitment of the inputs using generators of the - /// embedded curve - /// - input: vector of (witness, 254) - /// - output: 2 witnesses representing the x,y coordinates of the resulting - /// Grumpkin point - /// - domain separator: a constant public value (a field element) that you - /// can use so that the commitment also depends on the domain separator. - /// Noir uses 0 as domain separator. - /// - /// The backend should handle proper conversion between the inputs being ACIR - /// field elements and the scalar field of the embedded curve. In the case of - /// Aztec's Barretenberg, the latter is bigger than the ACIR field so it is - /// straightforward. The Pedersen generators are managed by the proving - /// system. - /// - /// The commitment is expected to be additively homomorphic + /// Will be deprecated PedersenCommitment, - - /// Calculates a Pedersen hash to the inputs. - /// - /// Computes a Pedersen hash of the inputs and their number, using - /// generators of the embedded curve - /// - input: vector of (witness, 254) - /// - output: the x-coordinate of the pedersen commitment of the - /// 'prepended input' (see below) - /// - domain separator: a constant public value (a field element) that you - /// can use so that the hash also depends on the domain separator. Noir - /// uses 0 as domain separator. - /// - /// In Barretenberg, PedersenHash is doing the same as PedersenCommitment, - /// except that it prepends the inputs with their length. This is expected - /// to not be additively homomorphic. + /// Will be deprecated PedersenHash, - /// Verifies a ECDSA signature over the secp256k1 curve. 
/// - inputs: /// - x coordinate of public key as 32 bytes @@ -242,8 +209,6 @@ impl BlackBoxFunc { BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", BlackBoxFunc::Blake3 => "blake3", - BlackBoxFunc::PedersenCommitment => "pedersen_commitment", - BlackBoxFunc::PedersenHash => "pedersen_hash", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", BlackBoxFunc::MultiScalarMul => "multi_scalar_mul", BlackBoxFunc::EmbeddedCurveAdd => "embedded_curve_add", @@ -262,6 +227,8 @@ impl BlackBoxFunc { BlackBoxFunc::BigIntToLeBytes => "bigint_to_le_bytes", BlackBoxFunc::Poseidon2Permutation => "poseidon2_permutation", BlackBoxFunc::Sha256Compression => "sha256_compression", + BlackBoxFunc::PedersenCommitment => "pedersen_commitment", + BlackBoxFunc::PedersenHash => "pedersen_hash", } } @@ -272,8 +239,6 @@ impl BlackBoxFunc { "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), "blake3" => Some(BlackBoxFunc::Blake3), - "pedersen_commitment" => Some(BlackBoxFunc::PedersenCommitment), - "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), "ecdsa_secp256r1" => Some(BlackBoxFunc::EcdsaSecp256r1), "multi_scalar_mul" => Some(BlackBoxFunc::MultiScalarMul), @@ -292,6 +257,8 @@ impl BlackBoxFunc { "bigint_to_le_bytes" => Some(BlackBoxFunc::BigIntToLeBytes), "poseidon2_permutation" => Some(BlackBoxFunc::Poseidon2Permutation), "sha256_compression" => Some(BlackBoxFunc::Sha256Compression), + "pedersen_commitment" => Some(BlackBoxFunc::PedersenCommitment), + "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), _ => None, } } diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 362e9ba5936..b8be81fcdef 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -54,11 +54,13 @@ pub enum BlackBoxFuncCall { message: Vec, output: Witness, }, + /// Will be deprecated PedersenCommitment { inputs: Vec, domain_separator: u32, outputs: (Witness, Witness), }, + /// Will be deprecated PedersenHash { inputs: Vec, domain_separator: u32, @@ -189,8 +191,6 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, - BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, - BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxFuncCall::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, @@ -206,6 +206,8 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxFuncCall::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxFuncCall::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, + BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, + BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, } } @@ -219,9 +221,9 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } | BlackBoxFuncCall::Blake3 { inputs, .. } + | BlackBoxFuncCall::BigIntFromLeBytes { inputs, .. 
} | BlackBoxFuncCall::PedersenCommitment { inputs, .. } | BlackBoxFuncCall::PedersenHash { inputs, .. } - | BlackBoxFuncCall::BigIntFromLeBytes { inputs, .. } | BlackBoxFuncCall::Poseidon2Permutation { inputs, .. } => inputs.to_vec(), BlackBoxFuncCall::Keccakf1600 { inputs, .. } => inputs.to_vec(), @@ -421,6 +423,14 @@ fn get_outputs_string(outputs: &[Witness]) -> String { impl std::fmt::Display for BlackBoxFuncCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BlackBoxFuncCall::PedersenCommitment { .. } => { + return write!(f, "BLACKBOX::Deprecated") + } + BlackBoxFuncCall::PedersenHash { .. } => return write!(f, "BLACKBOX::Deprecated"), + _ => (), + } + let uppercase_name = self.name().to_uppercase(); write!(f, "BLACKBOX::{uppercase_name} ")?; // INPUTS @@ -440,13 +450,7 @@ impl std::fmt::Display for BlackBoxFuncCall { write!(f, "]")?; - // SPECIFIC PARAMETERS - match self { - BlackBoxFuncCall::PedersenCommitment { domain_separator, .. } => { - write!(f, " domain_separator: {domain_separator}") - } - _ => write!(f, ""), - } + write!(f, "") } } diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index dfcb1a8bb86..84a9aa719f2 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -100,33 +100,6 @@ fn multi_scalar_mul_circuit() { assert_eq!(bytes, expected_serialization) } -#[test] -fn pedersen_circuit() { - let pedersen = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::PedersenCommitment { - inputs: vec![FunctionInput { witness: Witness(1), num_bits: FieldElement::max_num_bits() }], - outputs: (Witness(2), Witness(3)), - domain_separator: 0, - }); - - let circuit: Circuit = Circuit { - current_witness_index: 4, - opcodes: vec![pedersen], - private_parameters: BTreeSet::from([Witness(1)]), - return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(2), Witness(3)])), - ..Circuit::default() - }; - let program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; - - let bytes = Program::serialize_program(&program); - - let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, - 76, 77, 179, 34, 20, 36, 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, - 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, - ]; - assert_eq!(bytes, expected_serialization) -} - #[test] fn schnorr_verify_circuit() { let public_key_x = diff --git a/acvm-repo/acvm/src/compiler/transformers/csat.rs b/acvm-repo/acvm/src/compiler/transformers/csat.rs index f2a3cc2c84e..19cc18ca7f3 100644 --- a/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -432,7 +432,7 @@ fn fits_in_one_identity(expr: &Expression, width: usize) -> boo return true; } - // We now know that we have a single mul term. We also know that the mul term must match up with two other terms + // We now know that we have a single mul term. We also know that the mul term must match up with at least one of the other terms // A polynomial whose mul terms are non zero which do not match up with two terms in the fan-in cannot fit into one opcode // An example of this is: Axy + Bx + Cy + ... 
// Notice how the bivariate monomial xy has two univariate monomials with their respective coefficients @@ -461,7 +461,25 @@ fn fits_in_one_identity(expr: &Expression, width: usize) -> boo } } - found_x & found_y + // If the multiplication is a squaring then we must assign the two witnesses to separate wires and so we + // can never get a zero contribution to the width. + let multiplication_is_squaring = mul_term.1 == mul_term.2; + + let mul_term_width_contribution = if !multiplication_is_squaring && (found_x & found_y) { + // Both witnesses involved in the multiplication exist elsewhere in the expression. + // They both do not contribute to the width of the expression as this would be double-counting + // due to their appearance in the linear terms. + 0 + } else if found_x || found_y { + // One of the witnesses involved in the multiplication exists elsewhere in the expression. + // The multiplication then only contributes 1 new witness to the width. + 1 + } else { + // Worst case scenario, the multiplication is using completely unique witnesses so has a contribution of 2. + 2 + }; + + mul_term_width_contribution + expr.linear_combinations.len() <= width } #[cfg(test)] @@ -573,4 +591,20 @@ mod tests { let contains_b = got_optimized_opcode_a.linear_combinations.iter().any(|(_, w)| *w == b); assert!(contains_b); } + + #[test] + fn recognize_expr_with_single_shared_witness_which_fits_in_single_identity() { + // Regression test for an expression which Zac found which should have been preserved but + // was being split into two expressions. + let expr = Expression { + mul_terms: vec![(-FieldElement::from(555u128), Witness(8), Witness(10))], + linear_combinations: vec![ + (FieldElement::one(), Witness(10)), + (FieldElement::one(), Witness(11)), + (-FieldElement::one(), Witness(13)), + ], + q_c: FieldElement::zero(), + }; + assert!(fits_in_one_identity(&expr, 4)); + } } diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 8bda9221d8a..0c65759ebcd 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -7,7 +7,7 @@ use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::{ aes128::solve_aes128_encryption_opcode, bigint::AcvmBigIntSolver, - hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, + hash::solve_poseidon2_permutation_opcode, }; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; @@ -27,7 +27,7 @@ use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; // Hash functions should eventually be exposed for external consumers. 
use hash::{solve_generic_256_hash_opcode, solve_sha_256_permutation_opcode}; use logic::{and, xor}; -use pedersen::pedersen; +use pedersen::{pedersen, pedersen_hash}; pub(crate) use range::solve_range_opcode; use signature::{ ecdsa::{secp256k1_prehashed, secp256r1_prehashed}, diff --git a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index cfd5523b79f..aaa82f8f1e5 100644 --- a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -75,16 +75,6 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a Pedersen opcode', async function () { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MultiScalarMul opcode', async () => { const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index 1e3517e8814..120ad0fa738 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -76,17 +76,6 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a Pedersen opcode', async function () { - this.timeout(10000); - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MultiScalarMul opcode', async () => { const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/multi_scalar_mul'); @@ -117,25 +106,6 @@ it('successfully executes a MemoryOp opcode', async () => { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes 500 pedersen circuits', async function () { - this.timeout(100000); - - // Pedersen opcodes used to have a large upfront cost due to generator calculation - // so we'd need to pass around the blackbox solver in JS to avoid redoing this work. - // - // This test now shows that we don't need to do this anymore without a performance regression. 
- - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/pedersen'); - - for (let i = 0; i < 500; i++) { - const solvedWitness = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); - } -}); - /** * Below are all the same tests as above but using `executeProgram` * TODO: also add a couple tests for executing multiple circuits diff --git a/acvm-repo/acvm_js/test/shared/pedersen.ts b/acvm-repo/acvm_js/test/shared/pedersen.ts deleted file mode 100644 index 6e3ec403d65..00000000000 --- a/acvm-repo/acvm_js/test/shared/pedersen.ts +++ /dev/null @@ -1,13 +0,0 @@ -// See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. -export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 74, 73, 10, 0, 0, 4, 180, 29, 252, 255, 193, 66, 40, 76, 77, 179, 34, 20, 36, - 136, 237, 83, 245, 101, 107, 79, 65, 94, 253, 214, 217, 255, 239, 192, 1, 43, 124, 181, 238, 113, 0, 0, 0, -]); - -export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); - -export const expectedWitnessMap = new Map([ - [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [2, '0x083e7911d835097629f0067531fc15cafd79a89beecb39903f69572c636f4a5a'], - [3, '0x1a7f5efaad7f315c25a918f30cc8d7333fccab7ad7c90f14de81bcc528f9935d'], -]); diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 0ee3a252840..f729a5033fb 100644 --- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -81,6 +81,7 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result { Err(Self::fail(BlackBoxFunc::PedersenHash)) } + fn multi_scalar_mul( &self, _points: &[F], diff --git a/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/acvm-repo/bn254_blackbox_solver/benches/criterion.rs index cbcb75a3291..e7917fa1adc 100644 --- a/acvm-repo/bn254_blackbox_solver/benches/criterion.rs +++ b/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -13,22 +13,6 @@ fn bench_poseidon2(c: &mut Criterion) { c.bench_function("poseidon2", |b| b.iter(|| poseidon2_permutation(black_box(&inputs), 4))); } -fn bench_pedersen_commitment(c: &mut Criterion) { - let inputs = [FieldElement::one(); 2]; - - c.bench_function("pedersen_commitment", |b| { - b.iter(|| Bn254BlackBoxSolver.pedersen_commitment(black_box(&inputs), 0)) - }); -} - -fn bench_pedersen_hash(c: &mut Criterion) { - let inputs = [FieldElement::one(); 2]; - - c.bench_function("pedersen_hash", |b| { - b.iter(|| Bn254BlackBoxSolver.pedersen_hash(black_box(&inputs), 0)) - }); -} - fn bench_schnorr_verify(c: &mut Criterion) { let pub_key_x = FieldElement::from_hex( "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", @@ -62,7 +46,7 @@ fn bench_schnorr_verify(c: &mut Criterion) { criterion_group!( name = benches; config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = bench_poseidon2, bench_pedersen_commitment, bench_pedersen_hash, bench_schnorr_verify + targets = bench_poseidon2, bench_schnorr_verify ); criterion_main!(benches); diff --git a/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs b/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs index 
f89d582d167..bb51426b33b 100644 --- a/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs +++ b/acvm-repo/bn254_blackbox_solver/src/generator/generators.rs @@ -38,7 +38,7 @@ fn default_generators() -> &'static [Affine; NUM_DEFAULT_GEN /// index-addressable generators. /// /// [hash_to_curve]: super::hash_to_curve::hash_to_curve -pub(crate) fn derive_generators( +pub fn derive_generators( domain_separator_bytes: &[u8], num_generators: u32, starting_index: u32, diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 08e0fb66a6d..6897116e90e 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -12,6 +12,7 @@ mod schnorr; use ark_ec::AffineRepr; pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; +pub use generator::generators::derive_generators; pub use poseidon2::poseidon2_permutation; // Temporary hack, this ensure that we always use a bn254 field here @@ -47,11 +48,13 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { let inputs: Vec = inputs.iter().map(|input| input.into_repr()).collect(); let result = pedersen::commitment::commit_native_with_index(&inputs, domain_separator); - let res_x = - FieldElement::from_repr(*result.x().expect("should not commit to point at infinity")); - let res_y = - FieldElement::from_repr(*result.y().expect("should not commit to point at infinity")); - Ok((res_x, res_y)) + let result = if let Some((x, y)) = result.xy() { + (FieldElement::from_repr(*x), FieldElement::from_repr(*y)) + } else { + (FieldElement::from(0_u128), FieldElement::from(0_u128)) + }; + + Ok(result) } fn pedersen_hash( diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index 3887092a8c2..60e4af11ea2 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -61,13 +61,13 @@ pub enum BlackBoxOp { signature: HeapVector, result: MemoryAddress, }, - /// Calculates a Pedersen commitment to the inputs. + /// Will be deprecated PedersenCommitment { inputs: HeapVector, domain_separator: MemoryAddress, output: HeapArray, }, - /// Calculates a Pedersen hash to the inputs. + /// Will be deprecated PedersenHash { inputs: HeapVector, domain_separator: MemoryAddress, diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 2053f4e7c86..36d045efabf 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -241,7 +241,7 @@ pub(crate) fn evaluate_black_box memory.read(*domain_separator).try_into().map_err(|_| { BlackBoxResolutionError::Failed( BlackBoxFunc::PedersenCommitment, - "Invalid signature length".to_string(), + "Invalid separator length".to_string(), ) })?; let (x, y) = solver.pedersen_commitment(&inputs, domain_separator)?; @@ -260,7 +260,7 @@ pub(crate) fn evaluate_black_box memory.read(*domain_separator).try_into().map_err(|_| { BlackBoxResolutionError::Failed( BlackBoxFunc::PedersenCommitment, - "Invalid signature length".to_string(), + "Invalid separator length".to_string(), ) })?; let hash = solver.pedersen_hash(&inputs, domain_separator)?; @@ -392,8 +392,6 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxOp::SchnorrVerify { .. 
} => BlackBoxFunc::SchnorrVerify, - BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, - BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, BlackBoxOp::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, BlackBoxOp::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, BlackBoxOp::BigIntAdd { .. } => BlackBoxFunc::BigIntAdd, @@ -405,6 +403,8 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::Poseidon2Permutation { .. } => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, BlackBoxOp::ToRadix { .. } => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), + BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, + BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, } } diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 862162ddccf..01f45bf653c 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -500,9 +500,14 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> VM<'a, F, B> { match output { ForeignCallParam::Array(values) => { if values.len() != *size { - return Err("Foreign call result array doesn't match expected size".to_string()); + // foreign call returning flattened values into a nested type, so the sizes do not match + let destination = self.memory.read_ref(*pointer_index); + let return_type = value_type; + let mut flatten_values_idx = 0; //index of values read from flatten_values + self.write_slice_of_values_to_memory(destination, &output.fields(), &mut flatten_values_idx, return_type)?; + } else { + self.write_values_to_memory_slice(*pointer_index, values, value_types)?; } - self.write_values_to_memory_slice(*pointer_index, values, value_types)?; } _ => { return Err("Function result size does not match brillig bytecode size".to_string()); diff --git a/aztec_macros/Cargo.toml b/aztec_macros/Cargo.toml index ed70066af22..a99a654aeed 100644 --- a/aztec_macros/Cargo.toml +++ b/aztec_macros/Cargo.toml @@ -16,4 +16,4 @@ noirc_errors.workspace = true iter-extended.workspace = true convert_case = "0.6.0" regex = "1.10" - +tiny-keccak = { version = "2.0.0", features = ["keccak"] } diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index a36b7b17d09..580a132aa5a 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -7,7 +7,7 @@ use transforms::{ contract_interface::{ generate_contract_interface, stub_function, update_fn_signatures_in_contract_interface, }, - events::{generate_selector_impl, transform_events}, + events::{generate_event_impls, transform_event_abi}, functions::{ check_for_public_args, export_fn_abi, transform_function, transform_unconstrained, }, @@ -65,19 +65,14 @@ fn transform( // Usage -> mut ast -> aztec_library::transform(&mut ast) // Covers all functions in the ast for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { - if transform_module( - crate_id, - &file_id, - context, - &mut submodule.contents, - submodule.name.0.contents.as_str(), - ) - .map_err(|err| (err.into(), file_id))? + if transform_module(&file_id, &mut submodule.contents, submodule.name.0.contents.as_str()) + .map_err(|err| (err.into(), file_id))? 
{ check_for_aztec_dependency(crate_id, context)?; } } + generate_event_impls(&mut ast).map_err(|err| (err.into(), file_id))?; generate_note_interface_impl(&mut ast).map_err(|err| (err.into(), file_id))?; Ok(ast) @@ -87,9 +82,7 @@ fn transform( /// For annotated functions it calls the `transform` function which will perform the required transformations. /// Returns true if an annotated node is found, false otherwise fn transform_module( - crate_id: &CrateId, file_id: &FileId, - context: &HirContext, module: &mut SortedModule, module_name: &str, ) -> Result { @@ -106,19 +99,7 @@ fn transform_module( if !check_for_storage_implementation(module, storage_struct_name) { generate_storage_implementation(module, storage_struct_name)?; } - // Make sure we're only generating the storage layout for the root crate - // In case we got a contract importing other contracts for their interface, we - // don't want to generate the storage layout for them - if crate_id == context.root_crate_id() { - generate_storage_layout(module, storage_struct_name.clone())?; - } - } - - for structure in module.types.iter_mut() { - if structure.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) { - module.impls.push(generate_selector_impl(structure)); - has_transformed_module = true; - } + generate_storage_layout(module, storage_struct_name.clone(), module_name)?; } let has_initializer = module.functions.iter().any(|func| { @@ -219,7 +200,7 @@ fn transform_module( }); } - generate_contract_interface(module, module_name, &stubs)?; + generate_contract_interface(module, module_name, &stubs, storage_defined)?; } Ok(has_transformed_module) @@ -235,7 +216,7 @@ fn transform_hir( context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { if has_aztec_dependency(crate_id, context) { - transform_events(crate_id, context)?; + transform_event_abi(crate_id, context)?; inject_compute_note_hash_and_optionally_a_nullifier(crate_id, context)?; assign_storage_slots(crate_id, context)?; inject_note_exports(crate_id, context)?; diff --git a/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs b/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs index 40fde39a06f..30c0f63a2d4 100644 --- a/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs +++ b/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs @@ -176,7 +176,7 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( format!( " unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: aztec::protocol_types::address::AztecAddress, + contract_address: dep::aztec::protocol_types::address::AztecAddress, nonce: Field, storage_slot: Field, note_type_id: Field, @@ -194,7 +194,7 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( let if_statements: Vec = note_types.iter().map(|note_type| format!( "if (note_type_id == {0}::get_note_type_id()) {{ - aztec::note::utils::compute_note_hash_and_optionally_a_nullifier({0}::deserialize_content, note_header, compute_nullifier, serialized_note) + dep::aztec::note::utils::compute_note_hash_and_optionally_a_nullifier({0}::deserialize_content, note_header, compute_nullifier, serialized_note) }}" , note_type)).collect(); @@ -208,14 +208,14 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( format!( " unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: aztec::protocol_types::address::AztecAddress, + contract_address: 
dep::aztec::protocol_types::address::AztecAddress, nonce: Field, storage_slot: Field, note_type_id: Field, compute_nullifier: bool, serialized_note: [Field; {}], ) -> pub [Field; 4] {{ - let note_header = aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); + let note_header = dep::aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); {} }}", diff --git a/aztec_macros/src/transforms/contract_interface.rs b/aztec_macros/src/transforms/contract_interface.rs index e6ac43ad2c4..ed451fdd998 100644 --- a/aztec_macros/src/transforms/contract_interface.rs +++ b/aztec_macros/src/transforms/contract_interface.rs @@ -30,8 +30,8 @@ use crate::utils::{ // for i in 0..third_arg.len() { // args_acc = args_acc.append(third_arg[i].serialize().as_slice()); // } -// let args_hash = aztec::hash::hash_args(args_acc); -// assert(args_hash == aztec::oracle::arguments::pack_arguments(args_acc)); +// let args_hash = dep::aztec::hash::hash_args(args_acc); +// assert(args_hash == dep::aztec::oracle::arguments::pack_arguments(args_acc)); // PublicCallInterface { // target_contract: self.target_contract, // selector: FunctionSelector::from_signature("SELECTOR_PLACEHOLDER"), @@ -57,18 +57,14 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call let fn_return_type: noirc_frontend::ast::UnresolvedType = func.return_type(); let fn_selector = format!( - "aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature(\"{}\")", + "dep::aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature(\"{}\")", SELECTOR_PLACEHOLDER ); let parameters = func.parameters(); let is_void = if matches!(fn_return_type.typ, UnresolvedTypeData::Unit) { "Void" } else { "" }; let is_static = if is_static_call { "Static" } else { "" }; - let return_type_hint = if is_void == "Void" { - "".to_string() - } else { - format!("<{}>", fn_return_type.typ.to_string().replace("plain::", "")) - }; + let return_type_hint = fn_return_type.typ.to_string().replace("plain::", ""); let call_args = parameters .iter() .map(|arg| { @@ -76,48 +72,103 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call match &arg.typ.typ { UnresolvedTypeData::Array(_, typ) => { format!( - "let hash_{0} = {0}.map(|x: {1}| x.serialize()); - for i in 0..{0}.len() {{ - args_acc = args_acc.append(hash_{0}[i].as_slice()); - }}\n", + "let serialized_{0} = {0}.map(|x: {1}| x.serialize()); + for i in 0..{0}.len() {{ + args_acc = args_acc.append(serialized_{0}[i].as_slice()); + }}\n", param_name, typ.typ.to_string().replace("plain::", "") ) } - _ => { + UnresolvedTypeData::Named(_, _, _) | UnresolvedTypeData::String(_) => { format!("args_acc = args_acc.append({}.serialize().as_slice());\n", param_name) } + _ => { + format!("args_acc = args_acc.append(&[{}.to_field()]);\n", param_name) + } } }) .collect::>() .join(""); - if aztec_visibility != "Public" { + + let param_types = if !parameters.is_empty() { + parameters + .iter() + .map(|param| param.pattern.name_ident().0.contents.clone()) + .collect::>() + .join(", ") + } else { + "".to_string() + }; + + let original = format!( + "| inputs: dep::aztec::context::inputs::{}ContextInputs | -> {} {{ + {}(inputs{}) + }}", + aztec_visibility, + if aztec_visibility == "Private" { + "dep::aztec::protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs".to_string() + } else { + return_type_hint.clone() + }, + fn_name, + if param_types.is_empty() { "".to_string() } else { format!(" ,{} ", 
param_types) } + ); + let arg_types = format!( + "({}{})", + parameters + .iter() + .map(|param| param.typ.typ.to_string().replace("plain::", "")) + .collect::>() + .join(","), + // In order to distinguish between a single element Tuple (Type,) and a single type with unnecessary parenthesis around it (Type), + // The latter gets simplified to Type, that is NOT a valid env + if parameters.len() == 1 { "," } else { "" } + ); + + let generics = if is_void == "Void" { + format!("{}>", arg_types) + } else { + format!("{}, {}>", return_type_hint, arg_types) + }; + + let fn_body = if aztec_visibility != "Public" { let args_hash = if !parameters.is_empty() { format!( "let mut args_acc: [Field] = &[]; {} - let args_hash = aztec::hash::hash_args(args_acc); - assert(args_hash == aztec::oracle::arguments::pack_arguments(args_acc));", + let args_hash = dep::aztec::hash::hash_args(args_acc); + assert(args_hash == dep::aztec::oracle::arguments::pack_arguments(args_acc));", call_args ) } else { - "let args_hash = 0;".to_string() + " + let mut args_acc: [Field] = &[]; + let args_hash = 0; + " + .to_string() }; - let fn_body = format!( - "{} - aztec::context::{}{}{}CallInterface {{ - target_contract: self.target_contract, - selector: {}, - args_hash, - }}", - args_hash, aztec_visibility, is_static, is_void, fn_selector, - ); format!( - "pub fn {}(self, {}) -> aztec::context::{}{}{}CallInterface{} {{ - {} - }}", - fn_name, fn_parameters, aztec_visibility, is_static, is_void, return_type_hint, fn_body + "{} + let selector = {}; + dep::aztec::context::{}{}{}CallInterface {{ + target_contract: self.target_contract, + selector, + name: \"{}\", + args_hash, + args: args_acc, + original: {}, + is_static: {} + }}", + args_hash, + fn_selector, + aztec_visibility, + is_static, + is_void, + fn_name, + original, + is_static_call ) } else { let args = format!( @@ -126,23 +177,42 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call ", call_args ); - let fn_body = format!( + format!( "{} - aztec::context::Public{}{}CallInterface {{ + let selector = {}; + dep::aztec::context::{}{}{}CallInterface {{ target_contract: self.target_contract, - selector: {}, + selector, + name: \"{}\", args: args_acc, - gas_opts: aztec::context::gas::GasOpts::default(), - }}", - args, is_static, is_void, fn_selector, - ); - format!( - "pub fn {}(self, {}) -> aztec::context::Public{}{}CallInterface{} {{ - {} + gas_opts: dep::aztec::context::gas::GasOpts::default(), + original: {}, + is_static: {} }}", - fn_name, fn_parameters, is_static, is_void, return_type_hint, fn_body + args, + fn_selector, + aztec_visibility, + is_static, + is_void, + fn_name, + original, + is_static_call ) - } + }; + + format!( + "pub fn {}(self, {}) -> dep::aztec::context::{}{}{}CallInterface<{},{} {{ + {} + }}", + fn_name, + fn_parameters, + aztec_visibility, + is_static, + is_void, + fn_name.len(), + generics, + fn_body + ) } // Generates the contract interface as a struct with an `at` function that holds the stubbed functions and provides @@ -152,32 +222,55 @@ pub fn generate_contract_interface( module: &mut SortedModule, module_name: &str, stubs: &[(String, Location)], + has_storage_layout: bool, ) -> Result<(), AztecMacroError> { + let storage_layout_getter = format!( + "#[contract_library_method] + pub fn storage() -> StorageLayout {{ + {}_STORAGE_LAYOUT + }}", + module_name, + ); let contract_interface = format!( " struct {0} {{ - target_contract: aztec::protocol_types::address::AztecAddress + target_contract: 
dep::aztec::protocol_types::address::AztecAddress }} impl {0} {{ {1} pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress + target_contract: dep::aztec::protocol_types::address::AztecAddress ) -> Self {{ Self {{ target_contract }} }} + + pub fn interface() -> Self {{ + Self {{ target_contract: dep::aztec::protocol_types::address::AztecAddress::zero() }} + }} + + {2} }} #[contract_library_method] pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress + target_contract: dep::aztec::protocol_types::address::AztecAddress ) -> {0} {{ {0} {{ target_contract }} }} + + #[contract_library_method] + pub fn interface() -> {0} {{ + {0} {{ target_contract: dep::aztec::protocol_types::address::AztecAddress::zero() }} + }} + + {3} ", module_name, stubs.iter().map(|(src, _)| src.to_owned()).collect::>().join("\n"), + if has_storage_layout { storage_layout_getter.clone() } else { "".to_string() }, + if has_storage_layout { format!("#[contract_library_method]\n{}", storage_layout_getter) } else { "".to_string() } ); let (contract_interface_ast, errors) = parse_program(&contract_interface); @@ -194,7 +287,7 @@ pub fn generate_contract_interface( .iter() .enumerate() .map(|(i, (method, orig_span))| { - if method.name() == "at" { + if method.name() == "at" || method.name() == "interface" || method.name() == "storage" { (method.clone(), *orig_span) } else { let (_, new_location) = stubs[i]; @@ -208,7 +301,9 @@ pub fn generate_contract_interface( module.types.push(contract_interface_ast.types.pop().unwrap()); module.impls.push(impl_with_locations); - module.functions.push(contract_interface_ast.functions.pop().unwrap()); + for function in contract_interface_ast.functions { + module.functions.push(function); + } Ok(()) } @@ -247,7 +342,7 @@ pub fn update_fn_signatures_in_contract_interface( let name = context.def_interner.function_name(func_id); let fn_parameters = &context.def_interner.function_meta(func_id).parameters.clone(); - if name == "at" { + if name == "at" || name == "interface" || name == "storage" { continue; } @@ -260,42 +355,29 @@ pub fn update_fn_signatures_in_contract_interface( .collect::>(), ); let hir_func = context.def_interner.function(func_id).block(&context.def_interner); - let call_interface_constructor_statement = context.def_interner.statement( - hir_func - .statements() - .last() - .ok_or((AztecMacroError::AztecDepNotFound, file_id))?, + + let function_selector_statement = context.def_interner.statement( + hir_func.statements().get(hir_func.statements().len() - 2).ok_or(( + AztecMacroError::CouldNotGenerateContractInterface { + secondary_message: Some( + "Function signature statement not found, invalid body length" + .to_string(), + ), + }, + file_id, + ))?, ); - let call_interface_constructor_expression = - match call_interface_constructor_statement { - HirStatement::Expression(expression_id) => { - match context.def_interner.expression(&expression_id) { - HirExpression::Constructor(hir_constructor_expression) => { - Ok(hir_constructor_expression) - } - _ => Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "CallInterface constructor statement must be a constructor expression" - .to_string(), - ), - }, - file_id, - )), - } - } - _ => Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "CallInterface constructor statement must be an expression" - .to_string(), - ), - }, - file_id, - )), - }?; - let (_, function_selector_expression_id) = - 
call_interface_constructor_expression.fields[1]; + let function_selector_expression_id = match function_selector_statement { + HirStatement::Let(let_statement) => Ok(let_statement.expression), + _ => Err(( + AztecMacroError::CouldNotGenerateContractInterface { + secondary_message: Some( + "Function selector statement must be an expression".to_string(), + ), + }, + file_id, + )), + }?; let function_selector_expression = context.def_interner.expression(&function_selector_expression_id); diff --git a/aztec_macros/src/transforms/events.rs b/aztec_macros/src/transforms/events.rs index 69cb6ddafc3..05861b96eb4 100644 --- a/aztec_macros/src/transforms/events.rs +++ b/aztec_macros/src/transforms/events.rs @@ -1,178 +1,333 @@ -use iter_extended::vecmap; -use noirc_errors::Span; -use noirc_frontend::ast::{ - ExpressionKind, FunctionDefinition, FunctionReturnType, ItemVisibility, Literal, NoirFunction, - Visibility, -}; +use noirc_frontend::ast::{ItemVisibility, NoirFunction, NoirTraitImpl, TraitImplItem}; +use noirc_frontend::macros_api::{NodeInterner, StructId}; +use noirc_frontend::token::SecondaryAttribute; use noirc_frontend::{ graph::CrateId, - macros_api::{ - BlockExpression, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, - NoirStruct, PathKind, StatementKind, StructId, StructType, Type, TypeImpl, - UnresolvedTypeData, - }, - token::SecondaryAttribute, + macros_api::{FileId, HirContext}, + parse_program, + parser::SortedModule, }; -use crate::{ - chained_dep, - utils::{ - ast_utils::{ - call, expression, ident, ident_path, is_custom_attribute, make_statement, make_type, - path, variable_path, - }, - constants::SIGNATURE_PLACEHOLDER, - errors::AztecMacroError, - hir_utils::{collect_crate_structs, signature_of_type}, - }, -}; +use crate::utils::hir_utils::collect_crate_structs; +use crate::utils::{ast_utils::is_custom_attribute, errors::AztecMacroError}; + +// Automatic implementation of most of the methods in the EventInterface trait, guiding the user with meaningful error messages in case some +// methods must be implemented manually. +pub fn generate_event_impls(module: &mut SortedModule) -> Result<(), AztecMacroError> { + // Find structs annotated with #[aztec(event)] + // Why doesn't this work ? 
Events are not tagged and do not appear, it seems only going through the submodule works + // let annotated_event_structs = module + // .types + // .iter_mut() + // .filter(|typ| typ.attributes.iter().any(|attr: &SecondaryAttribute| is_custom_attribute(attr, "aztec(event)"))); + // This did not work because I needed the submodule itself to add the trait impl back in to, but it would be nice if it was tagged on the module level + // let mut annotated_event_structs = module.submodules.iter_mut() + // .flat_map(|submodule| submodule.contents.types.iter_mut()) + // .filter(|typ| typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)"))); + + // To diagnose + // let test = module.types.iter_mut(); + // for event_struct in test { + // print!("\ngenerate_event_interface_impl COUNT: {}\n", event_struct.name.0.contents); + // } + + for submodule in module.submodules.iter_mut() { + let annotated_event_structs = submodule.contents.types.iter_mut().filter(|typ| { + typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) + }); + + for event_struct in annotated_event_structs { + // event_struct.attributes.push(SecondaryAttribute::Abi("events".to_string())); + // If one impl is pushed, this doesn't throw the "#[abi(tag)] attributes can only be used in contracts" error + // But if more than one impl is pushed, we get an increasing amount of "#[abi(tag)] attributes can only be used in contracts" errors + // We work around this by doing this addition in the HIR pass via transform_event_abi below. + + let event_type = event_struct.name.0.contents.to_string(); + let event_len = event_struct.fields.len() as u32; + // event_byte_len = event fields * 32 + randomness (32) + event_type_id (32) + let event_byte_len = event_len * 32 + 64; + + let mut event_fields = vec![]; + + for (field_ident, field_type) in event_struct.fields.iter() { + event_fields.push(( + field_ident.0.contents.to_string(), + field_type.typ.to_string().replace("plain::", ""), + )); + } -/// Generates the impl for an event selector -/// -/// Inserts the following code: -/// ```noir -/// impl SomeStruct { -/// fn selector() -> FunctionSelector { -/// aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") -/// } -/// } -/// ``` -/// -/// This allows developers to emit events without having to write the signature of the event every time they emit it. -/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. -/// It'll get resolved after by transforming the HIR. 
-pub fn generate_selector_impl(structure: &mut NoirStruct) -> TypeImpl { - structure.attributes.push(SecondaryAttribute::Abi("events".to_string())); - let struct_type = - make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![], true)); - - let selector_path = - chained_dep!("aztec", "protocol_types", "abis", "function_selector", "FunctionSelector"); - let mut from_signature_path = selector_path.clone(); - from_signature_path.segments.push(ident("from_signature")); - - let selector_fun_body = BlockExpression { - statements: vec![make_statement(StatementKind::Expression(call( - variable_path(from_signature_path), - vec![expression(ExpressionKind::Literal(Literal::Str( - SIGNATURE_PLACEHOLDER.to_string(), - )))], - )))], - }; - - // Define `FunctionSelector` return type - let return_type = - FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![], true))); - - let mut selector_fn_def = FunctionDefinition::normal( - &ident("selector"), - &vec![], - &[], - &selector_fun_body, - &[], - &return_type, - ); - - selector_fn_def.visibility = ItemVisibility::Public; - - // Seems to be necessary on contract modules - selector_fn_def.return_visibility = Visibility::Public; - - TypeImpl { - object_type: struct_type, - type_span: structure.span, - generics: vec![], - methods: vec![(NoirFunction::normal(selector_fn_def), Span::default())], + let mut event_interface_trait_impl = + generate_trait_impl_stub_event_interface(event_type.as_str(), event_byte_len)?; + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_get_event_type_id(event_type.as_str(), event_len)?, + )); + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_private_to_be_bytes(event_type.as_str(), event_byte_len)?, + )); + event_interface_trait_impl.items.push(TraitImplItem::Function( + generate_fn_to_be_bytes(event_type.as_str(), event_byte_len)?, + )); + event_interface_trait_impl + .items + .push(TraitImplItem::Function(generate_fn_emit(event_type.as_str())?)); + submodule.contents.trait_impls.push(event_interface_trait_impl); + + let serialize_trait_impl = + generate_trait_impl_serialize(event_type.as_str(), event_len, &event_fields)?; + submodule.contents.trait_impls.push(serialize_trait_impl); + + let deserialize_trait_impl = + generate_trait_impl_deserialize(event_type.as_str(), event_len, &event_fields)?; + submodule.contents.trait_impls.push(deserialize_trait_impl); + } } + + Ok(()) } -/// Computes the signature for a resolved event type. -/// It has the form 'EventName(Field,(Field),[u8;2])' -fn event_signature(event: &StructType) -> String { - let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); - format!("{}({})", event.name.0.contents, fields.join(",")) +fn generate_trait_impl_stub_event_interface( + event_type: &str, + byte_length: u32, +) -> Result { + let byte_length_without_randomness = byte_length - 32; + let trait_impl_source = format!( + " +impl dep::aztec::event::event_interface::EventInterface<{byte_length}, {byte_length_without_randomness}> for {event_type} {{ + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of {event_type} for EventInterface). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut sorted_ast = parsed_ast.into_sorted(); + let event_interface_impl = sorted_ast.trait_impls.remove(0); + + Ok(event_interface_impl) } -/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. -fn transform_event( - struct_id: StructId, - interner: &mut NodeInterner, -) -> Result<(), (AztecMacroError, FileId)> { - let struct_type = interner.get_struct(struct_id); - let selector_id = interner - .lookup_method(&Type::Struct(struct_type.clone(), vec![]), struct_id, "selector", false) - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Selector method not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - let selector_function = interner.function(&selector_id); - - let compute_selector_statement = interner.statement( - selector_function.block(interner).statements().first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement not found".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?, - ); - - let compute_selector_expression = match compute_selector_statement { - HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { - HirExpression::Call(hir_call_expression) => Some(hir_call_expression), - _ => None, - }, - _ => None, +fn generate_trait_impl_serialize( + event_type: &str, + event_len: u32, + event_fields: &[(String, String)], +) -> Result { + let field_names = + event_fields.iter().map(|field| format!("self.{}", field.0)).collect::>(); + let field_input = field_names.join(","); + + let trait_impl_source = format!( + " + impl dep::aztec::protocol_types::traits::Serialize<{event_len}> for {event_type} {{ + fn serialize(self: {event_type}) -> [Field; {event_len}] {{ + [{field_input}] + }} + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Serialize for {event_type}). This is either a bug in the compiler or the Noir macro code")), + }); } - .ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - let first_arg_id = compute_selector_expression.arguments.first().ok_or_else(|| { - let error = AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Compute selector statement is not a call expression".to_owned(), - }; - (error, struct_type.borrow().location.file) - })?; - - match interner.expression(first_arg_id) { - HirExpression::Literal(HirLiteral::Str(signature)) - if signature == SIGNATURE_PLACEHOLDER => - { - let selector_literal_id = *first_arg_id; - - let structure = interner.get_struct(struct_id); - let signature = event_signature(&structure.borrow()); - interner.update_expression(selector_literal_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); - }); - - // Also update the type! It might have a different length now than the placeholder. 
- interner.push_expr_type( - selector_literal_id, - Type::String(Box::new(Type::Constant(signature.len() as u32))), - ); - Ok(()) - } - _ => Err(( - AztecMacroError::EventError { - span: struct_type.borrow().location.span, - message: "Signature placeholder literal does not match".to_owned(), - }, - struct_type.borrow().location.file, - )), + + let mut sorted_ast = parsed_ast.into_sorted(); + let serialize_impl = sorted_ast.trait_impls.remove(0); + + Ok(serialize_impl) +} + +fn generate_trait_impl_deserialize( + event_type: &str, + event_len: u32, + event_fields: &[(String, String)], +) -> Result { + let field_names: Vec = event_fields + .iter() + .enumerate() + .map(|(index, field)| format!("{}: fields[{}]", field.0, index)) + .collect::>(); + let field_input = field_names.join(","); + + let trait_impl_source = format!( + " + impl dep::aztec::protocol_types::traits::Deserialize<{event_len}> for {event_type} {{ + fn deserialize(fields: [Field; {event_len}]) -> {event_type} {{ + {event_type} {{ {field_input} }} + }} + }} + " + ) + .to_string(); + + let (parsed_ast, errors) = parse_program(&trait_impl_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Deserialize for {event_type}). This is either a bug in the compiler or the Noir macro code")), + }); } + + let mut sorted_ast = parsed_ast.into_sorted(); + let deserialize_impl = sorted_ast.trait_impls.remove(0); + + Ok(deserialize_impl) } -pub fn transform_events( +fn generate_fn_get_event_type_id( + event_type: &str, + field_length: u32, +) -> Result { + let from_signature_input = + std::iter::repeat("Field").take(field_length as usize).collect::>().join(","); + let function_source = format!( + " + fn get_event_type_id() -> dep::aztec::protocol_types::abis::event_selector::EventSelector {{ + dep::aztec::protocol_types::abis::event_selector::EventSelector::from_signature(\"{event_type}({from_signature_input})\") + }} + ", + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn get_event_type_id, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_private_to_be_bytes( + event_type: &str, + byte_length: u32, +) -> Result { + let function_source = format!( + " + fn private_to_be_bytes(self: {event_type}, randomness: Field) -> [u8; {byte_length}] {{ + let mut buffer: [u8; {byte_length}] = [0; {byte_length}]; + + let randomness_bytes = randomness.to_be_bytes(32); + let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + + for i in 0..32 {{ + buffer[i] = randomness_bytes[i]; + buffer[32 + i] = event_type_id_bytes[i]; + }} + + let serialized_event = self.serialize(); + + for i in 0..serialized_event.len() {{ + let bytes = serialized_event[i].to_be_bytes(32); + for j in 0..32 {{ + buffer[64 + i * 32 + j] = bytes[j]; + }} + }} + + buffer + }} + " + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn private_to_be_bytes, implemented for EventInterface of {event_type}). This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_to_be_bytes( + event_type: &str, + byte_length: u32, +) -> Result { + let byte_length_without_randomness = byte_length - 32; + let function_source = format!( + " + fn to_be_bytes(self: {event_type}) -> [u8; {byte_length_without_randomness}] {{ + let mut buffer: [u8; {byte_length_without_randomness}] = [0; {byte_length_without_randomness}]; + + let event_type_id_bytes = {event_type}::get_event_type_id().to_field().to_be_bytes(32); + + for i in 0..32 {{ + buffer[i] = event_type_id_bytes[i]; + }} + + let serialized_event = self.serialize(); + + for i in 0..serialized_event.len() {{ + let bytes = serialized_event[i].to_be_bytes(32); + for j in 0..32 {{ + buffer[32 + i * 32 + j] = bytes[j]; + }} + }} + + buffer + }} + ") + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn to_be_bytes, implemented for EventInterface of {event_type}). This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +fn generate_fn_emit(event_type: &str) -> Result { + let function_source = format!( + " + fn emit(self: {event_type}, _emit: fn[Env](Self) -> ()) {{ + _emit(self); + }} + " + ) + .to_string(); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors); + return Err(AztecMacroError::CouldNotImplementEventInterface { + secondary_message: Some(format!("Failed to parse Noir macro code (fn emit, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), + }); + } + + let mut function_ast = function_ast.into_sorted(); + let mut noir_fn = function_ast.functions.remove(0); + noir_fn.def.visibility = ItemVisibility::Public; + Ok(noir_fn) +} + +// We do this pass in the HIR to work around the "#[abi(tag)] attributes can only be used in contracts" error +pub fn transform_event_abi( crate_id: &CrateId, context: &mut HirContext, ) -> Result<(), (AztecMacroError, FileId)> { @@ -184,3 +339,14 @@ pub fn transform_events( } Ok(()) } + +fn transform_event( + struct_id: StructId, + interner: &mut NodeInterner, +) -> Result<(), (AztecMacroError, FileId)> { + interner.update_struct_attributes(struct_id, |struct_attributes| { + struct_attributes.push(SecondaryAttribute::Abi("events".to_string())); + }); + + Ok(()) +} diff --git a/aztec_macros/src/transforms/note_interface.rs b/aztec_macros/src/transforms/note_interface.rs index f0e7d0d5034..c2dfb3d86d4 100644 --- a/aztec_macros/src/transforms/note_interface.rs +++ b/aztec_macros/src/transforms/note_interface.rs @@ -11,7 +11,10 @@ use noirc_frontend::{ Type, }; +use acvm::AcirField; use regex::Regex; +// TODO(#7165): nuke the following dependency from here and Cargo.toml +use tiny_keccak::{Hasher, Keccak}; use crate::{ chained_dep, @@ -69,6 +72,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt type_span: note_struct.name.span(), generics: vec![], methods: vec![], + where_clause: vec![], }; module.impls.push(default_impl.clone()); module.impls.last_mut().unwrap() @@ -97,7 +101,6 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt .collect::, _>>()?; let [note_serialized_len, note_bytes_len]: [_; 2] = note_interface_generics.try_into().unwrap(); - let note_type_id = note_type_id(¬e_type); // Automatically inject the header field if it's not present let (header_field_name, _) = if let Some(existing_header) = @@ -184,25 +187,26 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt } if !check_trait_method_implemented(trait_impl, "get_note_type_id") { + let note_type_id = compute_note_type_id(¬e_type); let get_note_type_id_fn = - generate_note_get_type_id(¬e_type_id, note_interface_impl_span)?; + generate_get_note_type_id(note_type_id, note_interface_impl_span)?; trait_impl.items.push(TraitImplItem::Function(get_note_type_id_fn)); } if !check_trait_method_implemented(trait_impl, "compute_note_content_hash") { - let get_header_fn = + let compute_note_content_hash_fn = generate_compute_note_content_hash(¬e_type, note_interface_impl_span)?; - trait_impl.items.push(TraitImplItem::Function(get_header_fn)); + trait_impl.items.push(TraitImplItem::Function(compute_note_content_hash_fn)); } if !check_trait_method_implemented(trait_impl, "to_be_bytes") { - let get_header_fn = generate_note_to_be_bytes( + let to_be_bytes_fn = generate_note_to_be_bytes( ¬e_type, note_bytes_len.as_str(), note_serialized_len.as_str(), note_interface_impl_span, )?; - trait_impl.items.push(TraitImplItem::Function(get_header_fn)); + trait_impl.items.push(TraitImplItem::Function(to_be_bytes_fn)); } } @@ -268,7 +272,7 @@ fn generate_note_get_header( ) -> Result { let function_source = format!( " - fn get_header(note: {}) -> aztec::note::note_header::NoteHeader {{ + fn get_header(note: {}) -> dep::aztec::note::note_header::NoteHeader {{ note.{} }} ", @@ -299,7 +303,7 @@ fn generate_note_set_header( ) -> Result { let function_source = format!( " - fn set_header(self: &mut {}, header: 
aztec::note::note_header::NoteHeader) {{ + fn set_header(self: &mut {}, header: dep::aztec::note::note_header::NoteHeader) {{ self.{} = header; }} ", @@ -324,16 +328,17 @@ fn generate_note_set_header( // Automatically generate the note type id getter method. The id itself its calculated as the concatenation // of the conversion of the characters in the note's struct name to unsigned integers. -fn generate_note_get_type_id( - note_type_id: &str, +fn generate_get_note_type_id( + note_type_id: u32, impl_span: Option, ) -> Result { + // TODO(#7165): replace {} with dep::aztec::protocol_types::abis::note_selector::compute_note_selector(\"{}\") in the function source below let function_source = format!( " - fn get_note_type_id() -> Field {{ - {} - }} - ", + fn get_note_type_id() -> Field {{ + {} + }} + ", note_type_id ) .to_string(); @@ -387,7 +392,7 @@ fn generate_note_properties_struct( // Generate the deserialize_content method as // -// fn deserialize_content(serialized_note: [Field; NOTE_SERILIZED_LEN]) -> Self { +// fn deserialize_content(serialized_note: [Field; NOTE_SERIALIZED_LEN]) -> Self { // NoteType { // note_field1: serialized_note[0] as Field, // note_field2: NoteFieldType2::from_field(serialized_note[1])... @@ -488,7 +493,7 @@ fn generate_note_properties_fn( // Automatically generate the method to compute the note's content hash as: // fn compute_note_content_hash(self: NoteType) -> Field { -// aztec::hash::pedersen_hash(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) +// dep::aztec::hash::pedersen_hash(self.serialize_content(), dep::aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) // } // fn generate_compute_note_content_hash( @@ -498,7 +503,7 @@ fn generate_compute_note_content_hash( let function_source = format!( " fn compute_note_content_hash(self: {}) -> Field {{ - aztec::hash::pedersen_hash(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) + dep::aztec::hash::pedersen_hash(self.serialize_content(), dep::aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) }} ", note_type @@ -525,10 +530,10 @@ fn generate_note_exports_global( let struct_source = format!( " #[abi(notes)] - global {0}_EXPORTS: (Field, str<{1}>) = ({2},\"{0}\"); + global {0}_EXPORTS: (Field, str<{1}>) = (0x{2},\"{0}\"); ", note_type, - note_type_id.len(), + note_type.len(), note_type_id ) .to_string(); @@ -557,7 +562,9 @@ fn generate_note_properties_struct_source( .iter() .filter_map(|(field_name, _)| { if field_name != note_header_field_name { - Some(format!("{}: aztec::note::note_getter_options::PropertySelector", field_name)) + Some(format!( + "{field_name}: dep::aztec::note::note_getter_options::PropertySelector" + )) } else { None } @@ -585,7 +592,7 @@ fn generate_note_properties_fn_source( .filter_map(|(index, (field_name, _))| { if field_name != note_header_field_name { Some(format!( - "{}: aztec::note::note_getter_options::PropertySelector {{ index: {}, offset: 0, length: 32 }}", + "{}: dep::aztec::note::note_getter_options::PropertySelector {{ index: {}, offset: 0, length: 32 }}", field_name, index )) @@ -662,7 +669,9 @@ fn generate_note_deserialize_content_source( ) } } else { - format!("{}: aztec::note::note_header::NoteHeader::empty()", note_header_field_name) + format!( + "{note_header_field_name}: dep::aztec::note::note_header::NoteHeader::empty()" + ) } }) .collect::>() @@ -679,10 +688,18 @@ fn generate_note_deserialize_content_source( .to_string() } +// TODO(#7165): 
nuke this function // Utility function to generate the note type id as a Field -fn note_type_id(note_type: &str) -> String { +fn compute_note_type_id(note_type: &str) -> u32 { // TODO(#4519) Improve automatic note id generation and assignment - note_type.chars().map(|c| (c as u32).to_string()).collect::>().join("") + let mut keccak = Keccak::v256(); + let mut result = [0u8; 32]; + keccak.update(note_type.as_bytes()); + keccak.finalize(&mut result); + // Take the first 4 bytes of the hash and convert them to an integer + // If you change the following value you have to change NUM_BYTES_PER_NOTE_TYPE_ID in l1_note_payload.ts as well + let num_bytes_per_note_type_id = 4; + u32::from_be_bytes(result[0..num_bytes_per_note_type_id].try_into().unwrap()) } pub fn inject_note_exports( @@ -711,29 +728,42 @@ pub fn inject_note_exports( }, file_id, ))?; - let init_function = + let get_note_type_id_function = context.def_interner.function(&func_id).block(&context.def_interner); - let init_function_statement_id = init_function.statements().first().ok_or(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some(format!( - "Could not retrieve note id statement from function for note {}", - note.borrow().name.0.contents - )), - }, - file_id, - ))?; - let note_id_statement = context.def_interner.statement(init_function_statement_id); + let get_note_type_id_statement_id = + get_note_type_id_function.statements().first().ok_or(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some(format!( + "Could not retrieve note id statement from function for note {}", + note.borrow().name.0.contents + )), + }, + file_id, + ))?; + let note_type_id_statement = + context.def_interner.statement(get_note_type_id_statement_id); - let note_id_value = match note_id_statement { + let note_type_id = match note_type_id_statement { HirStatement::Expression(expression_id) => { match context.def_interner.expression(&expression_id) { HirExpression::Literal(HirLiteral::Integer(value, _)) => Ok(value), + HirExpression::Literal(_) => Err(( + AztecMacroError::CouldNotExportStorageLayout { + span: None, + secondary_message: Some( + "note_type_id statement must be a literal integer expression" + .to_string(), + ), + }, + file_id, + )), _ => Err(( AztecMacroError::CouldNotExportStorageLayout { span: None, secondary_message: Some( - "note_id statement must be a literal expression".to_string(), + "note_type_id statement must be a literal expression" + .to_string(), ), }, file_id, @@ -741,9 +771,10 @@ pub fn inject_note_exports( } } _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { + AztecMacroError::CouldNotExportStorageLayout { + span: None, secondary_message: Some( - "note_id statement must be an expression".to_string(), + "note_type_id statement must be an expression".to_string(), ), }, file_id, @@ -751,7 +782,7 @@ pub fn inject_note_exports( }?; let global = generate_note_exports_global( ¬e.borrow().name.0.contents, - ¬e_id_value.to_string(), + ¬e_type_id.to_hex(), ) .map_err(|err| (err, file_id))?; diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs index 8b778b4cca6..c302dd87aa5 100644 --- a/aztec_macros/src/transforms/storage.rs +++ b/aztec_macros/src/transforms/storage.rs @@ -91,7 +91,7 @@ pub fn inject_context_in_storage(module: &mut SortedModule) -> Result<(), AztecM r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) }) .unwrap(); - storage_struct.generics.push(ident("Context")); + 
storage_struct.generics.push(ident("Context").into()); storage_struct .fields .iter_mut() @@ -243,9 +243,11 @@ pub fn generate_storage_implementation( span: Some(Span::default()), }, type_span: Span::default(), - generics: vec![generic_context_ident], + generics: vec![generic_context_ident.into()], methods: vec![(init, Span::default())], + + where_clause: vec![], }; module.impls.push(storage_impl); @@ -497,6 +499,7 @@ pub fn assign_storage_slots( pub fn generate_storage_layout( module: &mut SortedModule, storage_struct_name: String, + module_name: &str, ) -> Result<(), AztecMacroError> { let definition = module .types @@ -504,33 +507,28 @@ pub fn generate_storage_layout( .find(|r#struct| r#struct.name.0.contents == *storage_struct_name) .unwrap(); - let mut generic_args = vec![]; let mut storable_fields = vec![]; let mut storable_fields_impl = vec![]; - definition.fields.iter().enumerate().for_each(|(index, (field_ident, field_type))| { - storable_fields.push(format!("{}: aztec::prelude::Storable", field_ident, index)); - generic_args.push(format!("N{}", index)); - storable_fields_impl.push(format!( - "{}: aztec::prelude::Storable {{ slot: 0, typ: \"{}\" }}", - field_ident, - field_type.to_string().replace("plain::", "") - )); + definition.fields.iter().for_each(|(field_ident, _)| { + storable_fields.push(format!("{}: dep::aztec::prelude::Storable", field_ident)); + storable_fields_impl + .push(format!("{}: dep::aztec::prelude::Storable {{ slot: 0 }}", field_ident,)); }); let storage_fields_source = format!( " - struct StorageLayout<{}> {{ + struct StorageLayout {{ {} }} #[abi(storage)] - global STORAGE_LAYOUT = StorageLayout {{ + global {}_STORAGE_LAYOUT = StorageLayout {{ {} }}; ", - generic_args.join(", "), storable_fields.join(",\n"), + module_name, storable_fields_impl.join(",\n") ); diff --git a/aztec_macros/src/utils/ast_utils.rs b/aztec_macros/src/utils/ast_utils.rs index 4467c4bca4b..48b3b25747b 100644 --- a/aztec_macros/src/utils/ast_utils.rs +++ b/aztec_macros/src/utils/ast_utils.rs @@ -161,7 +161,7 @@ macro_rules! 
chained_dep { ( $base:expr $(, $tail:expr)* ) => { { let mut base_path = ident_path($base); - base_path.kind = PathKind::Plain; + base_path.kind = PathKind::Dep; $( base_path.segments.push(ident($tail)); )* diff --git a/aztec_macros/src/utils/constants.rs b/aztec_macros/src/utils/constants.rs index 848cca0477d..2178f7a2526 100644 --- a/aztec_macros/src/utils/constants.rs +++ b/aztec_macros/src/utils/constants.rs @@ -1,4 +1,3 @@ pub const FUNCTION_TREE_HEIGHT: u32 = 5; pub const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); -pub const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; pub const SELECTOR_PLACEHOLDER: &str = "SELECTOR_PLACEHOLDER"; diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs index 852b5f1e57a..557d065cb25 100644 --- a/aztec_macros/src/utils/errors.rs +++ b/aztec_macros/src/utils/errors.rs @@ -14,6 +14,7 @@ pub enum AztecMacroError { CouldNotAssignStorageSlots { secondary_message: Option }, CouldNotImplementComputeNoteHashAndOptionallyANullifier { secondary_message: Option }, CouldNotImplementNoteInterface { span: Option, secondary_message: Option }, + CouldNotImplementEventInterface { secondary_message: Option }, MultipleStorageDefinitions { span: Option }, CouldNotExportStorageLayout { span: Option, secondary_message: Option }, CouldNotInjectContextGenericInStorage { secondary_message: Option }, @@ -67,6 +68,11 @@ impl From for MacroError { secondary_message, span }, + AztecMacroError::CouldNotImplementEventInterface { secondary_message } => MacroError { + primary_message: "Could not implement automatic methods for event, please provide an implementation of the EventInterface trait".to_string(), + secondary_message, + span: None, + }, AztecMacroError::MultipleStorageDefinitions { span } => MacroError { primary_message: "Only one struct can be tagged as #[aztec(storage)]".to_string(), secondary_message: None, diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index dde2bd08d74..4ba9b85f967 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -544,14 +544,15 @@ pub fn compile_no_check( return Ok(cached_program.expect("cache must exist for hashes to match")); } let return_visibility = program.return_visibility; + let ssa_evaluator_options = noirc_evaluator::ssa::SsaEvaluatorOptions { + enable_ssa_logging: options.show_ssa, + enable_brillig_logging: options.show_brillig, + force_brillig_output: options.force_brillig, + print_codegen_timings: options.benchmark_codegen, + }; - let SsaProgramArtifact { program, debug, warnings, names, error_types, .. } = create_program( - program, - options.show_ssa, - options.show_brillig, - options.force_brillig, - options.benchmark_codegen, - )?; + let SsaProgramArtifact { program, debug, warnings, names, error_types, .. 
} = + create_program(program, &ssa_evaluator_options)?; let abi = abi_gen::gen_abi(context, &main_function, return_visibility, error_types); let file_map = filter_relevant_files(&debug, &context.file_manager); diff --git a/compiler/noirc_driver/tests/stdlib_warnings.rs b/compiler/noirc_driver/tests/stdlib_warnings.rs index 327c8daad06..47ce893d202 100644 --- a/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -27,7 +27,7 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false, false)?; - assert_eq!(warnings, Vec::new(), "stdlib is producing warnings"); + assert_eq!(warnings, Vec::new(), "stdlib is producing {} warnings", warnings.len()); Ok(()) } diff --git a/compiler/noirc_errors/src/reporter.rs b/compiler/noirc_errors/src/reporter.rs index cb5abbe2079..d817b48691f 100644 --- a/compiler/noirc_errors/src/reporter.rs +++ b/compiler/noirc_errors/src/reporter.rs @@ -1,3 +1,5 @@ +use std::io::IsTerminal; + use crate::{FileDiagnostic, Location, Span}; use codespan_reporting::diagnostic::{Diagnostic, Label}; use codespan_reporting::files::Files; @@ -148,7 +150,9 @@ pub fn report<'files>( call_stack: &[Location], deny_warnings: bool, ) -> bool { - let writer = StandardStream::stderr(ColorChoice::Always); + let color_choice = + if std::io::stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never }; + let writer = StandardStream::stderr(color_choice); let config = codespan_reporting::term::Config::default(); let stack_trace = stack_trace(files, call_stack); @@ -202,14 +206,14 @@ fn stack_trace<'files>( let path = files.name(call_item.file).expect("should get file path"); let source = files.source(call_item.file).expect("should get file source"); - let (line, column) = location(source.as_ref(), call_item.span.start()); + let (line, column) = line_and_column_from_span(source.as_ref(), &call_item.span); result += &format!("{}. 
{}:{}:{}\n", i + 1, path, line, column); } result } -fn location(source: &str, span_start: u32) -> (u32, u32) { +pub fn line_and_column_from_span(source: &str, span: &Span) -> (u32, u32) { let mut line = 1; let mut column = 0; @@ -221,7 +225,7 @@ fn location(source: &str, span_start: u32) -> (u32, u32) { column = 0; } - if span_start <= i as u32 { + if span.start() <= i as u32 { break; } } diff --git a/compiler/noirc_evaluator/Cargo.toml b/compiler/noirc_evaluator/Cargo.toml index aa30eef9156..72a52b43741 100644 --- a/compiler/noirc_evaluator/Cargo.toml +++ b/compiler/noirc_evaluator/Cargo.toml @@ -12,6 +12,7 @@ license.workspace = true noirc_frontend.workspace = true noirc_errors.workspace = true acvm.workspace = true +bn254_blackbox_solver.workspace = true fxhash.workspace = true iter-extended.workspace = true thiserror.workspace = true diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index b441e8be3eb..c2bc1e32cd6 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -22,6 +22,7 @@ use acvm::{acir::AcirField, FieldElement}; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use iter_extended::vecmap; use num_bigint::BigUint; +use std::rc::Rc; use super::brillig_black_box::convert_black_box_call; use super::brillig_block_variables::BlockVariables; @@ -582,6 +583,11 @@ impl<'block> BrilligBlock<'block> { 1, ); } + + // `Intrinsic::AsWitness` is used to provide hints to acir-gen on optimal expression splitting. + // It is then useless in the brillig runtime and so we can ignore it + Value::Intrinsic(Intrinsic::AsWitness) => (), + _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) } @@ -1629,7 +1635,7 @@ impl<'block> BrilligBlock<'block> { new_variable } } - Value::Array { array, .. 
} => { + Value::Array { array, typ } => { if let Some(variable) = self.variables.get_constant(value_id, dfg) { variable } else { @@ -1664,23 +1670,7 @@ impl<'block> BrilligBlock<'block> { // Write the items - // Allocate a register for the iterator - let iterator_register = - self.brillig_context.make_usize_constant_instruction(0_usize.into()); - - for element_id in array.iter() { - let element_variable = self.convert_ssa_value(*element_id, dfg); - // Store the item in memory - self.store_variable_in_array(pointer, iterator_register, element_variable); - // Increment the iterator - self.brillig_context.codegen_usize_op_in_place( - iterator_register.address, - BrilligBinaryOp::Add, - 1, - ); - } - - self.brillig_context.deallocate_single_addr(iterator_register); + self.initialize_constant_array(array, typ, dfg, pointer); new_variable } @@ -1705,6 +1695,125 @@ impl<'block> BrilligBlock<'block> { } } + fn initialize_constant_array( + &mut self, + data: &im::Vector, + typ: &Type, + dfg: &DataFlowGraph, + pointer: MemoryAddress, + ) { + if data.is_empty() { + return; + } + let item_types = typ.clone().element_types(); + + // Find out if we are repeating the same item over and over + let first_item = data.iter().take(item_types.len()).copied().collect(); + let mut is_repeating = true; + + for item_index in (item_types.len()..data.len()).step_by(item_types.len()) { + let item: Vec<_> = (0..item_types.len()).map(|i| data[item_index + i]).collect(); + if first_item != item { + is_repeating = false; + break; + } + } + + // If all the items are single address, and all have the same initial value, we can initialize the array in a runtime loop. + // Since the cost in instructions for a runtime loop is in the order of magnitude of 10, we only do this if the item_count is bigger than that. + let item_count = data.len() / item_types.len(); + + if item_count > 10 + && is_repeating + && item_types.iter().all(|typ| matches!(typ, Type::Numeric(_))) + { + self.initialize_constant_array_runtime( + item_types, first_item, item_count, pointer, dfg, + ); + } else { + self.initialize_constant_array_comptime(data, dfg, pointer); + } + } + + fn initialize_constant_array_runtime( + &mut self, + item_types: Rc>, + item_to_repeat: Vec, + item_count: usize, + pointer: MemoryAddress, + dfg: &DataFlowGraph, + ) { + let mut subitem_to_repeat_variables = Vec::with_capacity(item_types.len()); + for subitem_id in item_to_repeat.into_iter() { + subitem_to_repeat_variables.push(self.convert_ssa_value(subitem_id, dfg)); + } + + let data_length_variable = self + .brillig_context + .make_usize_constant_instruction((item_count * item_types.len()).into()); + + // If this is an array with complex subitems, we need a custom step in the loop to write all the subitems while iterating. 
+ if item_types.len() > 1 { + let step_variable = + self.brillig_context.make_usize_constant_instruction(item_types.len().into()); + + let subitem_pointer = + SingleAddrVariable::new_usize(self.brillig_context.allocate_register()); + + let initializer_fn = |ctx: &mut BrilligContext<_>, iterator: SingleAddrVariable| { + ctx.mov_instruction(subitem_pointer.address, iterator.address); + for subitem in subitem_to_repeat_variables.into_iter() { + Self::store_variable_in_array_with_ctx(ctx, pointer, subitem_pointer, subitem); + ctx.codegen_usize_op_in_place(subitem_pointer.address, BrilligBinaryOp::Add, 1); + } + }; + + self.brillig_context.codegen_loop_with_bound_and_step( + data_length_variable.address, + step_variable.address, + initializer_fn, + ); + + self.brillig_context.deallocate_single_addr(step_variable); + self.brillig_context.deallocate_single_addr(subitem_pointer); + } else { + let subitem = subitem_to_repeat_variables.into_iter().next().unwrap(); + + let initializer_fn = |ctx: &mut _, iterator_register| { + Self::store_variable_in_array_with_ctx(ctx, pointer, iterator_register, subitem); + }; + + self.brillig_context.codegen_loop(data_length_variable.address, initializer_fn); + } + + self.brillig_context.deallocate_single_addr(data_length_variable); + } + + fn initialize_constant_array_comptime( + &mut self, + data: &im::Vector>, + dfg: &DataFlowGraph, + pointer: MemoryAddress, + ) { + // Allocate a register for the iterator + let iterator_register = + self.brillig_context.make_usize_constant_instruction(0_usize.into()); + + for element_id in data.iter() { + let element_variable = self.convert_ssa_value(*element_id, dfg); + // Store the item in memory + self.store_variable_in_array(pointer, iterator_register, element_variable); + // Increment the iterator + self.brillig_context.codegen_usize_op_in_place( + iterator_register.address, + BrilligBinaryOp::Add, + 1, + ); + } + + self.brillig_context.deallocate_single_addr(iterator_register); + } + /// Converts an SSA `ValueId` into a `MemoryAddress`. Initializes if necessary. fn convert_ssa_single_addr_value( &mut self, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs index 10badcd7308..5741089a497 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs @@ -38,11 +38,12 @@ impl BrilligContext { self.stop_instruction(); } - /// This codegen will issue a loop that will iterate iteration_count times + /// This codegen will issue a loop do for (let iterator_register = 0; i < loop_bound; i += step) /// The body of the loop should be issued by the caller in the on_iteration closure. 
- pub(crate) fn codegen_loop( + pub(crate) fn codegen_loop_with_bound_and_step( &mut self, - iteration_count: MemoryAddress, + loop_bound: MemoryAddress, + step: MemoryAddress, on_iteration: impl FnOnce(&mut BrilligContext, SingleAddrVariable), ) { let iterator_register = self.make_usize_constant_instruction(0_u128.into()); @@ -52,13 +53,13 @@ impl BrilligContext { // Loop body - // Check if iterator < iteration_count + // Check if iterator < loop_bound let iterator_less_than_iterations = SingleAddrVariable { address: self.allocate_register(), bit_size: 1 }; self.memory_op_instruction( iterator_register.address, - iteration_count, + loop_bound, iterator_less_than_iterations.address, BrilligBinaryOp::LessThan, ); @@ -72,8 +73,13 @@ impl BrilligContext { // Call the on iteration function on_iteration(self, iterator_register); - // Increment the iterator register - self.codegen_usize_op_in_place(iterator_register.address, BrilligBinaryOp::Add, 1); + // Add step to the iterator register + self.memory_op_instruction( + iterator_register.address, + step, + iterator_register.address, + BrilligBinaryOp::Add, + ); self.jump_instruction(loop_label); @@ -85,6 +91,18 @@ impl BrilligContext { self.deallocate_single_addr(iterator_register); } + /// This codegen will issue a loop that will iterate iteration_count times + /// The body of the loop should be issued by the caller in the on_iteration closure. + pub(crate) fn codegen_loop( + &mut self, + iteration_count: MemoryAddress, + on_iteration: impl FnOnce(&mut BrilligContext, SingleAddrVariable), + ) { + let step = self.make_usize_constant_instruction(1_u128.into()); + self.codegen_loop_with_bound_and_step(iteration_count, step.address, on_iteration); + self.deallocate_single_addr(step); + } + /// This codegen will issue an if-then branch that will check if the condition is true /// and if so, perform the instructions given in `f(self, true)` and otherwise perform the /// instructions given in `f(self, false)`. A boolean is passed instead of two separate diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index dd63a254a18..0879056ad36 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -35,6 +35,12 @@ pub enum RuntimeError { UnknownLoopBound { call_stack: CallStack }, #[error("Argument is not constant")] AssertConstantFailed { call_stack: CallStack }, + #[error("The static_assert message is not constant")] + StaticAssertDynamicMessage { call_stack: CallStack }, + #[error("Argument is dynamic")] + StaticAssertDynamicPredicate { call_stack: CallStack }, + #[error("Argument is false")] + StaticAssertFailed { call_stack: CallStack }, #[error("Nested slices are not supported")] NestedSlice { call_stack: CallStack }, #[error("Big Integer modulus do no match")] @@ -120,6 +126,9 @@ impl RuntimeError { | RuntimeError::UnInitialized { call_stack, .. } | RuntimeError::UnknownLoopBound { call_stack } | RuntimeError::AssertConstantFailed { call_stack } + | RuntimeError::StaticAssertDynamicMessage { call_stack } + | RuntimeError::StaticAssertDynamicPredicate { call_stack } + | RuntimeError::StaticAssertFailed { call_stack } | RuntimeError::IntegerOutOfBounds { call_stack, .. } | RuntimeError::UnsupportedIntegerSize { call_stack, .. } | RuntimeError::NestedSlice { call_stack, .. 
} diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 7e362599fb5..0e0c3f1e631 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -48,47 +48,54 @@ pub mod ssa_gen; /// and Brillig functions for unconstrained execution. pub(crate) fn optimize_into_acir( program: Program, - print_passes: bool, - print_brillig_trace: bool, - force_brillig_output: bool, - print_timings: bool, + options: &SsaEvaluatorOptions, ) -> Result { let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); - let ssa = SsaBuilder::new(program, print_passes, force_brillig_output, print_timings)? - .run_pass(Ssa::defunctionalize, "After Defunctionalization:") - .run_pass(Ssa::remove_paired_rc, "After Removing Paired rc_inc & rc_decs:") - .run_pass(Ssa::separate_runtime, "After Runtime Separation:") - .run_pass(Ssa::resolve_is_unconstrained, "After Resolving IsUnconstrained:") - .run_pass(Ssa::inline_functions, "After Inlining:") - // Run mem2reg with the CFG separated into blocks - .run_pass(Ssa::mem2reg, "After Mem2Reg:") - .run_pass(Ssa::as_slice_optimization, "After `as_slice` optimization") - .try_run_pass(Ssa::evaluate_assert_constant, "After Assert Constant:")? - .try_run_pass(Ssa::unroll_loops_iteratively, "After Unrolling:")? - .run_pass(Ssa::simplify_cfg, "After Simplifying:") - .run_pass(Ssa::flatten_cfg, "After Flattening:") - .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") - // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores - .run_pass(Ssa::mem2reg, "After Mem2Reg:") - // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. - // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. - // This pass must come immediately following `mem2reg` as the succeeding passes - // may create an SSA which inlining fails to handle. - .run_pass(Ssa::inline_functions_with_no_predicates, "After Inlining:") - .run_pass(Ssa::remove_if_else, "After Remove IfElse:") - .run_pass(Ssa::fold_constants, "After Constant Folding:") - .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffects removal:") - .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:") - .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") - .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:") - .finish(); - - let brillig = time("SSA to Brillig", print_timings, || ssa.to_brillig(print_brillig_trace)); + let ssa = SsaBuilder::new( + program, + options.enable_ssa_logging, + options.force_brillig_output, + options.print_codegen_timings, + )? + .run_pass(Ssa::defunctionalize, "After Defunctionalization:") + .run_pass(Ssa::remove_paired_rc, "After Removing Paired rc_inc & rc_decs:") + .run_pass(Ssa::separate_runtime, "After Runtime Separation:") + .run_pass(Ssa::resolve_is_unconstrained, "After Resolving IsUnconstrained:") + .run_pass(Ssa::inline_functions, "After Inlining:") + // Run mem2reg with the CFG separated into blocks + .run_pass(Ssa::mem2reg, "After Mem2Reg:") + .run_pass(Ssa::as_slice_optimization, "After `as_slice` optimization") + .try_run_pass( + Ssa::evaluate_static_assert_and_assert_constant, + "After `static_assert` and `assert_constant`:", + )? + .try_run_pass(Ssa::unroll_loops_iteratively, "After Unrolling:")? 
+ .run_pass(Ssa::simplify_cfg, "After Simplifying:") + .run_pass(Ssa::flatten_cfg, "After Flattening:") + .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") + // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores + .run_pass(Ssa::mem2reg, "After Mem2Reg:") + // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. + // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. + // This pass must come immediately following `mem2reg` as the succeeding passes + // may create an SSA which inlining fails to handle. + .run_pass(Ssa::inline_functions_with_no_predicates, "After Inlining:") + .run_pass(Ssa::remove_if_else, "After Remove IfElse:") + .run_pass(Ssa::fold_constants, "After Constant Folding:") + .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffects removal:") + .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:") + .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") + .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:") + .finish(); + + let brillig = time("SSA to Brillig", options.print_codegen_timings, || { + ssa.to_brillig(options.enable_brillig_logging) + }); drop(ssa_gen_span_guard); - time("SSA to ACIR", print_timings, || ssa.into_acir(&brillig)) + time("SSA to ACIR", options.print_codegen_timings, || ssa.into_acir(&brillig)) } // Helper to time SSA passes @@ -144,17 +151,26 @@ impl SsaProgramArtifact { } } +pub struct SsaEvaluatorOptions { + /// Emit debug information for the intermediate SSA IR + pub enable_ssa_logging: bool, + + pub enable_brillig_logging: bool, + + /// Force Brillig output (for step debugging) + pub force_brillig_output: bool, + + /// Pretty print benchmark times of each code generation pass + pub print_codegen_timings: bool, +} + /// Compiles the [`Program`] into [`ACIR``][acvm::acir::circuit::Program]. /// /// The output ACIR is backend-agnostic and so must go through a transformation pass before usage in proof generation. 
-#[allow(clippy::type_complexity)] #[tracing::instrument(level = "trace", skip_all)] pub fn create_program( program: Program, - enable_ssa_logging: bool, - enable_brillig_logging: bool, - force_brillig_output: bool, - print_codegen_timings: bool, + options: &SsaEvaluatorOptions, ) -> Result { let debug_variables = program.debug_variables.clone(); let debug_types = program.debug_types.clone(); @@ -163,13 +179,7 @@ pub fn create_program( let func_sigs = program.function_signatures.clone(); let recursive = program.recursive; - let (generated_acirs, generated_brillig, error_types) = optimize_into_acir( - program, - enable_ssa_logging, - enable_brillig_logging, - force_brillig_output, - print_codegen_timings, - )?; + let (generated_acirs, generated_brillig, error_types) = optimize_into_acir(program, options)?; assert_eq!( generated_acirs.len(), func_sigs.len(), diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 56b869fbf6b..928a7b105ea 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -1547,6 +1547,26 @@ impl AcirContext { brillig_function_index: u32, brillig_stdlib_func: Option, ) -> Result, RuntimeError> { + let predicate = self.var_to_expression(predicate)?; + if predicate.is_zero() { + // If the predicate has a constant value of zero, the brillig call will never be executed. + // We can then immediately zero out all of its outputs as this is the value which would be written + // if we waited until runtime to resolve this call. + let outputs_var = vecmap(outputs, |output| match output { + AcirType::NumericType(_) => { + let var = self.add_constant(F::zero()); + AcirValue::Var(var, output.clone()) + } + AcirType::Array(element_types, size) => { + self.zeroed_array_output(&element_types, size) + } + }); + + return Ok(outputs_var); + } + // Remove "always true" predicates. + let predicate = if predicate == Expression::one() { None } else { Some(predicate) }; + let brillig_inputs: Vec> = try_vecmap(inputs, |i| -> Result<_, InternalError> { match i { @@ -1594,10 +1614,9 @@ impl AcirContext { acir_value } }); - let predicate = self.var_to_expression(predicate)?; self.acir_ir.brillig_call( - Some(predicate), + predicate, generated_brillig, brillig_inputs, brillig_outputs, @@ -1668,6 +1687,27 @@ impl AcirContext { Ok(()) } + /// Recursively create zeroed-out acir values for returned arrays. This is necessary because a brillig returned array can have nested arrays as elements. + fn zeroed_array_output(&mut self, element_types: &[AcirType], size: usize) -> AcirValue { + let mut array_values = im::Vector::new(); + for _ in 0..size { + for element_type in element_types { + match element_type { + AcirType::Array(nested_element_types, nested_size) => { + let nested_acir_value = + self.zeroed_array_output(nested_element_types, *nested_size); + array_values.push_back(nested_acir_value); + } + AcirType::NumericType(_) => { + let var = self.add_constant(F::zero()); + array_values.push_back(AcirValue::Var(var, element_type.clone())); + } + } + } + } + AcirValue::Array(array_values) + } + /// Recursively create acir values for returned arrays. This is necessary because a brillig returned array can have nested arrays as elements. /// A singular array of witnesses is collected for a top level array, by deflattening the assigned witnesses at each level. 
fn brillig_array_output( diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 9d271f7cd9c..bcccfac8950 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -439,7 +439,7 @@ impl GeneratedAcir { let inputs = vec![BrilligInputs::Single(expr)]; let outputs = vec![BrilligOutputs::Simple(inverted_witness)]; self.brillig_call( - Some(Expression::one()), + None, &inverse_code, inputs, outputs, @@ -709,6 +709,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::Poseidon2Permutation => None, BlackBoxFunc::Sha256Compression => Some(8), + // Pedersen commitment returns a point BlackBoxFunc::PedersenCommitment => Some(2), diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index f763ae52d50..994386f8197 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -497,9 +497,22 @@ impl DataFlowGraph { } } - /// True if the given ValueId refers to a constant value + /// True if the given ValueId refers to a (recursively) constant value pub(crate) fn is_constant(&self, argument: ValueId) -> bool { - !matches!(&self[self.resolve(argument)], Value::Instruction { .. } | Value::Param { .. }) + match &self[self.resolve(argument)] { + Value::Instruction { .. } | Value::Param { .. } => false, + Value::Array { array, .. } => array.iter().all(|element| self.is_constant(*element)), + _ => true, + } + } + + /// True that the input is a non-zero `Value::NumericConstant` + pub(crate) fn is_constant_true(&self, argument: ValueId) -> bool { + if let Some(constant) = self.get_numeric_constant(argument) { + !constant.is_zero() + } else { + false + } } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index e21deb9ef79..f854e8e0693 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -52,6 +52,7 @@ pub(crate) enum Intrinsic { ArrayLen, AsSlice, AssertConstant, + StaticAssert, SlicePushBack, SlicePushFront, SlicePopBack, @@ -67,6 +68,7 @@ pub(crate) enum Intrinsic { AsField, AsWitness, IsUnconstrained, + DerivePedersenGenerators, } impl std::fmt::Display for Intrinsic { @@ -75,6 +77,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::ArrayLen => write!(f, "array_len"), Intrinsic::AsSlice => write!(f, "as_slice"), Intrinsic::AssertConstant => write!(f, "assert_constant"), + Intrinsic::StaticAssert => write!(f, "static_assert"), Intrinsic::SlicePushBack => write!(f, "slice_push_back"), Intrinsic::SlicePushFront => write!(f, "slice_push_front"), Intrinsic::SlicePopBack => write!(f, "slice_pop_back"), @@ -92,6 +95,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::AsField => write!(f, "as_field"), Intrinsic::AsWitness => write!(f, "as_witness"), Intrinsic::IsUnconstrained => write!(f, "is_unconstrained"), + Intrinsic::DerivePedersenGenerators => write!(f, "derive_pedersen_generators"), } } } @@ -102,9 +106,10 @@ impl Intrinsic { /// If there are no side effects then the `Intrinsic` can be removed if the result is unused. 
pub(crate) fn has_side_effects(&self) -> bool { match self { - Intrinsic::AssertConstant | Intrinsic::ApplyRangeConstraint | Intrinsic::AsWitness => { - true - } + Intrinsic::AssertConstant + | Intrinsic::StaticAssert + | Intrinsic::ApplyRangeConstraint + | Intrinsic::AsWitness => true, // These apply a constraint that the input must fit into a specified number of limbs. Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) => true, @@ -120,7 +125,8 @@ impl Intrinsic { | Intrinsic::StrAsBytes | Intrinsic::FromField | Intrinsic::AsField - | Intrinsic::IsUnconstrained => false, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => false, // Some black box functions have side-effects Intrinsic::BlackBox(func) => matches!( @@ -139,6 +145,7 @@ impl Intrinsic { "array_len" => Some(Intrinsic::ArrayLen), "as_slice" => Some(Intrinsic::AsSlice), "assert_constant" => Some(Intrinsic::AssertConstant), + "static_assert" => Some(Intrinsic::StaticAssert), "apply_range_constraint" => Some(Intrinsic::ApplyRangeConstraint), "slice_push_back" => Some(Intrinsic::SlicePushBack), "slice_push_front" => Some(Intrinsic::SlicePushFront), @@ -155,6 +162,7 @@ impl Intrinsic { "as_field" => Some(Intrinsic::AsField), "as_witness" => Some(Intrinsic::AsWitness), "is_unconstrained" => Some(Intrinsic::IsUnconstrained), + "derive_pedersen_generators" => Some(Intrinsic::DerivePedersenGenerators), other => BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 74e5653c7ba..281ab7c3057 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -2,6 +2,7 @@ use fxhash::FxHashMap as HashMap; use std::{collections::VecDeque, rc::Rc}; use acvm::{acir::AcirField, acir::BlackBoxFunc, BlackBoxResolutionError, FieldElement}; +use bn254_blackbox_solver::derive_generators; use iter_extended::vecmap; use num_bigint::BigUint; @@ -245,6 +246,21 @@ pub(super) fn simplify_call( SimplifyResult::None } } + Intrinsic::StaticAssert => { + if arguments.len() != 2 { + panic!("ICE: static_assert called with wrong number of arguments") + } + + if !dfg.is_constant(arguments[1]) { + return SimplifyResult::None; + } + + if dfg.is_constant_true(arguments[0]) { + SimplifyResult::Remove + } else { + SimplifyResult::None + } + } Intrinsic::ApplyRangeConstraint => { let value = arguments[0]; let max_bit_size = dfg.get_numeric_constant(arguments[1]); @@ -295,6 +311,13 @@ pub(super) fn simplify_call( } Intrinsic::AsWitness => SimplifyResult::None, Intrinsic::IsUnconstrained => SimplifyResult::None, + Intrinsic::DerivePedersenGenerators => { + if let Some(Type::Array(_, len)) = ctrl_typevars.unwrap().first() { + simplify_derive_generators(dfg, arguments, *len as u32) + } else { + unreachable!("Derive Pedersen Generators must return an array"); + } + } } } @@ -438,7 +461,9 @@ fn simplify_black_box_func( BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), BlackBoxFunc::Blake3 => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake3), - BlackBoxFunc::Keccakf1600 => SimplifyResult::None, //TODO(Guillaume) + BlackBoxFunc::PedersenCommitment + | BlackBoxFunc::PedersenHash + | BlackBoxFunc::Keccakf1600 => SimplifyResult::None, //TODO(Guillaume) BlackBoxFunc::Keccak256 => { match (dfg.get_array_constant(arguments[0]), 
dfg.get_numeric_constant(arguments[1])) { (Some((input, _)), Some(num_bytes)) if array_is_constant(dfg, &input) => { @@ -468,8 +493,6 @@ fn simplify_black_box_func( BlackBoxFunc::MultiScalarMul | BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash | BlackBoxFunc::EmbeddedCurveAdd => { // Currently unsolvable here as we rely on an implementation in the backend. SimplifyResult::None @@ -626,3 +649,47 @@ fn simplify_signature( _ => SimplifyResult::None, } } + +fn simplify_derive_generators( + dfg: &mut DataFlowGraph, + arguments: &[ValueId], + num_generators: u32, +) -> SimplifyResult { + if arguments.len() == 2 { + let domain_separator_string = dfg.get_array_constant(arguments[0]); + let starting_index = dfg.get_numeric_constant(arguments[1]); + if let (Some(domain_separator_string), Some(starting_index)) = + (domain_separator_string, starting_index) + { + let domain_separator_bytes = domain_separator_string + .0 + .iter() + .map(|&x| dfg.get_numeric_constant(x).unwrap().to_u128() as u8) + .collect::>(); + let generators = derive_generators( + &domain_separator_bytes, + num_generators, + starting_index.try_to_u32().expect("argument is declared as u32"), + ); + let is_infinite = dfg.make_constant(FieldElement::zero(), Type::bool()); + let mut results = Vec::new(); + for gen in generators { + let x_big: BigUint = gen.x.into(); + let x = FieldElement::from_be_bytes_reduce(&x_big.to_bytes_be()); + let y_big: BigUint = gen.y.into(); + let y = FieldElement::from_be_bytes_reduce(&y_big.to_bytes_be()); + results.push(dfg.make_constant(x, Type::field())); + results.push(dfg.make_constant(y, Type::field())); + results.push(is_infinite); + } + let len = results.len(); + let result = + dfg.make_array(results.into(), Type::Array(vec![Type::field()].into(), len)); + SimplifyResult::SimplifiedTo(result) + } else { + SimplifyResult::None + } + } else { + unreachable!("Unexpected number of arguments to derive_generators"); + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs index a3608f89612..ae0681a55ff 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs @@ -22,7 +22,9 @@ impl Ssa { /// since we must go through every instruction to find all references to `assert_constant` /// while loop unrolling only touches blocks with loops in them. #[tracing::instrument(level = "trace", skip(self))] - pub(crate) fn evaluate_assert_constant(mut self) -> Result { + pub(crate) fn evaluate_static_assert_and_assert_constant( + mut self, + ) -> Result { for function in self.functions.values_mut() { for block in function.reachable_blocks() { // Unfortunately we can't just use instructions.retain(...) 
here since @@ -54,10 +56,13 @@ fn check_instruction( instruction: InstructionId, ) -> Result { let assert_constant_id = function.dfg.import_intrinsic(Intrinsic::AssertConstant); + let static_assert_id = function.dfg.import_intrinsic(Intrinsic::StaticAssert); match &function.dfg[instruction] { Instruction::Call { func, arguments } => { if *func == assert_constant_id { evaluate_assert_constant(function, instruction, arguments) + } else if *func == static_assert_id { + evaluate_static_assert(function, instruction, arguments) } else { Ok(true) } @@ -82,3 +87,35 @@ fn evaluate_assert_constant( Err(RuntimeError::AssertConstantFailed { call_stack }) } } + +/// Evaluate a call to `static_assert`, returning an error if the value is false +/// or not constant (see assert_constant). +/// +/// When it passes, Ok(false) is returned. This signifies a +/// success but also that the instruction need not be reinserted into the block being unrolled +/// since it has already been evaluated. +fn evaluate_static_assert( + function: &Function, + instruction: InstructionId, + arguments: &[ValueId], +) -> Result { + if arguments.len() != 2 { + panic!("ICE: static_assert called with wrong number of arguments") + } + + if !function.dfg.is_constant(arguments[1]) { + let call_stack = function.dfg.get_call_stack(instruction); + return Err(RuntimeError::StaticAssertDynamicMessage { call_stack }); + } + + if function.dfg.is_constant_true(arguments[0]) { + Ok(false) + } else { + let call_stack = function.dfg.get_call_stack(instruction); + if function.dfg.is_constant(arguments[0]) { + Err(RuntimeError::StaticAssertFailed { call_stack }) + } else { + Err(RuntimeError::StaticAssertDynamicPredicate { call_stack }) + } + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 58f70ba9192..c7ce3aaa155 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -1379,28 +1379,28 @@ mod test { fn should_not_merge_incorrectly_to_false() { // Regression test for #1792 // Tests that it does not simplify a true constraint an always-false constraint - // fn main f1 { - // b0(): - // v4 = call pedersen([Field 0], u32 0) - // v5 = array_get v4, index Field 0 - // v6 = cast v5 as u32 - // v8 = mod v6, u32 2 - // v9 = cast v8 as u1 - // v10 = allocate - // store Field 0 at v10 - // jmpif v9 then: b1, else: b2 - // b1(): - // v14 = add v5, Field 1 - // store v14 at v10 - // jmp b3() - // b3(): - // v12 = eq v9, u1 1 - // constrain v12 - // return - // b2(): - // store Field 0 at v10 - // jmp b3() - // } + // acir(inline) fn main f1 { + // b0(v0: [u8; 2]): + // v4 = call keccak256(v0, u8 2) + // v5 = array_get v4, index u8 0 + // v6 = cast v5 as u32 + // v8 = truncate v6 to 1 bits, max_bit_size: 32 + // v9 = cast v8 as u1 + // v10 = allocate + // store u8 0 at v10 + // jmpif v9 then: b2, else: b3 + // b2(): + // v12 = cast v5 as Field + // v13 = add v12, Field 1 + // store v13 at v10 + // jmp b4() + // b4(): + // constrain v9 == u1 1 + // return + // b3(): + // store u8 0 at v10 + // jmp b4() + // } let main_id = Id::test_new(1); let mut builder = FunctionBuilder::new("main".into(), main_id); @@ -1409,20 +1409,18 @@ mod test { let b2 = builder.insert_block(); let b3 = builder.insert_block(); - let element_type = Rc::new(vec![Type::field()]); - let array_type = Type::Array(element_type.clone(), 1); - - let zero = builder.field_constant(0_u128); - let zero_array = 
builder.array_constant(im::Vector::unit(zero), array_type); - let i_zero = builder.numeric_constant(0_u128, Type::unsigned(32)); - let pedersen = builder - .import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::PedersenCommitment)); - let v4 = builder.insert_call( - pedersen, - vec![zero_array, i_zero], - vec![Type::Array(element_type, 2)], - )[0]; - let v5 = builder.insert_array_get(v4, zero, Type::field()); + let element_type = Rc::new(vec![Type::unsigned(8)]); + let array_type = Type::Array(element_type.clone(), 2); + let array = builder.add_parameter(array_type); + + let zero = builder.numeric_constant(0_u128, Type::unsigned(8)); + let two = builder.numeric_constant(2_u128, Type::unsigned(8)); + + let keccak = + builder.import_intrinsic_id(Intrinsic::BlackBox(acvm::acir::BlackBoxFunc::Keccak256)); + let v4 = + builder.insert_call(keccak, vec![array, two], vec![Type::Array(element_type, 32)])[0]; + let v5 = builder.insert_array_get(v4, zero, Type::unsigned(8)); let v6 = builder.insert_cast(v5, Type::unsigned(32)); let i_two = builder.numeric_constant(2_u128, Type::unsigned(32)); let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); @@ -1435,7 +1433,9 @@ mod test { builder.switch_to_block(b1); let one = builder.field_constant(1_u128); - let v14 = builder.insert_binary(v5, BinaryOp::Add, one); + let v5b = builder.insert_cast(v5, Type::field()); + let v13: Id = builder.insert_binary(v5b, BinaryOp::Add, one); + let v14 = builder.insert_cast(v13, Type::unsigned(8)); builder.insert_store(v10, v14); builder.terminate_with_jmp(b3, vec![]); @@ -1449,8 +1449,9 @@ mod test { builder.insert_constrain(v12, v_true, None); builder.terminate_with_return(vec![]); - let ssa = builder.finish().flatten_cfg(); - let main = ssa.main(); + let ssa = builder.finish(); + let flattened_ssa = ssa.flatten_cfg(); + let main = flattened_ssa.main(); // Now assert that there is not an always-false constraint after flattening: let mut constrain_count = 0; diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index 6db76996747..f9a3c9a55eb 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -150,6 +150,7 @@ impl Context { Intrinsic::ArrayLen | Intrinsic::AssertConstant + | Intrinsic::StaticAssert | Intrinsic::ApplyRangeConstraint | Intrinsic::StrAsBytes | Intrinsic::ToBits(_) @@ -159,7 +160,8 @@ impl Context { | Intrinsic::AsField | Intrinsic::AsSlice | Intrinsic::AsWitness - | Intrinsic::IsUnconstrained => false, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => false, }, // We must assume that functions contain a side effect as we cannot inspect more deeply. 
diff --git a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index 6ca7eb74e9d..242eea7d6f4 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -226,6 +226,7 @@ fn slice_capacity_change( // These cases don't affect slice capacities Intrinsic::AssertConstant + | Intrinsic::StaticAssert | Intrinsic::ApplyRangeConstraint | Intrinsic::ArrayLen | Intrinsic::StrAsBytes @@ -233,6 +234,7 @@ fn slice_capacity_change( | Intrinsic::FromField | Intrinsic::AsField | Intrinsic::AsWitness - | Intrinsic::IsUnconstrained => SizeChange::None, + | Intrinsic::IsUnconstrained + | Intrinsic::DerivePedersenGenerators => SizeChange::None, } } diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index 075df4ee4e3..87cc7990753 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -5,9 +5,11 @@ use crate::ast::{ Ident, ItemVisibility, Path, Pattern, Recoverable, Statement, StatementKind, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, }; +use crate::hir::def_collector::errors::DefCollectorErrorKind; use crate::macros_api::StructId; use crate::node_interner::ExprId; -use crate::token::{Attributes, Token}; +use crate::token::{Attributes, Token, Tokens}; +use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::{Span, Spanned}; @@ -33,18 +35,10 @@ pub enum ExpressionKind { Tuple(Vec), Lambda(Box), Parenthesized(Box), - Quote(BlockExpression, Span), + Quote(Tokens), Unquote(Box), Comptime(BlockExpression, Span), - /// Unquote expressions are replaced with UnquoteMarkers when Quoted - /// expressions are resolved. Since the expression being quoted remains an - /// ExpressionKind (rather than a resolved ExprId), the UnquoteMarker must be - /// here in the AST even though it is technically HIR-only. - /// Each usize in an UnquoteMarker is an index which corresponds to the index of the - /// expression in the Hir::Quote expression list to replace it with. - UnquoteMarker(usize), - // This variant is only emitted when inlining the result of comptime // code. It is used to translate function values back into the AST while // guaranteeing they have the same instantiated type and definition id without resolving again. @@ -54,7 +48,72 @@ pub enum ExpressionKind { /// A Vec of unresolved names for type variables. /// For `fn foo(...)` this corresponds to vec!["A", "B"]. -pub type UnresolvedGenerics = Vec; +pub type UnresolvedGenerics = Vec; + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub enum UnresolvedGeneric { + Variable(Ident), + Numeric { ident: Ident, typ: UnresolvedType }, +} + +impl UnresolvedGeneric { + pub fn span(&self) -> Span { + match self { + UnresolvedGeneric::Variable(ident) => ident.0.span(), + UnresolvedGeneric::Numeric { ident, typ } => { + ident.0.span().merge(typ.span.unwrap_or_default()) + } + } + } + + pub fn kind(&self) -> Result { + match self { + UnresolvedGeneric::Variable(_) => Ok(Kind::Normal), + UnresolvedGeneric::Numeric { typ, .. 
} => { + let typ = self.resolve_numeric_kind_type(typ)?; + Ok(Kind::Numeric(Box::new(typ))) + } + } + } + + fn resolve_numeric_kind_type( + &self, + typ: &UnresolvedType, + ) -> Result { + use crate::ast::UnresolvedTypeData::{FieldElement, Integer}; + + match typ.typ { + FieldElement => Ok(Type::FieldElement), + Integer(sign, bits) => Ok(Type::Integer(sign, bits)), + // Only fields and integers are supported for numeric kinds + _ => Err(DefCollectorErrorKind::UnsupportedNumericGenericType { + ident: self.ident().clone(), + typ: typ.typ.clone(), + }), + } + } + + pub(crate) fn ident(&self) -> &Ident { + match self { + UnresolvedGeneric::Variable(ident) | UnresolvedGeneric::Numeric { ident, .. } => ident, + } + } +} + +impl Display for UnresolvedGeneric { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + UnresolvedGeneric::Variable(ident) => write!(f, "{ident}"), + UnresolvedGeneric::Numeric { ident, typ } => write!(f, "let {ident}: {typ}"), + } + } +} + +impl From for UnresolvedGeneric { + fn from(value: Ident) -> Self { + UnresolvedGeneric::Variable(value) + } +} impl ExpressionKind { pub fn into_path(self) -> Option { @@ -130,23 +189,6 @@ impl ExpressionKind { struct_type: None, })) } - - /// Returns true if the expression is a literal integer - pub fn is_integer(&self) -> bool { - self.as_integer().is_some() - } - - fn as_integer(&self) -> Option { - let literal = match self { - ExpressionKind::Literal(literal) => literal, - _ => return None, - }; - - match literal { - Literal::Integer(integer, _) => Some(*integer), - _ => None, - } - } } impl Recoverable for ExpressionKind { @@ -557,12 +599,14 @@ impl Display for ExpressionKind { } Lambda(lambda) => lambda.fmt(f), Parenthesized(sub_expr) => write!(f, "({sub_expr})"), - Quote(block, _) => write!(f, "quote {block}"), Comptime(block, _) => write!(f, "comptime {block}"), Error => write!(f, "Error"), Resolved(_) => write!(f, "?Resolved"), Unquote(expr) => write!(f, "$({expr})"), - UnquoteMarker(index) => write!(f, "${index}"), + Quote(tokens) => { + let tokens = vecmap(&tokens.0, ToString::to_string); + write!(f, "quote {{ {} }}", tokens.join(" ")) + } } } } @@ -763,22 +807,32 @@ impl Display for FunctionDefinition { writeln!(f, "{:?}", self.attributes)?; let parameters = vecmap(&self.parameters, |Param { visibility, pattern, typ, span: _ }| { - format!("{pattern}: {visibility} {typ}") + if *visibility == Visibility::Public { + format!("{pattern}: {visibility} {typ}") + } else { + format!("{pattern}: {typ}") + } }); let where_clause = vecmap(&self.where_clause, ToString::to_string); let where_clause_str = if !where_clause.is_empty() { - format!("where {}", where_clause.join(", ")) + format!(" where {}", where_clause.join(", ")) } else { "".to_string() }; + let return_type = if matches!(&self.return_type, FunctionReturnType::Default(_)) { + String::new() + } else { + format!(" -> {}", self.return_type) + }; + write!( f, - "fn {}({}) -> {} {} {}", + "fn {}({}){}{} {}", self.name, parameters.join(", "), - self.return_type, + return_type, where_clause_str, self.body ) diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index a1ae349b537..dfe4258744a 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -22,6 +22,7 @@ pub use traits::*; pub use type_alias::*; use crate::{ + node_interner::QuotedTypeId, parser::{ParserError, ParserErrorReason}, token::IntType, BinaryTypeOperator, @@ -119,6 +120,10 @@ pub enum 
UnresolvedTypeData { // The type of quoted code for metaprogramming Quoted(crate::QuotedType), + /// An already resolved type. These can only be parsed if they were present in the token stream + /// as a result of being spliced into a macro's token stream input. + Resolved(QuotedTypeId), + Unspecified, // This is for when the user declares a variable without specifying it's type Error, } @@ -146,7 +151,7 @@ pub struct UnaryRhsMethodCall { } /// The precursor to TypeExpression, this is the type that the parser allows -/// to be used in the length position of an array type. Only constants, variables, +/// to be used in the length position of an array type. Only constant integers, variables, /// and numeric binary operators are allowed here. #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum UnresolvedTypeExpression { @@ -221,6 +226,7 @@ impl std::fmt::Display for UnresolvedTypeData { Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), Parenthesized(typ) => write!(f, "({typ})"), + Resolved(_) => write!(f, "(resolved type)"), } } } @@ -259,6 +265,10 @@ impl UnresolvedType { pub fn unspecified() -> UnresolvedType { UnresolvedType { typ: UnresolvedTypeData::Unspecified, span: None } } + + pub(crate) fn is_type_expression(&self) -> bool { + matches!(&self.typ, UnresolvedTypeData::Expression(_)) + } } impl UnresolvedTypeData { diff --git a/compiler/noirc_frontend/src/ast/structure.rs b/compiler/noirc_frontend/src/ast/structure.rs index bda6b8c0b11..bb2d89841b9 100644 --- a/compiler/noirc_frontend/src/ast/structure.rs +++ b/compiler/noirc_frontend/src/ast/structure.rs @@ -20,7 +20,7 @@ impl NoirStruct { pub fn new( name: Ident, attributes: Vec, - generics: Vec, + generics: UnresolvedGenerics, fields: Vec<(Ident, UnresolvedType)>, span: Span, ) -> NoirStruct { diff --git a/compiler/noirc_frontend/src/ast/traits.rs b/compiler/noirc_frontend/src/ast/traits.rs index 772675723b5..b1b14e3f657 100644 --- a/compiler/noirc_frontend/src/ast/traits.rs +++ b/compiler/noirc_frontend/src/ast/traits.rs @@ -14,7 +14,7 @@ use crate::node_interner::TraitId; #[derive(Clone, Debug)] pub struct NoirTrait { pub name: Ident, - pub generics: Vec, + pub generics: UnresolvedGenerics, pub where_clause: Vec, pub span: Span, pub items: Vec, @@ -26,7 +26,7 @@ pub struct NoirTrait { pub enum TraitItem { Function { name: Ident, - generics: Vec, + generics: UnresolvedGenerics, parameters: Vec<(Ident, UnresolvedType)>, return_type: FunctionReturnType, where_clause: Vec, @@ -49,6 +49,7 @@ pub struct TypeImpl { pub object_type: UnresolvedType, pub type_span: Span, pub generics: UnresolvedGenerics, + pub where_clause: Vec, pub methods: Vec<(NoirFunction, Span)>, } diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs index d6ad752b67a..7d304990dd8 100644 --- a/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -18,7 +18,7 @@ use crate::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, HirConstructorExpression, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, HirMethodReference, - HirPrefixExpression, HirQuoted, + HirPrefixExpression, }, traits::TraitConstraint, }, @@ -28,7 +28,8 @@ use crate::{ MethodCallExpression, PrefixExpression, }, node_interner::{DefinitionKind, ExprId, FuncId}, - QuotedType, Shared, StructType, Type, + token::Tokens, + Kind, QuotedType, Shared, StructType, 
Type, }; use super::Elaborator; @@ -51,14 +52,27 @@ impl<'context> Elaborator<'context> { ExpressionKind::If(if_) => self.elaborate_if(*if_), ExpressionKind::Variable(variable, generics) => { let generics = generics.map(|option_inner| { - option_inner.into_iter().map(|generic| self.resolve_type(generic)).collect() + option_inner + .into_iter() + .map(|generic| { + // All type expressions should resolve to a `Type::Constant` + if generic.is_type_expression() { + self.resolve_type_inner( + generic, + &Kind::Numeric(Box::new(Type::default_int_type())), + ) + } else { + self.resolve_type(generic) + } + }) + .collect() }); return self.elaborate_variable(variable, generics); } ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple), ExpressionKind::Lambda(lambda) => self.elaborate_lambda(*lambda), ExpressionKind::Parenthesized(expr) => return self.elaborate_expression(*expr), - ExpressionKind::Quote(quote, _) => self.elaborate_quote(quote), + ExpressionKind::Quote(quote) => self.elaborate_quote(quote), ExpressionKind::Comptime(comptime, _) => { return self.elaborate_comptime_block(comptime, expr.span) } @@ -68,9 +82,6 @@ impl<'context> Elaborator<'context> { self.push_err(ResolverError::UnquoteUsedOutsideQuote { span: expr.span }); (HirExpression::Error, Type::Error) } - ExpressionKind::UnquoteMarker(index) => { - unreachable!("UnquoteMarker({index}) remaining in runtime code") - } }; let id = self.interner.push_expr(hir_expr); self.interner.push_expr_location(id, expr.span, self.file); @@ -646,16 +657,15 @@ impl<'context> Elaborator<'context> { (expr, Type::Function(arg_types, Box::new(body_type), Box::new(env_type))) } - fn elaborate_quote(&mut self, mut block: BlockExpression) -> (HirExpression, Type) { - let mut unquoted_exprs = Vec::new(); - self.find_unquoted_exprs_in_block(&mut block, &mut unquoted_exprs); - let quoted = HirQuoted { quoted_block: block, unquoted_exprs }; - (HirExpression::Quote(quoted), Type::Quoted(QuotedType::Expr)) + fn elaborate_quote(&mut self, mut tokens: Tokens) -> (HirExpression, Type) { + tokens = self.find_unquoted_exprs_tokens(tokens); + (HirExpression::Quote(tokens), Type::Quoted(QuotedType::Quoted)) } fn elaborate_comptime_block(&mut self, block: BlockExpression, span: Span) -> (ExprId, Type) { let (block, _typ) = self.elaborate_block_expression(block); - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let value = interpreter.evaluate_block(block); self.inline_comptime_value(value, span) } @@ -716,7 +726,7 @@ impl<'context> Elaborator<'context> { location: Location, return_type: Type, ) -> Option<(HirExpression, Type)> { - self.unify(&return_type, &Type::Quoted(QuotedType::Expr), || { + self.unify(&return_type, &Type::Quoted(QuotedType::Quoted), || { TypeCheckError::MacroReturningNonExpr { typ: return_type.clone(), span: location.span } }); @@ -728,7 +738,8 @@ impl<'context> Elaborator<'context> { } }; - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let mut comptime_args = Vec::new(); let mut errors = Vec::new(); @@ -748,7 +759,8 @@ impl<'context> Elaborator<'context> { return None; } - let result = interpreter.call_function(function, comptime_args, location); + let bindings = interpreter.interner.get_instantiation_bindings(func).clone(); + let result = interpreter.call_function(function, 
comptime_args, bindings, location); let (expr_id, typ) = self.inline_comptime_value(result, location.span); Some((self.interner.expression(&expr_id), typ)) } diff --git a/compiler/noirc_frontend/src/elaborator/lints.rs b/compiler/noirc_frontend/src/elaborator/lints.rs index b86912940eb..af6f4cdb42f 100644 --- a/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/compiler/noirc_frontend/src/elaborator/lints.rs @@ -137,7 +137,7 @@ pub(super) fn unconstrained_function_args( function_args .iter() .filter_map(|(typ, _, span)| { - if type_contains_mutable_reference(typ) { + if !typ.is_valid_for_unconstrained_boundary() { Some(TypeCheckError::ConstrainedReferenceToUnconstrained { span: *span }) } else { None @@ -153,17 +153,13 @@ pub(super) fn unconstrained_function_return( ) -> Option { if return_type.contains_slice() { Some(TypeCheckError::UnconstrainedSliceReturnToConstrained { span }) - } else if type_contains_mutable_reference(return_type) { + } else if !return_type.is_valid_for_unconstrained_boundary() { Some(TypeCheckError::UnconstrainedReferenceToConstrained { span }) } else { None } } -fn type_contains_mutable_reference(typ: &Type) -> bool { - matches!(&typ.follow_bindings(), Type::MutableReference(_)) -} - /// Only entrypoint functions require a `pub` visibility modifier applied to their return types. /// /// Application of `pub` to other functions is not meaningful and is a mistake. diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs index 91d6ba71927..ae8237706cc 100644 --- a/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -6,12 +6,13 @@ use std::{ use crate::{ ast::{FunctionKind, UnresolvedTraitConstraint}, hir::{ - comptime::{self, Interpreter, Value}, + comptime::{self, Interpreter, InterpreterError, Value}, def_collector::{ dc_crate::{ filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, UnresolvedStruct, UnresolvedTypeAlias, }, + dc_mod, errors::DuplicateType, }, resolution::{errors::ResolverError, path_resolver::PathResolver, resolver::LambdaContext}, @@ -22,6 +23,7 @@ use crate::{ expr::HirIdent, function::{FunctionBody, Parameters}, traits::TraitConstraint, + types::{Generics, Kind, ResolvedGeneric}, }, macros_api::{ BlockExpression, Ident, NodeInterner, NoirFunction, NoirStruct, Pattern, @@ -30,10 +32,11 @@ use crate::{ node_interner::{ DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, TraitId, TypeAliasId, }, - Shared, Type, TypeVariable, + parser::TopLevelStatement, + Shared, Type, TypeBindings, TypeVariable, }; use crate::{ - ast::{TraitBound, UnresolvedGenerics}, + ast::{TraitBound, UnresolvedGeneric, UnresolvedGenerics}, graph::CrateId, hir::{ def_collector::{dc_crate::CollectedItems, errors::DefCollectorErrorKind}, @@ -44,7 +47,6 @@ use crate::{ hir_def::function::{FuncMeta, HirFunction}, macros_api::{Param, Path, UnresolvedType, UnresolvedTypeData}, node_interner::TraitImplId, - Generics, }; use crate::{ hir::{ @@ -90,22 +92,13 @@ pub struct Elaborator<'context> { file: FileId, - in_unconstrained_fn: bool, nested_loops: usize, - /// True if the current module is a contract. - /// This is usually determined by self.path_resolver.module_id(), but it can - /// be overridden for impls. Impls are an odd case since the methods within resolve - /// as if they're in the parent module, but should be placed in a child module. 
- /// Since they should be within a child module, in_contract is manually set to false - /// for these so we can still resolve them in the parent module without them being in a contract. - in_contract: bool, - /// Contains a mapping of the current struct or functions's generics to /// unique type variables if we're resolving a struct. Empty otherwise. /// This is a Vec rather than a map to preserve the order a functions generics /// were declared in. - generics: Vec<(Rc, TypeVariable, Span)>, + generics: Vec, /// When resolving lambda expressions, we need to keep track of the variables /// that are captured. We do this in order to create the hidden environment @@ -181,9 +174,7 @@ impl<'context> Elaborator<'context> { interner: &mut context.def_interner, def_maps: &mut context.def_maps, file: FileId::dummy(), - in_unconstrained_fn: false, nested_loops: 0, - in_contract: false, generics: Vec::new(), lambda_stack: Vec::new(), self_type: None, @@ -239,11 +230,12 @@ impl<'context> Elaborator<'context> { self.define_type_alias(alias_id, alias); } + // Must resolve structs before we resolve globals. + let generated_items = self.collect_struct_definitions(items.types); + self.define_function_metas(&mut items.functions, &mut items.impls, &mut items.trait_impls); - self.collect_traits(items.traits); - // Must resolve structs before we resolve globals. - self.collect_struct_definitions(items.types); + self.collect_traits(items.traits); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. This cannot be @@ -265,6 +257,16 @@ impl<'context> Elaborator<'context> { self.elaborate_global(global); } + // After everything is collected, we can elaborate our generated items. + // It may be better to inline these within `items` entirely since elaborating them + // all here means any globals will not see these. Inlining them completely within `items` + // means we must be more careful about missing any additional items that need to be already + // elaborated. E.g. if a new struct is created, we've already passed the code path to + // elaborate them. + if !generated_items.is_empty() { + self.elaborate_items(generated_items); + } + for functions in items.functions { self.elaborate_functions(functions); } @@ -318,12 +320,6 @@ impl<'context> Elaborator<'context> { let old_function = std::mem::replace(&mut self.current_function, Some(id)); - // Without this, impl methods can accidentally be placed in contracts. See #3254 - let was_in_contract = self.in_contract; - if self.self_type.is_some() { - self.in_contract = false; - } - self.scopes.start_function(); let old_item = std::mem::replace(&mut self.current_item, Some(DependencyId::Function(id))); @@ -331,18 +327,26 @@ impl<'context> Elaborator<'context> { self.trait_bounds = func_meta.trait_constraints.clone(); - if self.interner.function_modifiers(&id).is_unconstrained { - self.in_unconstrained_fn = true; + // Introduce all numeric generics into scope + for generic in &func_meta.all_generics { + if let Kind::Numeric(typ) = &generic.kind { + let definition = DefinitionKind::GenericType(generic.type_var.clone()); + let ident = Ident::new(generic.name.to_string(), generic.span); + let hir_ident = + self.add_variable_decl_inner(ident, false, false, false, definition); + self.interner.push_definition_type(hir_ident.id, *typ.clone()); + } } // The DefinitionIds for each parameter were already created in define_function_meta // so we need to reintroduce the same IDs into scope here. 
for parameter in &func_meta.parameter_idents { let name = self.interner.definition_name(parameter.id).to_owned(); - self.add_existing_variable_to_scope(name, parameter.clone()); + self.add_existing_variable_to_scope(name, parameter.clone(), true); } self.generics = func_meta.all_generics.clone(); + self.declare_numeric_generics(&func_meta.parameters, func_meta.return_type()); self.add_trait_constraints_to_scope(&func_meta); @@ -414,9 +418,7 @@ impl<'context> Elaborator<'context> { self.trait_bounds.clear(); self.type_variables.clear(); - self.in_unconstrained_fn = false; self.interner.update_fn(id, hir_func); - self.in_contract = was_in_contract; self.current_function = old_function; self.current_item = old_item; } @@ -439,7 +441,7 @@ impl<'context> Elaborator<'context> { generics.push(new_generic.clone()); let name = format!("impl {trait_path}"); - let generic_type = Type::NamedGeneric(new_generic, Rc::new(name)); + let generic_type = Type::NamedGeneric(new_generic, Rc::new(name), Kind::Normal); let trait_bound = TraitBound { trait_path, trait_id: None, trait_generics }; if let Some(new_constraint) = self.resolve_trait_bound(&trait_bound, generic_type.clone()) { @@ -456,25 +458,56 @@ impl<'context> Elaborator<'context> { // Map the generic to a fresh type variable let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - let span = generic.0.span(); + let ident = generic.ident(); + let span = ident.0.span(); + + // Resolve the generic's kind + let kind = self.resolve_generic_kind(generic); // Check for name collisions of this generic - let name = Rc::new(generic.0.contents.clone()); + let name = Rc::new(ident.0.contents.clone()); + + let resolved_generic = + ResolvedGeneric { name: name.clone(), type_var: typevar.clone(), kind, span }; - if let Some((_, _, first_span)) = self.find_generic(&name) { + if let Some(generic) = self.find_generic(&name) { self.push_err(ResolverError::DuplicateDefinition { - name: generic.0.contents.clone(), - first_span: *first_span, + name: ident.0.contents.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((name, typevar.clone(), span)); + self.generics.push(resolved_generic.clone()); } - typevar + resolved_generic }) } + /// Return the kind of an unresolved generic. + /// If a numeric generic has been specified, resolve the annotated type to make + /// sure only primitive numeric types are being used. 
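+ /// Illustrative sketch, not part of this change: a generic written as `let N: u32`
+ /// is expected to resolve to a numeric kind wrapping `u32` (similarly for `Field`),
+ /// while annotating a non-primitive type such as an array or struct is reported via
+ /// `UnsupportedNumericGenericType`; a plain generic like `T` keeps `Kind::Normal`.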
+ pub(super) fn resolve_generic_kind(&mut self, generic: &UnresolvedGeneric) -> Kind { + if let UnresolvedGeneric::Numeric { ident, typ } = generic { + let typ = typ.clone(); + let typ = if typ.is_type_expression() { + self.resolve_type_inner(typ, &Kind::Numeric(Box::new(Type::default_int_type()))) + } else { + self.resolve_type(typ.clone()) + }; + if !matches!(typ, Type::FieldElement | Type::Integer(_, _)) { + let unsupported_typ_err = ResolverError::UnsupportedNumericGenericType { + ident: ident.clone(), + typ: typ.clone(), + }; + self.push_err(unsupported_typ_err); + } + Kind::Numeric(Box::new(typ)) + } else { + Kind::Normal + } + } + fn push_err(&mut self, error: impl Into) { self.errors.push((error.into(), self.file)); } @@ -523,12 +556,20 @@ impl<'context> Elaborator<'context> { } fn resolve_trait_bound(&mut self, bound: &TraitBound, typ: Type) -> Option { - let trait_generics = vecmap(&bound.trait_generics, |typ| self.resolve_type(typ.clone())); + let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; + + let resolved_generics = &the_trait.generics.clone(); + assert_eq!(resolved_generics.len(), bound.trait_generics.len()); + let generics_with_types = resolved_generics.iter().zip(&bound.trait_generics); + let trait_generics = vecmap(generics_with_types, |(generic, typ)| { + self.resolve_type_inner(typ.clone(), &generic.kind) + }); - let span = bound.trait_path.span(); let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; let trait_id = the_trait.id; + let span = bound.trait_path.span(); + let expected_generics = the_trait.generics.len(); let actual_generics = trait_generics.len(); @@ -557,11 +598,13 @@ impl<'context> Elaborator<'context> { ) { self.current_function = Some(func_id); - // Without this, impl methods can accidentally be placed in contracts. See #3254 - let was_in_contract = self.in_contract; - if self.self_type.is_some() { - self.in_contract = false; - } + let in_contract = if self.self_type.is_some() { + // Without this, impl methods can accidentally be placed in contracts. 
+ // See: https://github.com/noir-lang/noir/issues/3254 + false + } else { + self.in_contract() + }; self.scopes.start_function(); self.current_item = Some(DependencyId::Function(func_id)); @@ -570,12 +613,13 @@ impl<'context> Elaborator<'context> { let id = self.interner.function_definition_id(func_id); let name_ident = HirIdent::non_trait_method(id, location); - let is_entry_point = self.is_entry_point_function(func); + let is_entry_point = self.is_entry_point_function(func, in_contract); self.run_lint(|_| lints::inlining_attributes(func).map(Into::into)); self.run_lint(|_| lints::missing_pub(func, is_entry_point).map(Into::into)); self.run_lint(|elaborator| { - lints::unnecessary_pub_return(func, elaborator.pub_allowed(func)).map(Into::into) + lints::unnecessary_pub_return(func, elaborator.pub_allowed(func, in_contract)) + .map(Into::into) }); self.run_lint(|_| lints::oracle_not_marked_unconstrained(func).map(Into::into)); self.run_lint(|elaborator| { @@ -591,12 +635,12 @@ impl<'context> Elaborator<'context> { let has_no_predicates_attribute = func.attributes().is_no_predicates(); let should_fold = func.attributes().is_foldable(); let has_inline_attribute = has_no_predicates_attribute || should_fold; - let is_pub_allowed = self.pub_allowed(func); + let is_pub_allowed = self.pub_allowed(func, in_contract); self.add_generics(&func.def.generics); let mut trait_constraints = self.resolve_trait_constraints(&func.def.where_clause); - let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let mut generics = vecmap(&self.generics, |generic| generic.type_var.clone()); let mut parameters = Vec::new(); let mut parameter_types = Vec::new(); let mut parameter_idents = Vec::new(); @@ -612,7 +656,7 @@ impl<'context> Elaborator<'context> { UnresolvedTypeData::TraitAsType(path, args) => { self.desugar_impl_trait_arg(path, args, &mut generics, &mut trait_constraints) } - _ => self.resolve_type_inner(typ), + _ => self.resolve_type_inner(typ, &Kind::Normal), }; self.check_if_type_is_valid_for_program_input( @@ -621,6 +665,7 @@ impl<'context> Elaborator<'context> { has_inline_attribute, type_span, ); + let pattern = self.elaborate_pattern_and_store_ids( pattern, typ.clone(), @@ -645,8 +690,8 @@ impl<'context> Elaborator<'context> { let direct_generics = func.def.generics.iter(); let direct_generics = direct_generics - .filter_map(|generic| self.find_generic(&generic.0.contents)) - .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .filter_map(|generic| self.find_generic(&generic.ident().0.contents)) + .map(|ResolvedGeneric { name, type_var, .. 
}| (name.clone(), type_var.clone())) .collect(); let statements = std::mem::take(&mut func.def.body.statements); @@ -669,11 +714,11 @@ impl<'context> Elaborator<'context> { is_entry_point, is_trait_function, has_inline_attribute, + source_crate: self.crate_id, function_body: FunctionBody::Unresolved(func.kind, body, func.def.span), }; self.interner.push_fn_meta(meta, func_id); - self.in_contract = was_in_contract; self.current_function = None; self.scopes.end_function(); self.current_item = None; @@ -698,18 +743,30 @@ impl<'context> Elaborator<'context> { /// True if the `pub` keyword is allowed on parameters in this function /// `pub` on function parameters is only allowed for entry point functions - fn pub_allowed(&self, func: &NoirFunction) -> bool { - self.is_entry_point_function(func) || func.attributes().is_foldable() + fn pub_allowed(&self, func: &NoirFunction, in_contract: bool) -> bool { + self.is_entry_point_function(func, in_contract) || func.attributes().is_foldable() + } + + /// Returns `true` if the current module is a contract. + /// + /// This is usually determined by `self.module_id()`, but it can + /// be overridden for impls. Impls are an odd case since the methods within resolve + /// as if they're in the parent module, but should be placed in a child module. + /// Since they should be within a child module, they should be elaborated as if + /// `in_contract` is `false` so we can still resolve them in the parent module without them being in a contract. + fn in_contract(&self) -> bool { + self.module_id().module(self.def_maps).is_contract } - fn is_entry_point_function(&self, func: &NoirFunction) -> bool { - if self.in_contract { + fn is_entry_point_function(&self, func: &NoirFunction, in_contract: bool) -> bool { + if in_contract { func.attributes().is_contract_entry_point() } else { func.name() == MAIN_FUNCTION } } + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove implicit numeric generics fn declare_numeric_generics(&mut self, params: &Parameters, return_type: &Type) { if self.generics.is_empty() { return; @@ -722,12 +779,27 @@ impl<'context> Elaborator<'context> { // We can fail to find the generic in self.generics if it is an implicit one created // by the compiler. This can happen when, e.g. eliding array lengths using the slice // syntax [T]. - if let Some((name, _, span)) = - self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + if let Some(ResolvedGeneric { name, span, kind, .. }) = + self.generics.iter_mut().find(|generic| generic.name.as_ref() == &name_to_find) { + let scope = self.scopes.get_mut_scope(); + let value = scope.find(&name_to_find); + if value.is_some() { + // With the addition of explicit numeric generics we do not want to introduce numeric generics in this manner + // However, this is going to be a big breaking change so for now we simply issue a warning while users have time + // to transition to the new syntax + // e.g. 
this code would break with a duplicate definition error: + // ``` + // fn foo(arr: [Field; N]) { } + // ``` + continue; + } + *kind = Kind::Numeric(Box::new(Type::default_int_type())); let ident = Ident::new(name.to_string(), *span); let definition = DefinitionKind::GenericType(type_variable); - self.add_variable_decl_inner(ident, false, false, false, definition); + self.add_variable_decl_inner(ident.clone(), false, false, false, definition); + + self.push_err(ResolverError::UseExplicitNumericGeneric { ident }); } } } @@ -753,7 +825,7 @@ impl<'context> Elaborator<'context> { } } - fn elaborate_impls(&mut self, impls: Vec<(Vec, Span, UnresolvedFunctions)>) { + fn elaborate_impls(&mut self, impls: Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>) { for (_, _, functions) in impls { self.file = functions.file_id; self.recover_generics(|this| this.elaborate_functions(functions)); @@ -783,7 +855,7 @@ impl<'context> Elaborator<'context> { fn collect_impls( &mut self, module: LocalModuleId, - impls: &mut [(Vec, Span, UnresolvedFunctions)], + impls: &mut [(UnresolvedGenerics, Span, UnresolvedFunctions)], ) { self.local_module = module; @@ -800,7 +872,6 @@ impl<'context> Elaborator<'context> { self.local_module = trait_impl.module_id; self.file = trait_impl.file_id; self.current_trait_impl = trait_impl.impl_id; - trait_impl.trait_id = self.resolve_trait_by_path(trait_impl.trait_path.clone()); let self_type = trait_impl.methods.self_type.clone(); let self_type = @@ -844,7 +915,7 @@ impl<'context> Elaborator<'context> { methods, }); - let generics = vecmap(&self.generics, |(_, type_variable, _)| type_variable.clone()); + let generics = vecmap(&self.generics, |generic| generic.type_var.clone()); if let Err((prev_span, prev_file)) = self.interner.add_trait_implementation( self_type.clone(), @@ -868,6 +939,7 @@ impl<'context> Elaborator<'context> { } self.generics.clear(); + self.current_trait_impl = None; self.self_type = None; } @@ -1087,11 +1159,18 @@ impl<'context> Elaborator<'context> { self.generics.clear(); } - fn collect_struct_definitions(&mut self, structs: BTreeMap) { + fn collect_struct_definitions( + &mut self, + structs: BTreeMap, + ) -> CollectedItems { // This is necessary to avoid cloning the entire struct map // when adding checks after each struct field is resolved. let struct_ids = structs.keys().copied().collect::>(); + // This will contain any additional top-level items that are generated at compile-time + // via macros. This often includes derived trait impls. + let mut generated_items = CollectedItems::default(); + // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. for (type_id, mut typ) in structs { @@ -1100,14 +1179,35 @@ impl<'context> Elaborator<'context> { let attributes = std::mem::take(&mut typ.struct_def.attributes); let span = typ.struct_def.span; - let (generics, fields) = self.resolve_struct_fields(typ.struct_def, type_id); + let fields = self.resolve_struct_fields(typ.struct_def, type_id); self.interner.update_struct(type_id, |struct_def| { struct_def.set_fields(fields); - struct_def.generics = generics; + + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this with implicit numeric generics + // This is only necessary for resolving named types when implicit numeric generics are used. 
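+ // Illustrative example (assumption, not from this diff): `struct Foo<N> { arr: [Field; N] }`
+ // relies on `N` being promoted to an implicit numeric generic; the explicit form this
+ // warning points users towards would be `struct Foo<let N: u32> { arr: [Field; N] }`.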
+ let mut found_names = Vec::new(); + struct_def.find_numeric_generics_in_fields(&mut found_names); + for generic in struct_def.generics.iter_mut() { + for found_generic in found_names.iter() { + if found_generic == generic.name.as_str() { + if matches!(generic.kind, Kind::Normal) { + let ident = Ident::new(generic.name.to_string(), generic.span); + self.errors.push(( + CompilationError::ResolverError( + ResolverError::UseExplicitNumericGeneric { ident }, + ), + self.file, + )); + generic.kind = Kind::Numeric(Box::new(Type::default_int_type())); + } + break; + } + } + } }); - self.run_comptime_attributes_on_struct(attributes, type_id, span); + self.run_comptime_attributes_on_struct(attributes, type_id, span, &mut generated_items); } // Check whether the struct fields have nested slices @@ -1129,6 +1229,8 @@ impl<'context> Elaborator<'context> { } } } + + generated_items } fn run_comptime_attributes_on_struct( @@ -1136,48 +1238,73 @@ impl<'context> Elaborator<'context> { attributes: Vec, struct_id: StructId, span: Span, + generated_items: &mut CollectedItems, ) { for attribute in attributes { if let SecondaryAttribute::Custom(name) = attribute { - match self.lookup_global(Path::from_single(name, span)) { - Ok(id) => { - let definition = self.interner.definition(id); - if let DefinitionKind::Function(function) = &definition.kind { - let function = *function; - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes); - - let location = Location::new(span, self.file); - let arguments = vec![(Value::TypeDefinition(struct_id), location)]; - let result = interpreter.call_function(function, arguments, location); - if let Err(error) = result { - self.errors.push(error.into_compilation_error_pair()); - } - } else { - self.push_err(ResolverError::NonFunctionInAnnotation { span }); - } - } - Err(_) => self.push_err(ResolverError::UnknownAnnotation { span }), + if let Err(error) = + self.run_comptime_attribute_on_struct(name, struct_id, span, generated_items) + { + self.errors.push(error); } } } } + fn run_comptime_attribute_on_struct( + &mut self, + attribute: String, + struct_id: StructId, + span: Span, + generated_items: &mut CollectedItems, + ) -> Result<(), (CompilationError, FileId)> { + let id = self + .lookup_global(Path::from_single(attribute, span)) + .map_err(|_| (ResolverError::UnknownAnnotation { span }.into(), self.file))?; + + let definition = self.interner.definition(id); + let DefinitionKind::Function(function) = definition.kind else { + return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); + }; + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + + let location = Location::new(span, self.file); + let arguments = vec![(Value::TypeDefinition(struct_id), location)]; + + let value = interpreter + .call_function(function, arguments, TypeBindings::new(), location) + .map_err(|error| error.into_compilation_error_pair())?; + + if value != Value::Unit { + let item = value + .into_top_level_item(location) + .map_err(|error| error.into_compilation_error_pair())?; + + self.add_item(item, generated_items, location); + } + + Ok(()) + } + pub fn resolve_struct_fields( &mut self, unresolved: NoirStruct, struct_id: StructId, - ) -> (Generics, Vec<(Ident, Type)>) { + ) -> Vec<(Ident, Type)> { self.recover_generics(|this| { - let generics = this.add_generics(&unresolved.generics); - this.current_item = Some(DependencyId::Struct(struct_id)); this.resolving_ids.insert(struct_id); + + let struct_def = 
this.interner.get_struct(struct_id); + this.add_existing_generics(&unresolved.generics, &struct_def.borrow().generics); + let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, this.resolve_type(typ))); + this.resolving_ids.remove(&struct_id); - (generics, fields) + fields }) } @@ -1190,7 +1317,7 @@ impl<'context> Elaborator<'context> { self.current_item = Some(DependencyId::Global(global_id)); let let_stmt = global.stmt_def; - if !self.in_contract + if !self.in_contract() && let_stmt.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) { let span = let_stmt.pattern.span(); @@ -1231,7 +1358,8 @@ impl<'context> Elaborator<'context> { let definition_id = global.definition_id; let location = global.location; - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); if let Err(error) = interpreter.evaluate_let(let_statement) { self.errors.push(error.into_compilation_error_pair()); @@ -1273,12 +1401,29 @@ impl<'context> Elaborator<'context> { self.file = trait_impl.file_id; self.local_module = trait_impl.module_id; + trait_impl.trait_id = self.resolve_trait_by_path(trait_impl.trait_path.clone()); let unresolved_type = &trait_impl.object_type; + self.add_generics(&trait_impl.generics); trait_impl.resolved_generics = self.generics.clone(); - let trait_generics = - vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())); + // Fetch trait constraints here + let trait_generics = if let Some(trait_id) = trait_impl.trait_id { + let trait_def = self.interner.get_trait(trait_id); + let resolved_generics = trait_def.generics.clone(); + assert_eq!(resolved_generics.len(), trait_impl.trait_generics.len()); + trait_impl + .trait_generics + .iter() + .enumerate() + .map(|(i, generic)| { + self.resolve_type_inner(generic.clone(), &resolved_generics[i].kind) + }) + .collect() + } else { + // We still resolve as to continue type checking + vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())) + }; trait_impl.resolved_trait_generics = trait_generics; @@ -1355,4 +1500,81 @@ impl<'context> Elaborator<'context> { items.functions = function_sets; (comptime, items) } + + fn add_item( + &mut self, + item: TopLevelStatement, + generated_items: &mut CollectedItems, + location: Location, + ) { + match item { + TopLevelStatement::Function(function) => { + let id = self.interner.push_empty_fn(); + let module = self.module_id(); + self.interner.push_function(id, &function.def, module, location); + let functions = vec![(self.local_module, id, function)]; + generated_items.functions.push(UnresolvedFunctions { + file_id: self.file, + functions, + trait_id: None, + self_type: None, + }); + } + TopLevelStatement::TraitImpl(mut trait_impl) => { + let methods = dc_mod::collect_trait_impl_functions( + self.interner, + &mut trait_impl, + self.crate_id, + self.file, + self.local_module, + ); + + generated_items.trait_impls.push(UnresolvedTraitImpl { + file_id: self.file, + module_id: self.local_module, + trait_generics: trait_impl.trait_generics, + trait_path: trait_impl.trait_name, + object_type: trait_impl.object_type, + methods, + generics: trait_impl.impl_generics, + where_clause: trait_impl.where_clause, + + // These last fields are filled in later + trait_id: None, + impl_id: None, + resolved_object_type: None, + resolved_generics: Vec::new(), + resolved_trait_generics: Vec::new(), + }); + } + 
TopLevelStatement::Global(global) => { + let (global, error) = dc_mod::collect_global( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + global, + self.file, + self.local_module, + ); + + generated_items.globals.push(global); + if let Some(error) = error { + self.errors.push(error); + } + } + // Assume that an error has already been issued + TopLevelStatement::Error => (), + + TopLevelStatement::Module(_) + | TopLevelStatement::Import(_) + | TopLevelStatement::Struct(_) + | TopLevelStatement::Trait(_) + | TopLevelStatement::Impl(_) + | TopLevelStatement::TypeAlias(_) + | TopLevelStatement::SubModule(_) => { + let item = item.to_string(); + let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; + self.errors.push(error.into_compilation_error_pair()); + } + } + } } diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs index e337726b579..4f04f5c523c 100644 --- a/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -11,11 +11,10 @@ use crate::{ }, hir_def::{ expr::{HirIdent, ImplKind}, - function::FunctionBody, stmt::HirPattern, }, macros_api::{HirExpression, Ident, Path, Pattern}, - node_interner::{DefinitionId, DefinitionKind, DependencyId, ExprId, GlobalId, TraitImplKind}, + node_interner::{DefinitionId, DefinitionKind, ExprId, GlobalId, TraitImplKind}, Shared, StructType, Type, TypeBindings, }; @@ -302,9 +301,14 @@ impl<'context> Elaborator<'context> { ident } - pub fn add_existing_variable_to_scope(&mut self, name: String, ident: HirIdent) { + pub fn add_existing_variable_to_scope( + &mut self, + name: String, + ident: HirIdent, + warn_if_unused: bool, + ) { let second_span = ident.location.span; - let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused }; let old_value = self.scopes.get_mut_scope().add_key_value(name.clone(), resolver_meta); @@ -390,6 +394,7 @@ impl<'context> Elaborator<'context> { ) -> (ExprId, Type) { let span = variable.span; let expr = self.resolve_variable(variable); + let id = self.interner.push_expr(HirExpression::Ident(expr.clone(), generics.clone())); self.interner.push_expr_location(id, span, self.file); let typ = self.type_check_variable(expr, id, generics); @@ -415,16 +420,6 @@ impl<'context> Elaborator<'context> { match self.interner.definition(hir_ident.id).kind { DefinitionKind::Function(id) => { if let Some(current_item) = self.current_item { - // Lazily evaluate functions found within globals if necessary. - // Otherwise if we later attempt to evaluate the global it will - // see an empty function body. 
- if matches!(current_item, DependencyId::Global(_)) { - let meta = self.interner.function_meta(&id); - - if matches!(&meta.function_body, FunctionBody::Unresolved(..)) { - self.elaborate_function(id); - } - } self.interner.add_function_dependency(current_item, id); } } @@ -475,8 +470,8 @@ impl<'context> Elaborator<'context> { for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { // Avoid binding t = t - if !arg.occurs(param.id()) { - bindings.insert(param.id(), (param.clone(), arg.clone())); + if !arg.occurs(param.type_var.id()) { + bindings.insert(param.type_var.id(), (param.type_var.clone(), arg.clone())); } } diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs index dd3e2778726..0d67c9ed3e3 100644 --- a/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/compiler/noirc_frontend/src/elaborator/statements.rs @@ -206,7 +206,10 @@ impl<'context> Elaborator<'context> { } fn elaborate_jump(&mut self, is_break: bool, span: noirc_errors::Span) -> (HirStatement, Type) { - if !self.in_unconstrained_fn { + let in_constrained_function = self + .current_function + .map_or(true, |func_id| !self.interner.function_modifiers(&func_id).is_unconstrained); + if in_constrained_function { self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); } if self.nested_loops == 0 { @@ -432,7 +435,8 @@ impl<'context> Elaborator<'context> { fn elaborate_comptime_statement(&mut self, statement: Statement) -> (HirStatement, Type) { let span = statement.span; let (hir_statement, _typ) = self.elaborate_statement(statement); - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let value = interpreter.evaluate_statement(hir_statement); let (expr, typ) = self.inline_comptime_value(value, span); (HirStatement::Expression(expr), typ) diff --git a/compiler/noirc_frontend/src/elaborator/traits.rs b/compiler/noirc_frontend/src/elaborator/traits.rs index 3e04dbc784a..77ac8e476f8 100644 --- a/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/compiler/noirc_frontend/src/elaborator/traits.rs @@ -1,10 +1,12 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, rc::Rc}; use iter_extended::vecmap; use noirc_errors::Location; use crate::{ - ast::{FunctionKind, TraitItem, UnresolvedGenerics, UnresolvedTraitConstraint}, + ast::{ + FunctionKind, TraitItem, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, + }, hir::def_collector::dc_crate::UnresolvedTrait, hir_def::traits::{TraitConstant, TraitFunction, TraitType}, macros_api::{ @@ -13,7 +15,7 @@ use crate::{ }, node_interner::{FuncId, TraitId}, token::Attributes, - Type, TypeVariableKind, + Kind, ResolvedGeneric, Type, TypeVariableKind, }; use super::Elaborator; @@ -22,7 +24,11 @@ impl<'context> Elaborator<'context> { pub fn collect_traits(&mut self, traits: BTreeMap) { for (trait_id, unresolved_trait) in traits { self.recover_generics(|this| { - this.add_generics(&unresolved_trait.trait_def.generics); + let resolved_generics = this.interner.get_trait(trait_id).generics.clone(); + this.add_existing_generics( + &unresolved_trait.trait_def.generics, + &resolved_generics, + ); // Resolve order // 1. 
Trait Types ( Trait constants can have a trait type, therefore types before constants) @@ -34,7 +40,6 @@ impl<'context> Elaborator<'context> { this.interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); - trait_def.generics = vecmap(&this.generics, |(_, generic, _)| generic.clone()); }); }); @@ -87,10 +92,20 @@ impl<'context> Elaborator<'context> { Type::TypeVariable(self_typevar.clone(), TypeVariableKind::Normal); let name_span = the_trait.name.span(); - this.add_existing_generic("Self", name_span, self_typevar); + this.add_existing_generic( + &UnresolvedGeneric::Variable(Ident::from("Self")), + name_span, + &ResolvedGeneric { + name: Rc::new("Self".to_owned()), + type_var: self_typevar, + span: name_span, + kind: Kind::Normal, + }, + ); this.self_type = Some(self_type.clone()); let func_id = unresolved_trait.method_ids[&name.0.contents]; + this.resolve_trait_function( name, generics, @@ -105,7 +120,7 @@ impl<'context> Elaborator<'context> { let arguments = vecmap(&func_meta.parameters.0, |(_, typ, _)| typ.clone()); let return_type = func_meta.return_type().clone(); - let generics = vecmap(&this.generics, |(_, type_var, _)| type_var.clone()); + let generics = vecmap(&this.generics, |generic| generic.type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -147,6 +162,7 @@ impl<'context> Elaborator<'context> { func_id: FuncId, ) { let old_generic_count = self.generics.len(); + self.scopes.start_function(); let kind = FunctionKind::Normal; diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs index fcb7ac94c26..63cab40f9d3 100644 --- a/compiler/noirc_frontend/src/elaborator/types.rs +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -5,7 +5,10 @@ use iter_extended::vecmap; use noirc_errors::{Location, Span}; use crate::{ - ast::{BinaryOpKind, IntegerBitSize, UnresolvedGenerics, UnresolvedTypeExpression}, + ast::{ + BinaryOpKind, IntegerBitSize, UnresolvedGeneric, UnresolvedGenerics, + UnresolvedTypeExpression, + }, hir::{ comptime::{Interpreter, Value}, def_map::ModuleDefId, @@ -28,48 +31,75 @@ use crate::{ UnaryOp, UnresolvedType, UnresolvedTypeData, }, node_interner::{DefinitionKind, ExprId, GlobalId, TraitId, TraitImplKind, TraitMethodId}, - Generics, Type, TypeBinding, TypeVariable, TypeVariableKind, + Generics, Kind, ResolvedGeneric, Type, TypeBinding, TypeVariable, TypeVariableKind, }; use super::{lints, Elaborator}; impl<'context> Elaborator<'context> { - /// Translates an UnresolvedType to a Type + /// Translates an UnresolvedType to a Type with a `TypeKind::Normal` pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { let span = typ.span; - let resolved_type = self.resolve_type_inner(typ); + let resolved_type = self.resolve_type_inner(typ, &Kind::Normal); if resolved_type.is_nested_slice() { - self.push_err(ResolverError::NestedSlices { span: span.unwrap() }); + self.push_err(ResolverError::NestedSlices { + span: span.expect("Type should have span"), + }); } resolved_type } /// Translates an UnresolvedType into a Type and appends any /// freshly created TypeVariables created to new_variables. 
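+ /// The `kind` parameter added here is the kind expected at the use site. As an
+ /// illustrative example (assumption, not from this diff), given `fn foo<let N: u32>(x: N)`,
+ /// resolving `N` in a type position with `Kind::Normal` is rejected via
+ /// `NumericGenericUsedForType`.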
- pub fn resolve_type_inner(&mut self, typ: UnresolvedType) -> Type { + pub fn resolve_type_inner(&mut self, typ: UnresolvedType, kind: &Kind) -> Type { use crate::ast::UnresolvedTypeData::*; + let span = typ.span; + let resolved_type = match typ.typ { FieldElement => Type::FieldElement, Array(size, elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); - let size = self.convert_expression_type(size); + let elem = Box::new(self.resolve_type_inner(*elem, kind)); + let mut size = self.convert_expression_type(size); + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this once we only have explicit numeric generics + if let Type::NamedGeneric(type_var, name, _) = size { + size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } Type::Array(Box::new(size), elem) } Slice(elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); + let elem = Box::new(self.resolve_type_inner(*elem, kind)); Type::Slice(elem) } Expression(expr) => self.convert_expression_type(expr), Integer(sign, bits) => Type::Integer(sign, bits), Bool => Type::Bool, String(size) => { - let resolved_size = self.convert_expression_type(size); + let mut resolved_size = self.convert_expression_type(size); + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this once we only have explicit numeric generics + if let Type::NamedGeneric(type_var, name, _) = resolved_size { + resolved_size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } Type::String(Box::new(resolved_size)) } FormatString(size, fields) => { - let resolved_size = self.convert_expression_type(size); - let fields = self.resolve_type_inner(*fields); + let mut resolved_size = self.convert_expression_type(size); + if let Type::NamedGeneric(type_var, name, _) = resolved_size { + resolved_size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } + let fields = self.resolve_type_inner(*fields, kind); Type::FmtString(Box::new(resolved_size), Box::new(fields)) } Quoted(quoted) => Type::Quoted(quoted), @@ -79,10 +109,12 @@ impl<'context> Elaborator<'context> { Named(path, args, _) => self.resolve_named_type(path, args), TraitAsType(path, args) => self.resolve_trait_as_type(path, args), - Tuple(fields) => Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field))), + Tuple(fields) => { + Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field, kind))) + } Function(args, ret, env) => { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - let ret = Box::new(self.resolve_type_inner(*ret)); + let args = vecmap(args, |arg| self.resolve_type_inner(arg, kind)); + let ret = Box::new(self.resolve_type_inner(*ret, kind)); // expect() here is valid, because the only places we don't have a span are omitted types // e.g. 
a function without return type implicitly has a spanless UnresolvedType::Unit return type @@ -90,10 +122,10 @@ impl<'context> Elaborator<'context> { let env_span = env.span.expect("Unexpected missing span for closure environment type"); - let env = Box::new(self.resolve_type_inner(*env)); + let env = Box::new(self.resolve_type_inner(*env, kind)); match *env { - Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _, _) => { Type::Function(args, ret, env) } _ => { @@ -106,9 +138,10 @@ impl<'context> Elaborator<'context> { } } MutableReference(element) => { - Type::MutableReference(Box::new(self.resolve_type_inner(*element))) + Type::MutableReference(Box::new(self.resolve_type_inner(*element, kind))) } - Parenthesized(typ) => self.resolve_type_inner(*typ), + Parenthesized(typ) => self.resolve_type_inner(*typ, kind), + Resolved(id) => self.interner.get_quoted_type(id).clone(), }; if let Type::Struct(_, _) = resolved_type { @@ -120,11 +153,36 @@ impl<'context> Elaborator<'context> { ); } } + + // Check that any types with a type kind match the expected type kind supplied to this function + // TODO(https://github.com/noir-lang/noir/issues/5156): make this named generic check more general with `*resolved_kind != kind` + // as implicit numeric generics still existing makes this check more challenging to enforce + // An example of a more general check that we should switch to: + // if resolved_type.kind() != kind.clone() { + // let expected_typ_err = CompilationError::TypeError(TypeCheckError::TypeKindMismatch { + // expected_kind: kind.to_string(), + // expr_kind: resolved_type.kind().to_string(), + // expr_span: span.expect("Type should have span"), + // }); + // self.errors.push((expected_typ_err, self.file)); + // return Type::Error; + // } + if let Type::NamedGeneric(_, name, resolved_kind) = &resolved_type { + if matches!(resolved_kind, Kind::Numeric { .. 
}) && matches!(kind, Kind::Normal) { + let expected_typ_err = ResolverError::NumericGenericUsedForType { + name: name.to_string(), + span: span.expect("Type should have span"), + }; + self.push_err(expected_typ_err); + return Type::Error; + } + } + resolved_type } - pub fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { - self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + pub fn find_generic(&self, target_name: &str) -> Option<&ResolvedGeneric> { + self.generics.iter().find(|generic| generic.name.as_ref() == target_name) } fn resolve_named_type(&mut self, path: Path, args: Vec) -> Type { @@ -152,7 +210,6 @@ impl<'context> Elaborator<'context> { } let span = path.span(); - let mut args = vecmap(args, |arg| self.resolve_type_inner(arg)); if let Some(type_alias) = self.lookup_type_alias(path.clone()) { let type_alias = type_alias.borrow(); @@ -160,6 +217,10 @@ impl<'context> Elaborator<'context> { let type_alias_string = type_alias.to_string(); let id = type_alias.id; + let mut args = vecmap(type_alias.generics.iter().zip(args), |(generic, arg)| { + self.resolve_type_inner(arg, &generic.kind) + }); + self.verify_generics_count(expected_generic_count, &mut args, span, || { type_alias_string }); @@ -192,7 +253,7 @@ impl<'context> Elaborator<'context> { } let expected_generic_count = struct_type.borrow().generics.len(); - if !self.in_contract + if !self.in_contract() && self .interner .struct_attributes(&struct_type.borrow().id) @@ -203,6 +264,12 @@ impl<'context> Elaborator<'context> { span: struct_type.borrow().name.span(), }); } + + let mut args = + vecmap(struct_type.borrow().generics.iter().zip(args), |(generic, arg)| { + self.resolve_type_inner(arg, &generic.kind) + }); + self.verify_generics_count(expected_generic_count, &mut args, span, || { struct_type.borrow().to_string() }); @@ -219,10 +286,19 @@ impl<'context> Elaborator<'context> { } fn resolve_trait_as_type(&mut self, path: Path, args: Vec) -> Type { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - - if let Some(t) = self.lookup_trait_or_error(path) { - Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) + // Fetch information needed from the trait as the closure for resolving all the `args` + // requires exclusive access to `self` + let trait_as_type_info = self + .lookup_trait_or_error(path) + .map(|t| (t.id, Rc::new(t.name.to_string()), t.generics.clone())); + + if let Some((id, name, resolved_generics)) = trait_as_type_info { + assert_eq!(resolved_generics.len(), args.len()); + let generics_with_types = resolved_generics.iter().zip(args); + let args = vecmap(generics_with_types, |(generic, typ)| { + self.resolve_type_inner(typ, &generic.kind) + }); + Type::TraitAsType(id, Rc::new(name.to_string()), args) } else { Type::Error } @@ -251,8 +327,9 @@ impl<'context> Elaborator<'context> { pub fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { if path.segments.len() == 1 { let name = &path.last_segment().0.contents; - if let Some((name, var, _)) = self.find_generic(name) { - return Some(Type::NamedGeneric(var.clone(), name.clone())); + if let Some(generic) = self.find_generic(name) { + let generic = generic.clone(); + return Some(Type::NamedGeneric(generic.type_var, generic.name, generic.kind)); } } @@ -318,9 +395,12 @@ impl<'context> Elaborator<'context> { let constraint = TraitConstraint { typ: self.self_type.clone()?, - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: 
Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; + return Some((method, constraint, false)); } } @@ -349,7 +429,9 @@ impl<'context> Elaborator<'context> { the_trait.self_type_typevar.clone(), TypeVariableKind::Normal, ), - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); @@ -371,7 +453,7 @@ impl<'context> Elaborator<'context> { } for constraint in self.trait_bounds.clone() { - if let Type::NamedGeneric(_, name) = &constraint.typ { + if let Type::NamedGeneric(_, name, _) = &constraint.typ { // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` if path.segments[0].0.contents != name.as_str() { continue; @@ -1077,7 +1159,7 @@ impl<'context> Elaborator<'context> { }); None } - Type::NamedGeneric(_, _) => { + Type::NamedGeneric(_, _, _) => { let func_meta = self.interner.function_meta( &self.current_function.expect("unexpected method outside a function"), ); @@ -1353,26 +1435,34 @@ impl<'context> Elaborator<'context> { } } - pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { - assert_eq!(names.len(), generics.len()); + pub fn add_existing_generics( + &mut self, + unresolved_generics: &UnresolvedGenerics, + generics: &Generics, + ) { + assert_eq!(unresolved_generics.len(), generics.len()); - for (name, typevar) in names.iter().zip(generics) { - self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); + for (unresolved_generic, generic) in unresolved_generics.iter().zip(generics) { + self.add_existing_generic(unresolved_generic, unresolved_generic.span(), generic); } } - pub fn add_existing_generic(&mut self, name: &str, span: Span, typevar: TypeVariable) { - // Check for name collisions of this generic - let rc_name = Rc::new(name.to_owned()); + pub fn add_existing_generic( + &mut self, + unresolved_generic: &UnresolvedGeneric, + span: Span, + resolved_generic: &ResolvedGeneric, + ) { + let name = &unresolved_generic.ident().0.contents; - if let Some((_, _, first_span)) = self.find_generic(&rc_name) { + if let Some(generic) = self.find_generic(name) { self.push_err(ResolverError::DuplicateDefinition { - name: name.to_owned(), - first_span: *first_span, + name: name.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((rc_name, typevar, span)); + self.generics.push(resolved_generic.clone()); } } @@ -1397,7 +1487,7 @@ impl<'context> Elaborator<'context> { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Quoted(_) | Type::Forall(_, _) => (), @@ -1408,7 +1498,7 @@ impl<'context> Elaborator<'context> { } Type::Array(length, element_type) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(element_type, found); @@ -1433,7 +1523,7 @@ impl<'context> Elaborator<'context> { Type::Struct(struct_type, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if struct_type.borrow().generic_is_numeric(i) { found.insert(name.to_string(), 
type_variable.clone()); } @@ -1444,7 +1534,7 @@ impl<'context> Elaborator<'context> { } Type::Alias(alias, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if alias.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1455,12 +1545,12 @@ impl<'context> Elaborator<'context> { } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), Type::String(length) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } } Type::FmtString(length, fields) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(fields, found); diff --git a/compiler/noirc_frontend/src/elaborator/unquote.rs b/compiler/noirc_frontend/src/elaborator/unquote.rs index 6767f471f16..ed12ba21398 100644 --- a/compiler/noirc_frontend/src/elaborator/unquote.rs +++ b/compiler/noirc_frontend/src/elaborator/unquote.rs @@ -1,306 +1,41 @@ use crate::{ - ast::{ - ArrayLiteral, AssignStatement, ConstrainStatement, ConstructorExpression, IfExpression, - InfixExpression, Lambda, - }, - macros_api::{ - BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, - ForLoopStatement, ForRange, IndexExpression, LetStatement, Literal, MemberAccessExpression, - MethodCallExpression, PrefixExpression, Statement, StatementKind, - }, - node_interner::ExprId, + macros_api::Path, + token::{SpannedToken, Token, Tokens}, }; use super::Elaborator; impl<'a> Elaborator<'a> { - pub fn find_unquoted_exprs_in_block( - &mut self, - block: &mut BlockExpression, - unquoted_exprs: &mut Vec, - ) { - for statement in &mut block.statements { - self.find_unquoted_exprs_in_statement(statement, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_statement( - &mut self, - statement: &mut Statement, - unquoted_exprs: &mut Vec, - ) { - match &mut statement.kind { - StatementKind::Let(let_) => self.find_unquoted_exprs_in_let(let_, unquoted_exprs), - StatementKind::Constrain(constrain) => { - self.find_unquoted_exprs_in_constrain(constrain, unquoted_exprs); - } - StatementKind::Expression(expr) => { - self.find_unquoted_exprs_in_expr(expr, unquoted_exprs); - } - StatementKind::Assign(assign) => { - self.find_unquoted_exprs_in_assign(assign, unquoted_exprs); - } - StatementKind::For(for_) => self.find_unquoted_exprs_in_for(for_, unquoted_exprs), - StatementKind::Break => (), - StatementKind::Continue => (), - StatementKind::Comptime(comptime) => { - self.find_unquoted_exprs_in_statement(comptime, unquoted_exprs); - } - StatementKind::Semi(expr) => self.find_unquoted_exprs_in_expr(expr, unquoted_exprs), - StatementKind::Error => (), - } - } - - fn find_unquoted_exprs_in_constrain( - &mut self, - constrain: &mut ConstrainStatement, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut constrain.0, unquoted_exprs); - if let Some(msg) = constrain.1.as_mut() { - self.find_unquoted_exprs_in_expr(msg, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_let( - &mut self, - let_: &mut LetStatement, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut let_.expression, unquoted_exprs); - } - 
- fn find_unquoted_exprs_in_assign( - &mut self, - assign: &mut AssignStatement, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut assign.expression, unquoted_exprs); - } - - fn find_unquoted_exprs_in_for( - &mut self, - for_: &mut ForLoopStatement, - unquoted_exprs: &mut Vec, - ) { - match &mut for_.range { - ForRange::Range(start, end) => { - self.find_unquoted_exprs_in_expr(start, unquoted_exprs); - self.find_unquoted_exprs_in_expr(end, unquoted_exprs); - } - ForRange::Array(array) => { - self.find_unquoted_exprs_in_expr(array, unquoted_exprs); - } - }; - self.find_unquoted_exprs_in_expr(&mut for_.block, unquoted_exprs); - } - - fn find_unquoted_exprs_in_expr( - &mut self, - expr: &mut Expression, - unquoted_exprs: &mut Vec, - ) { - match &mut expr.kind { - ExpressionKind::Literal(literal) => { - self.find_unquoted_exprs_in_literal(literal, unquoted_exprs); - } - ExpressionKind::Block(block) => { - self.find_unquoted_exprs_in_block(block, unquoted_exprs); - } - ExpressionKind::Prefix(prefix) => { - self.find_unquoted_exprs_in_prefix(prefix, unquoted_exprs); - } - ExpressionKind::Index(index) => { - self.find_unquoted_exprs_in_index(index, unquoted_exprs); - } - ExpressionKind::Call(call) => self.find_unquoted_exprs_in_call(call, unquoted_exprs), - ExpressionKind::MethodCall(call) => { - self.find_unquoted_exprs_in_method_call(call, unquoted_exprs); - } - ExpressionKind::Constructor(constructor) => { - self.find_unquoted_exprs_in_constructor(constructor, unquoted_exprs); - } - ExpressionKind::MemberAccess(access) => { - self.find_unquoted_exprs_in_access(access, unquoted_exprs); - } - ExpressionKind::Cast(cast) => self.find_unquoted_exprs_in_cast(cast, unquoted_exprs), - ExpressionKind::Infix(infix) => { - self.find_unquoted_exprs_in_infix(infix, unquoted_exprs); - } - ExpressionKind::If(if_) => self.find_unquoted_exprs_in_if(if_, unquoted_exprs), - ExpressionKind::Variable(_, _) => (), - ExpressionKind::Tuple(tuple) => { - self.find_unquoted_exprs_in_tuple(tuple, unquoted_exprs); - } - ExpressionKind::Lambda(lambda) => { - self.find_unquoted_exprs_in_lambda(lambda, unquoted_exprs); - } - ExpressionKind::Parenthesized(expr) => { - self.find_unquoted_exprs_in_expr(expr, unquoted_exprs); - } - ExpressionKind::Quote(quote, _) => { - self.find_unquoted_exprs_in_block(quote, unquoted_exprs); - } - ExpressionKind::Comptime(block, _) => { - self.find_unquoted_exprs_in_block(block, unquoted_exprs); - } - ExpressionKind::Resolved(_) => (), - ExpressionKind::Error => (), - ExpressionKind::UnquoteMarker(_) => (), - ExpressionKind::Unquote(unquoted) => { - // Avoid an expensive clone for unquoted - let empty_expr = Expression::new(ExpressionKind::Error, unquoted.span); - let unquote = std::mem::replace(unquoted.as_mut(), empty_expr); - self.replace_unquote(expr, unquote, unquoted_exprs); - } - } - } - - fn find_unquoted_exprs_in_literal( - &mut self, - literal: &mut Literal, - unquoted_exprs: &mut Vec, - ) { - match literal { - Literal::Array(array) | Literal::Slice(array) => match array { - ArrayLiteral::Standard(elements) => { - for element in elements { - self.find_unquoted_exprs_in_expr(element, unquoted_exprs); + /// Go through the given tokens looking for a '$' token followed by a variable to unquote. + /// Each time these two tokens are found, they are replaced by a new UnquoteMarker token + /// containing the ExprId of the resolved variable to unquote. 
+ pub fn find_unquoted_exprs_tokens(&mut self, tokens: Tokens) -> Tokens { + let token_count = tokens.0.len(); + let mut new_tokens = Vec::with_capacity(token_count); + let mut tokens = tokens.0.into_iter(); + + while let Some(token) = tokens.next() { + let is_unquote = matches!(token.token(), Token::DollarSign); + new_tokens.push(token); + + if is_unquote { + if let Some(next) = tokens.next() { + let span = next.to_span(); + + match next.into_token() { + Token::Ident(name) => { + // Don't want the leading `$` anymore + new_tokens.pop(); + let path = Path::from_single(name, span); + let (expr_id, _) = self.elaborate_variable(path, None); + new_tokens.push(SpannedToken::new(Token::UnquoteMarker(expr_id), span)); + } + other_next => new_tokens.push(SpannedToken::new(other_next, span)), } } - ArrayLiteral::Repeated { repeated_element, length } => { - self.find_unquoted_exprs_in_expr(repeated_element, unquoted_exprs); - self.find_unquoted_exprs_in_expr(length, unquoted_exprs); - } - }, - Literal::Bool(_) - | Literal::Integer(_, _) - | Literal::Str(_) - | Literal::RawStr(_, _) - | Literal::FmtStr(_) - | Literal::Unit => (), - } - } - - fn find_unquoted_exprs_in_prefix( - &mut self, - prefix: &mut PrefixExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut prefix.rhs, unquoted_exprs); - } - - fn find_unquoted_exprs_in_index( - &mut self, - index: &mut IndexExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut index.collection, unquoted_exprs); - self.find_unquoted_exprs_in_expr(&mut index.index, unquoted_exprs); - } - - fn find_unquoted_exprs_in_call( - &mut self, - call: &mut CallExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut call.func, unquoted_exprs); - - for arg in &mut call.arguments { - self.find_unquoted_exprs_in_expr(arg, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_method_call( - &mut self, - call: &mut MethodCallExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut call.object, unquoted_exprs); - - for arg in &mut call.arguments { - self.find_unquoted_exprs_in_expr(arg, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_constructor( - &mut self, - constructor: &mut ConstructorExpression, - unquoted_exprs: &mut Vec, - ) { - for (_, field) in &mut constructor.fields { - self.find_unquoted_exprs_in_expr(field, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_access( - &mut self, - member_access: &mut MemberAccessExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut member_access.lhs, unquoted_exprs); - } - - fn find_unquoted_exprs_in_cast( - &mut self, - cast: &mut CastExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut cast.lhs, unquoted_exprs); - } - - fn find_unquoted_exprs_in_infix( - &mut self, - infix: &mut InfixExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut infix.lhs, unquoted_exprs); - self.find_unquoted_exprs_in_expr(&mut infix.rhs, unquoted_exprs); - } - - fn find_unquoted_exprs_in_if( - &mut self, - if_: &mut IfExpression, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut if_.condition, unquoted_exprs); - self.find_unquoted_exprs_in_expr(&mut if_.consequence, unquoted_exprs); - - if let Some(alternate) = if_.alternative.as_mut() { - self.find_unquoted_exprs_in_expr(alternate, unquoted_exprs); - } - } - - fn find_unquoted_exprs_in_tuple( - &mut self, - tuple: &mut [Expression], - unquoted_exprs: &mut 
Vec, - ) { - for field in tuple { - self.find_unquoted_exprs_in_expr(field, unquoted_exprs); + } } - } - - fn find_unquoted_exprs_in_lambda( - &mut self, - lambda: &mut Lambda, - unquoted_exprs: &mut Vec, - ) { - self.find_unquoted_exprs_in_expr(&mut lambda.body, unquoted_exprs); - } - /// Elaborate and store the unquoted expression in the given vector, then - /// replace it with an unquote expression with an UnquoteMarker expression to mark the position - /// to replace it with later. - fn replace_unquote( - &mut self, - expr: &mut Expression, - unquoted: Expression, - unquoted_exprs: &mut Vec, - ) { - let (expr_id, _) = self.elaborate_expression(unquoted); - let unquote_marker_id = unquoted_exprs.len(); - unquoted_exprs.push(expr_id); - expr.kind = ExpressionKind::UnquoteMarker(unquote_marker_id); + Tokens(new_tokens) } } diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index 05962420f8a..d2c7acee2a3 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -1,5 +1,11 @@ -use crate::{hir::def_collector::dc_crate::CompilationError, Type}; +use std::rc::Rc; + +use crate::{ + hir::def_collector::dc_crate::CompilationError, parser::ParserError, token::Tokens, Type, +}; use acvm::{acir::AcirField, FieldElement}; +use fm::FileId; +use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Location}; use super::value::Value; @@ -35,6 +41,9 @@ pub enum InterpreterError { NonStructInConstructor { typ: Type, location: Location }, CannotInlineMacro { value: Value, location: Location }, UnquoteFoundDuringEvaluation { location: Location }, + FailedToParseMacro { error: ParserError, tokens: Rc, rule: &'static str, file: FileId }, + UnsupportedTopLevelItemUnquote { item: String, location: Location }, + NonComptimeFnCallInSameCrate { function: String, location: Location }, Unimplemented { item: String, location: Location }, @@ -94,9 +103,14 @@ impl InterpreterError { | InterpreterError::NonStructInConstructor { location, .. } | InterpreterError::CannotInlineMacro { location, .. } | InterpreterError::UnquoteFoundDuringEvaluation { location, .. } + | InterpreterError::UnsupportedTopLevelItemUnquote { location, .. } + | InterpreterError::NonComptimeFnCallInSameCrate { location, .. } | InterpreterError::Unimplemented { location, .. } | InterpreterError::BreakNotInLoop { location, .. } | InterpreterError::ContinueNotInLoop { location, .. } => *location, + InterpreterError::FailedToParseMacro { error, file, .. 
} => { + Location::new(error.span(), *file) + } InterpreterError::Break | InterpreterError::Continue => { panic!("Tried to get the location of Break/Continue error!") } @@ -249,7 +263,8 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { CustomDiagnostic::simple_error(msg, String::new(), location.span) } InterpreterError::CannotInlineMacro { value, location } => { - let msg = "Cannot inline value into runtime code if it contains references".into(); + let typ = value.get_type(); + let msg = format!("Cannot inline values of type `{typ}` into this position"); let secondary = format!("Cannot inline value {value:?}"); CustomDiagnostic::simple_error(msg, secondary, location.span) } @@ -258,6 +273,45 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = "This is a bug".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::FailedToParseMacro { error, tokens, rule, file: _ } => { + let message = format!("Failed to parse macro's token stream into {rule}"); + let tokens = vecmap(&tokens.0, ToString::to_string).join(" "); + + // 10 is an arbitrary number of tokens here chosen to fit roughly onto one line + let token_stream = if tokens.len() > 10 { + format!( + "The resulting token stream was: (stream starts on next line)\n {tokens}" + ) + } else { + format!("The resulting token stream was: {tokens}") + }; + + let push_the_problem_on_the_library_author = "To avoid this error in the future, try adding input validation to your macro. Erroring out early with an `assert` can be a good way to provide a user-friendly error message".into(); + + let mut diagnostic = CustomDiagnostic::from(error); + // Swap the parser's primary note to become the secondary note so that it is + // more clear this error originates from failing to parse a macro.
+ let secondary = std::mem::take(&mut diagnostic.message); + diagnostic.add_secondary(secondary, error.span()); + diagnostic.message = message; + diagnostic.add_note(token_stream); + diagnostic.add_note(push_the_problem_on_the_library_author); + diagnostic + } + InterpreterError::UnsupportedTopLevelItemUnquote { item, location } => { + let msg = "Unsupported statement type to unquote".into(); + let secondary = + "Only functions, globals, and trait impls can be unquoted here".into(); + let mut error = CustomDiagnostic::simple_error(msg, secondary, location.span); + error.add_note(format!("Unquoted item was:\n{item}")); + error + } + InterpreterError::NonComptimeFnCallInSameCrate { function, location } => { + let msg = format!("`{function}` cannot be called in a `comptime` context here"); + let secondary = + "This function must be `comptime` or in a separate crate to be called".into(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } InterpreterError::Unimplemented { item, location } => { let msg = format!("{item} is currently unimplemented"); CustomDiagnostic::simple_error(msg, String::new(), location.span) diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 21b6897f5dd..d2b98569bbb 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -7,7 +7,9 @@ use noirc_errors::Location; use rustc_hash::FxHashMap as HashMap; use crate::ast::{BinaryOpKind, FunctionKind, IntegerBitSize, Signedness}; -use crate::hir_def::expr::HirQuoted; +use crate::graph::CrateId; +use crate::monomorphization::{perform_instantiation_bindings, undo_instantiation_bindings}; +use crate::token::Tokens; use crate::{ hir_def::{ expr::{ @@ -26,10 +28,8 @@ use crate::{ Shared, Type, TypeBinding, TypeBindings, TypeVariableKind, }; -use self::unquote::UnquoteArgs; - use super::errors::{IResult, InterpreterError}; -use super::value::Value; +use super::value::{unwrap_rc, Value}; mod builtin; mod unquote; @@ -44,6 +44,8 @@ pub struct Interpreter<'interner> { /// up all currently visible definitions. scopes: &'interner mut Vec>, + crate_id: CrateId, + in_loop: bool, } @@ -52,11 +54,25 @@ impl<'a> Interpreter<'a> { pub(crate) fn new( interner: &'a mut NodeInterner, scopes: &'a mut Vec>, + crate_id: CrateId, ) -> Self { - Self { interner, scopes, in_loop: false } + Self { interner, scopes, crate_id, in_loop: false } } pub(crate) fn call_function( + &mut self, + function: FuncId, + arguments: Vec<(Value, Location)>, + instantiation_bindings: TypeBindings, + location: Location, + ) -> IResult { + perform_instantiation_bindings(&instantiation_bindings); + let result = self.call_function_inner(function, arguments, location); + undo_instantiation_bindings(instantiation_bindings); + result + } + + fn call_function_inner( &mut self, function: FuncId, arguments: Vec<(Value, Location)>, @@ -71,6 +87,14 @@ impl<'a> Interpreter<'a> { }); } + let is_comptime = self.interner.function_modifiers(&function).is_comptime; + if !is_comptime && meta.source_crate == self.crate_id { + // Calling non-comptime functions from within the current crate is restricted + // as non-comptime items will have not been elaborated yet. 
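A hedged illustration of the restriction described in the comment above, with the Noir program written as a Rust string in the style of the interpreter tests later in this diff (the program is illustrative, not a test added by this change): comptime evaluation may only call functions that are themselves `comptime` or that come from another, already-compiled crate.

    // Illustrative only: `helper` lives in the same crate and is not `comptime`,
    // so evaluating `main` at comptime is expected to report
    // NonComptimeFnCallInSameCrate rather than run unelaborated code.
    const SAME_CRATE_EXAMPLE: &str = "
        fn helper() -> Field { 1 }

        comptime fn main() -> pub Field {
            helper()
        }
    ";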
+ let function = self.interner.function_name(&function).to_owned(); + return Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }); + } + if meta.kind != FunctionKind::Normal { return self.call_builtin(function, arguments, location); } @@ -100,14 +124,8 @@ impl<'a> Interpreter<'a> { .expect("all builtin functions must contain a function attribute which contains the opcode which it links to"); if let Some(builtin) = func_attrs.builtin() { - match builtin.as_str() { - "array_len" => builtin::array_len(&arguments), - "as_slice" => builtin::as_slice(arguments), - _ => { - let item = format!("Comptime evaluation for builtin function {builtin}"); - Err(InterpreterError::Unimplemented { item, location }) - } - } + let builtin = builtin.clone(); + builtin::call_builtin(self.interner, &builtin, arguments, location) } else if let Some(foreign) = func_attrs.foreign() { let item = format!("Comptime evaluation for foreign functions like {foreign}"); Err(InterpreterError::Unimplemented { item, location }) @@ -196,7 +214,8 @@ impl<'a> Interpreter<'a> { ) -> IResult<()> { match pattern { HirPattern::Identifier(identifier) => { - self.define(identifier.id, typ, argument, location) + self.define(identifier.id, argument); + Ok(()) } HirPattern::Mutable(pattern, _) => { self.define_pattern(pattern, typ, argument, location) @@ -218,8 +237,6 @@ impl<'a> Interpreter<'a> { }, HirPattern::Struct(struct_type, pattern_fields, _) => { self.push_scope(); - self.type_check(typ, &argument, location)?; - self.type_check(struct_type, &argument, location)?; let res = match argument { Value::Struct(fields, struct_type) if fields.len() == pattern_fields.len() => { @@ -255,30 +272,8 @@ impl<'a> Interpreter<'a> { } /// Define a new variable in the current scope - fn define( - &mut self, - id: DefinitionId, - typ: &Type, - argument: Value, - location: Location, - ) -> IResult<()> { - // Temporarily disabled since this fails on generic types - // self.type_check(typ, &argument, location)?; + fn define(&mut self, id: DefinitionId, argument: Value) { self.current_scope_mut().insert(id, argument); - Ok(()) - } - - /// Mutate an existing variable, potentially from a prior scope. 
- /// Also type checks the value being assigned - fn checked_mutate( - &mut self, - id: DefinitionId, - typ: &Type, - argument: Value, - location: Location, - ) -> IResult<()> { - self.type_check(typ, &argument, location)?; - self.mutate(id, argument, location) } /// Mutate an existing variable, potentially from a prior scope @@ -317,15 +312,6 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::NonComptimeVarReferenced { name, location }) } - fn type_check(&self, typ: &Type, value: &Value, location: Location) -> IResult<()> { - let typ = typ.follow_bindings(); - let value_type = value.get_type(); - - typ.try_unify(&value_type, &mut TypeBindings::new()).map_err(|_| { - InterpreterError::TypeMismatch { expected: typ, value: value.clone(), location } - }) - } - /// Evaluate an expression and return the result pub fn evaluate(&mut self, id: ExprId) -> IResult { match self.interner.expression(&id) { @@ -343,9 +329,9 @@ impl<'a> Interpreter<'a> { HirExpression::If(if_) => self.evaluate_if(if_, id), HirExpression::Tuple(tuple) => self.evaluate_tuple(tuple), HirExpression::Lambda(lambda) => self.evaluate_lambda(lambda, id), - HirExpression::Quote(block) => self.evaluate_quote(block, id), + HirExpression::Quote(tokens) => self.evaluate_quote(tokens, id), HirExpression::Comptime(block) => self.evaluate_block(block), - HirExpression::Unquote(block) => { + HirExpression::Unquote(tokens) => { // An Unquote expression being found is indicative of a macro being // expanded within another comptime fn which we don't currently support. let location = self.interner.expr_location(&id); @@ -363,8 +349,9 @@ impl<'a> Interpreter<'a> { match &definition.kind { DefinitionKind::Function(function_id) => { - let typ = self.interner.id_type(id); - Ok(Value::Function(*function_id, typ)) + let typ = self.interner.id_type(id).follow_bindings(); + let bindings = Rc::new(self.interner.get_instantiation_bindings(id).clone()); + Ok(Value::Function(*function_id, typ, bindings)) } DefinitionKind::Local(_) => self.lookup(&ident), DefinitionKind::Global(global_id) => { @@ -535,7 +522,7 @@ impl<'a> Interpreter<'a> { } fn evaluate_array(&mut self, array: HirArrayLiteral, id: ExprId) -> IResult { - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); match array { HirArrayLiteral::Standard(elements) => { @@ -617,10 +604,13 @@ impl<'a> Interpreter<'a> { let rhs = self.evaluate(infix.rhs)?; // TODO: Need to account for operator overloading - assert!( - self.interner.get_selected_impl_for_expression(id).is_none(), - "Operator overloading is unimplemented in the interpreter" - ); + // See https://github.com/noir-lang/noir/issues/4925 + if self.interner.get_selected_impl_for_expression(id).is_some() { + return Err(InterpreterError::Unimplemented { + item: "Operator overloading in the interpreter".to_string(), + location: infix.operator.location, + }); + } use InterpreterError::InvalidValuesForBinary; match infix.operator.kind { @@ -929,13 +919,25 @@ impl<'a> Interpreter<'a> { }) .collect::>()?; - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); Ok(Value::Struct(fields, typ)) } fn evaluate_access(&mut self, access: HirMemberAccess, id: ExprId) -> IResult { let (fields, struct_type) = match self.evaluate(access.lhs)? 
{ Value::Struct(fields, typ) => (fields, typ), + Value::Tuple(fields) => { + let (fields, field_types): (HashMap, Value>, Vec) = fields + .into_iter() + .enumerate() + .map(|(i, field)| { + let field_type = field.get_type().into_owned(); + let key_val_pair = (Rc::new(i.to_string()), field); + (key_val_pair, field_type) + }) + .unzip(); + (fields, Type::Tuple(field_types)) + } value => { let location = self.interner.expr_location(&id); return Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }); @@ -958,7 +960,10 @@ impl<'a> Interpreter<'a> { let location = self.interner.expr_location(&id); match function { - Value::Function(function_id, _) => self.call_function(function_id, arguments, location), + Value::Function(function_id, _, bindings) => { + let bindings = unwrap_rc(bindings); + self.call_function(function_id, arguments, bindings, location) + } Value::Closure(closure, env, _) => self.call_closure(closure, env, arguments, location), value => Err(InterpreterError::NonFunctionCalled { value, location }), } @@ -987,7 +992,7 @@ impl<'a> Interpreter<'a> { }; if let Some(method) = method { - self.call_function(method, arguments, location) + self.call_function(method, arguments, TypeBindings::new(), location) } else { Err(InterpreterError::NoMethodFound { name: method_name.clone(), typ, location }) } @@ -1132,17 +1137,14 @@ impl<'a> Interpreter<'a> { let environment = try_vecmap(&lambda.captures, |capture| self.lookup_id(capture.ident.id, location))?; - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); Ok(Value::Closure(lambda, environment, typ)) } - fn evaluate_quote(&mut self, mut quoted: HirQuoted, expr_id: ExprId) -> IResult { - let file = self.interner.expr_location(&expr_id).file; - let values = try_vecmap(quoted.unquoted_exprs, |value| self.evaluate(value))?; - let args = UnquoteArgs { values, file }; - - self.substitute_unquoted_values_into_block(&mut quoted.quoted_block, &args); - Ok(Value::Code(Rc::new(quoted.quoted_block))) + fn evaluate_quote(&mut self, mut tokens: Tokens, expr_id: ExprId) -> IResult { + let location = self.interner.expr_location(&expr_id); + tokens = self.substitute_unquoted_values_into_tokens(tokens, location)?; + Ok(Value::Code(Rc::new(tokens))) } pub fn evaluate_statement(&mut self, statement: StmtId) -> IResult { @@ -1196,9 +1198,7 @@ impl<'a> Interpreter<'a> { fn store_lvalue(&mut self, lvalue: HirLValue, rhs: Value) -> IResult<()> { match lvalue { - HirLValue::Ident(ident, typ) => { - self.checked_mutate(ident.id, &typ, rhs, ident.location) - } + HirLValue::Ident(ident, typ) => self.mutate(ident.id, rhs, ident.location), HirLValue::Dereference { lvalue, element_type: _, location } => { match self.evaluate_lvalue(&lvalue)? 
{ Value::Pointer(value) => { @@ -1217,7 +1217,7 @@ impl<'a> Interpreter<'a> { } Value::Struct(mut fields, typ) => { fields.insert(Rc::new(field_name.0.contents), rhs); - self.store_lvalue(*object, Value::Struct(fields, typ)) + self.store_lvalue(*object, Value::Struct(fields, typ.follow_bindings())) } value => { Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }) diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 9216ba271c1..1c0c4e6f274 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -1,11 +1,35 @@ +use std::rc::Rc; + use noirc_errors::Location; use crate::{ - hir::comptime::{errors::IResult, Value}, - Type, + hir::comptime::{errors::IResult, InterpreterError, Value}, + macros_api::NodeInterner, + token::{SpannedToken, Token, Tokens}, + QuotedType, Type, }; -pub(super) fn array_len(arguments: &[(Value, Location)]) -> IResult { +pub(super) fn call_builtin( + interner: &mut NodeInterner, + name: &str, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + match name { + "array_len" => array_len(&arguments), + "as_slice" => as_slice(arguments), + "slice_push_back" => slice_push_back(arguments), + "type_def_as_type" => type_def_as_type(interner, arguments), + "type_def_generics" => type_def_generics(interner, arguments), + "type_def_fields" => type_def_fields(interner, arguments), + _ => { + let item = format!("Comptime evaluation for builtin function {name}"); + Err(InterpreterError::Unimplemented { item, location }) + } + } +} + +fn array_len(arguments: &[(Value, Location)]) -> IResult { assert_eq!(arguments.len(), 1, "ICE: `array_len` should only receive a single argument"); match &arguments[0].0 { Value::Array(values, _) | Value::Slice(values, _) => Ok(Value::U32(values.len() as u32)), @@ -14,7 +38,7 @@ pub(super) fn array_len(arguments: &[(Value, Location)]) -> IResult { } } -pub(super) fn as_slice(mut arguments: Vec<(Value, Location)>) -> IResult { +fn as_slice(mut arguments: Vec<(Value, Location)>) -> IResult { assert_eq!(arguments.len(), 1, "ICE: `as_slice` should only receive a single argument"); let (array, _) = arguments.pop().unwrap(); match array { @@ -23,3 +47,112 @@ pub(super) fn as_slice(mut arguments: Vec<(Value, Location)>) -> IResult _ => unreachable!("ICE: Cannot convert types other than arrays into slices"), } } + +fn slice_push_back(mut arguments: Vec<(Value, Location)>) -> IResult { + assert_eq!(arguments.len(), 2, "ICE: `slice_push_back` should only receive two arguments"); + let (element, _) = arguments.pop().unwrap(); + let (slice, _) = arguments.pop().unwrap(); + match slice { + Value::Slice(mut values, typ) => { + values.push_back(element); + Ok(Value::Slice(values, typ)) + } + // Type checking should prevent this branch being taken. 
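The `type_def_as_type`, `type_def_generics`, and `type_def_fields` builtins defined just below back the `TypeDefinition` methods whose Noir signatures are quoted in their doc comments. A hedged usage sketch, again with the Noir source embedded as a Rust string; the method names follow those doc comments and are assumed to be what the standard library exposes, the surrounding macro wiring is not part of this hunk:

    // Illustrative only: comptime code holding a TypeDefinition can ask for its
    // name as a type, its generics, and its (name, type) field pairs.
    const TYPE_DEF_USAGE: &str = "
        comptime fn inspect(t: TypeDefinition) {
            let _typ = t.as_type();       // backed by type_def_as_type
            let _generics = t.generics(); // backed by type_def_generics
            let _fields = t.fields();     // backed by type_def_fields
        }
    ";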
+ _ => unreachable!("ICE: `slice_push_back` expects a slice as its first argument"), + } +} + +/// fn as_type(self) -> Quoted +fn type_def_as_type( + interner: &NodeInterner, + mut arguments: Vec<(Value, Location)>, +) -> IResult { + assert_eq!(arguments.len(), 1, "ICE: `generics` should only receive a single argument"); + let (type_def, span) = match arguments.pop() { + Some((Value::TypeDefinition(id), location)) => (id, location.span), + other => { + unreachable!("ICE: `as_type` expected a `TypeDefinition` argument, found {other:?}") + } + }; + + let struct_def = interner.get_struct(type_def); + let struct_def = struct_def.borrow(); + let make_token = |name| SpannedToken::new(Token::Ident(name), span); + + let mut tokens = vec![make_token(struct_def.name.to_string())]; + + for (i, generic) in struct_def.generics.iter().enumerate() { + if i != 0 { + tokens.push(SpannedToken::new(Token::Comma, span)); + } + tokens.push(make_token(generic.type_var.borrow().to_string())); + } + + Ok(Value::Code(Rc::new(Tokens(tokens)))) +} + +/// fn generics(self) -> [Quoted] +fn type_def_generics( + interner: &NodeInterner, + mut arguments: Vec<(Value, Location)>, +) -> IResult { + assert_eq!(arguments.len(), 1, "ICE: `generics` should only receive a single argument"); + let (type_def, span) = match arguments.pop() { + Some((Value::TypeDefinition(id), location)) => (id, location.span), + other => { + unreachable!("ICE: `as_type` expected a `TypeDefinition` argument, found {other:?}") + } + }; + + let struct_def = interner.get_struct(type_def); + + let generics = struct_def + .borrow() + .generics + .iter() + .map(|generic| { + let name = SpannedToken::new(Token::Ident(generic.type_var.borrow().to_string()), span); + Value::Code(Rc::new(Tokens(vec![name]))) + }) + .collect(); + + let typ = Type::Slice(Box::new(Type::Quoted(QuotedType::Quoted))); + Ok(Value::Slice(generics, typ)) +} + +/// fn fields(self) -> [(Quoted, Quoted)] +/// Returns (name, type) pairs of each field of this TypeDefinition +fn type_def_fields( + interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, +) -> IResult { + assert_eq!(arguments.len(), 1, "ICE: `generics` should only receive a single argument"); + let (type_def, span) = match arguments.pop() { + Some((Value::TypeDefinition(id), location)) => (id, location.span), + other => { + unreachable!("ICE: `as_type` expected a `TypeDefinition` argument, found {other:?}") + } + }; + + let struct_def = interner.get_struct(type_def); + let struct_def = struct_def.borrow(); + + let make_token = |name| SpannedToken::new(Token::Ident(name), span); + let make_quoted = |tokens| Value::Code(Rc::new(Tokens(tokens))); + + let mut fields = im::Vector::new(); + + for (name, typ) in struct_def.get_fields_as_written() { + let name = make_quoted(vec![make_token(name)]); + let id = interner.push_quoted_type(typ); + let typ = SpannedToken::new(Token::QuotedType(id), span); + let typ = Value::Code(Rc::new(Tokens(vec![typ]))); + fields.push_back(Value::Tuple(vec![name, typ])); + } + + let typ = Type::Slice(Box::new(Type::Tuple(vec![ + Type::Quoted(QuotedType::Quoted), + Type::Quoted(QuotedType::Quoted), + ]))); + Ok(Value::Slice(fields, typ)) +} diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs index 58ca345ca90..a1ceb27afb2 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs @@ -1,287 +1,42 @@ -use 
fm::FileId; use noirc_errors::Location; use crate::{ - ast::{ - ArrayLiteral, AssignStatement, ConstrainStatement, ConstructorExpression, IfExpression, - InfixExpression, Lambda, - }, - hir::comptime::{errors::IResult, Value}, - macros_api::{ - BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, - ForLoopStatement, ForRange, IndexExpression, LetStatement, Literal, MemberAccessExpression, - MethodCallExpression, PrefixExpression, Statement, StatementKind, - }, + hir::comptime::{errors::IResult, value::unwrap_rc, Value}, + token::{SpannedToken, Token, Tokens}, }; use super::Interpreter; -pub(super) struct UnquoteArgs { - pub(super) values: Vec, - pub(super) file: FileId, -} - impl<'a> Interpreter<'a> { - pub(super) fn substitute_unquoted_values_into_block( - &mut self, - block: &mut BlockExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - for statement in &mut block.statements { - self.substitute_unquoted_into_statement(statement, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_statement( - &mut self, - statement: &mut Statement, - args: &UnquoteArgs, - ) -> IResult<()> { - match &mut statement.kind { - StatementKind::Let(let_) => self.substitute_unquoted_into_let(let_, args), - StatementKind::Constrain(constrain) => { - self.substitute_unquoted_into_constrain(constrain, args) - } - StatementKind::Expression(expr) => self.substitute_unquoted_into_expr(expr, args), - StatementKind::Assign(assign) => self.substitute_unquoted_into_assign(assign, args), - StatementKind::For(for_) => self.substitute_unquoted_into_for(for_, args), - StatementKind::Break => Ok(()), - StatementKind::Continue => Ok(()), - StatementKind::Comptime(comptime) => { - self.substitute_unquoted_into_statement(comptime, args) - } - StatementKind::Semi(expr) => self.substitute_unquoted_into_expr(expr, args), - StatementKind::Error => Ok(()), - } - } - - fn substitute_unquoted_into_constrain( - &mut self, - constrain: &mut ConstrainStatement, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut constrain.0, args)?; - if let Some(msg) = constrain.1.as_mut() { - self.substitute_unquoted_into_expr(msg, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_let( - &mut self, - let_: &mut LetStatement, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut let_.expression, args) - } - - fn substitute_unquoted_into_assign( - &mut self, - assign: &mut AssignStatement, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut assign.expression, args) - } - - fn substitute_unquoted_into_for( - &mut self, - for_: &mut ForLoopStatement, - args: &UnquoteArgs, - ) -> IResult<()> { - match &mut for_.range { - ForRange::Range(start, end) => { - self.substitute_unquoted_into_expr(start, args)?; - self.substitute_unquoted_into_expr(end, args)?; - } - ForRange::Array(array) => { - self.substitute_unquoted_into_expr(array, args)?; - } - }; - self.substitute_unquoted_into_expr(&mut for_.block, args) - } - - fn substitute_unquoted_into_expr( - &mut self, - expr: &mut Expression, - args: &UnquoteArgs, - ) -> IResult<()> { - match &mut expr.kind { - ExpressionKind::Literal(literal) => { - self.substitute_unquoted_into_literal(literal, args) - } - ExpressionKind::Block(block) => self.substitute_unquoted_values_into_block(block, args), - ExpressionKind::Prefix(prefix) => self.substitute_unquoted_into_prefix(prefix, args), - ExpressionKind::Index(index) => self.substitute_unquoted_into_index(index, args), - 
ExpressionKind::Call(call) => self.substitute_unquoted_into_call(call, args), - ExpressionKind::MethodCall(call) => { - self.substitute_unquoted_into_method_call(call, args) - } - ExpressionKind::Constructor(constructor) => { - self.substitute_unquoted_into_constructor(constructor, args) - } - ExpressionKind::MemberAccess(access) => { - self.substitute_unquoted_into_access(access, args) - } - ExpressionKind::Cast(cast) => self.substitute_unquoted_into_cast(cast, args), - ExpressionKind::Infix(infix) => self.substitute_unquoted_into_infix(infix, args), - ExpressionKind::If(if_) => self.substitute_unquoted_into_if(if_, args), - ExpressionKind::Variable(_, _) => Ok(()), - ExpressionKind::Tuple(tuple) => self.substitute_unquoted_into_tuple(tuple, args), - ExpressionKind::Lambda(lambda) => self.substitute_unquoted_into_lambda(lambda, args), - ExpressionKind::Parenthesized(expr) => self.substitute_unquoted_into_expr(expr, args), - ExpressionKind::Quote(quote, _) => { - self.substitute_unquoted_values_into_block(quote, args) - } - ExpressionKind::Unquote(unquote) => self.substitute_unquoted_into_expr(unquote, args), - ExpressionKind::Comptime(comptime, _) => { - self.substitute_unquoted_values_into_block(comptime, args) - } - ExpressionKind::Resolved(_) => Ok(()), - ExpressionKind::Error => Ok(()), - ExpressionKind::UnquoteMarker(index) => { - let location = Location::new(expr.span, args.file); - *expr = args.values[*index].clone().into_expression(self.interner, location)?; - Ok(()) - } - } - } - - fn substitute_unquoted_into_literal( - &mut self, - literal: &mut Literal, - args: &UnquoteArgs, - ) -> IResult<()> { - match literal { - Literal::Array(array) | Literal::Slice(array) => match array { - ArrayLiteral::Standard(elements) => { - for element in elements { - self.substitute_unquoted_into_expr(element, args)?; + /// Evaluates any expressions within UnquoteMarkers in the given token list + /// and replaces the expression held by the marker with the evaluated value + /// in expression form. + pub(super) fn substitute_unquoted_values_into_tokens( + &mut self, + tokens: Tokens, + location: Location, + ) -> IResult { + let mut new_tokens = Vec::with_capacity(tokens.0.len()); + + for token in tokens.0 { + let span = token.to_span(); + match token.token() { + Token::UnquoteMarker(id) => { + match self.evaluate(*id)? { + // If the value is already quoted we don't want to change the token stream by + // turning it into a Quoted block (which would add `quote`, `{`, and `}` tokens). 
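As the comment above notes, a value that is already a quoted token stream is spliced in token-for-token rather than being wrapped in another `quote { ... }` block. A hedged illustration of the consequence (Noir source as a Rust string; not a test from this change):

    // Illustrative only: `$a` contributes the raw tokens `1 + 2`, so `b` holds the
    // stream `1 + 2 * 3`, which later parses with the usual operator precedence
    // rather than as `(1 + 2) * 3`.
    const SPLICE_EXAMPLE: &str = "
        comptime fn main() {
            let a = quote { 1 + 2 };
            let b = quote { $a * 3 };
        }
    ";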
+ Value::Code(stream) => new_tokens.extend(unwrap_rc(stream).0), + value => { + let new_id = value.into_hir_expression(self.interner, location)?; + let new_token = Token::UnquoteMarker(new_id); + new_tokens.push(SpannedToken::new(new_token, span)); + } } - Ok(()) - } - ArrayLiteral::Repeated { repeated_element, length } => { - self.substitute_unquoted_into_expr(repeated_element, args)?; - self.substitute_unquoted_into_expr(length, args)?; - Ok(()) } - }, - Literal::Bool(_) - | Literal::Integer(_, _) - | Literal::Str(_) - | Literal::RawStr(_, _) - | Literal::FmtStr(_) - | Literal::Unit => Ok(()), - } - } - - fn substitute_unquoted_into_prefix( - &mut self, - prefix: &mut PrefixExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut prefix.rhs, args) - } - - fn substitute_unquoted_into_index( - &mut self, - index: &mut IndexExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut index.collection, args)?; - self.substitute_unquoted_into_expr(&mut index.index, args) - } - - fn substitute_unquoted_into_call( - &mut self, - call: &mut CallExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut call.func, args)?; - for arg in &mut call.arguments { - self.substitute_unquoted_into_expr(arg, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_method_call( - &mut self, - call: &mut MethodCallExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut call.object, args)?; - for arg in &mut call.arguments { - self.substitute_unquoted_into_expr(arg, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_constructor( - &mut self, - constructor: &mut ConstructorExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - for (_, field) in &mut constructor.fields { - self.substitute_unquoted_into_expr(field, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_access( - &mut self, - access: &mut MemberAccessExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut access.lhs, args) - } - - fn substitute_unquoted_into_cast( - &mut self, - cast: &mut CastExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut cast.lhs, args) - } - - fn substitute_unquoted_into_infix( - &mut self, - infix: &mut InfixExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut infix.lhs, args)?; - self.substitute_unquoted_into_expr(&mut infix.rhs, args) - } - - fn substitute_unquoted_into_if( - &mut self, - if_: &mut IfExpression, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut if_.condition, args)?; - self.substitute_unquoted_into_expr(&mut if_.consequence, args)?; - - if let Some(alternative) = if_.alternative.as_mut() { - self.substitute_unquoted_into_expr(alternative, args)?; - } - Ok(()) - } - - fn substitute_unquoted_into_tuple( - &mut self, - tuple: &mut [Expression], - args: &UnquoteArgs, - ) -> IResult<()> { - for field in tuple { - self.substitute_unquoted_into_expr(field, args)?; + _ => new_tokens.push(token), + } } - Ok(()) - } - fn substitute_unquoted_into_lambda( - &mut self, - lambda: &mut Lambda, - args: &UnquoteArgs, - ) -> IResult<()> { - self.substitute_unquoted_into_expr(&mut lambda.body, args) + Ok(Tokens(new_tokens)) } } diff --git a/compiler/noirc_frontend/src/hir/comptime/scan.rs b/compiler/noirc_frontend/src/hir/comptime/scan.rs index f2e21e608ea..770c523bd7f 100644 --- 
a/compiler/noirc_frontend/src/hir/comptime/scan.rs +++ b/compiler/noirc_frontend/src/hir/comptime/scan.rs @@ -100,7 +100,7 @@ impl<'interner> Interpreter<'interner> { // missed it somehow. In the future we may allow users to manually write unquote // expressions in their code but for now this is unreachable. HirExpression::Unquote(block) => { - unreachable!("Found unquote block while scanning: {block}") + unreachable!("Found unquote block while scanning: {block:?}") } } } diff --git a/compiler/noirc_frontend/src/hir/comptime/tests.rs b/compiler/noirc_frontend/src/hir/comptime/tests.rs index 43f6e21905b..870f2bc458a 100644 --- a/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -7,15 +7,16 @@ use noirc_errors::Location; use super::errors::InterpreterError; use super::interpreter::Interpreter; use super::value::Value; +use crate::graph::CrateId; use crate::hir::type_check::test::type_check_src_code; fn interpret_helper(src: &str, func_namespace: Vec) -> Result { let (mut interner, main_id) = type_check_src_code(src, func_namespace); let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(&mut interner, &mut scopes); + let mut interpreter = Interpreter::new(&mut interner, &mut scopes, CrateId::Root(0)); let no_location = Location::dummy(); - interpreter.call_function(main_id, Vec::new(), no_location) + interpreter.call_function(main_id, Vec::new(), HashMap::new(), no_location) } fn interpret(src: &str, func_namespace: Vec) -> Value { @@ -30,14 +31,14 @@ fn interpret_expect_error(src: &str, func_namespace: Vec) -> Interpreter #[test] fn interpreter_works() { - let program = "fn main() -> pub Field { 3 }"; + let program = "comptime fn main() -> pub Field { 3 }"; let result = interpret(program, vec!["main".into()]); assert_eq!(result, Value::Field(3u128.into())); } #[test] fn mutation_works() { - let program = "fn main() -> pub i8 { + let program = "comptime fn main() -> pub i8 { let mut x = 3; x = 4; x @@ -48,7 +49,7 @@ fn mutation_works() { #[test] fn mutating_references() { - let program = "fn main() -> pub i32 { + let program = "comptime fn main() -> pub i32 { let x = &mut 3; *x = 4; *x @@ -59,7 +60,7 @@ fn mutating_references() { #[test] fn mutating_mutable_references() { - let program = "fn main() -> pub i64 { + let program = "comptime fn main() -> pub i64 { let mut x = &mut 3; *x = 4; *x @@ -70,7 +71,7 @@ fn mutating_mutable_references() { #[test] fn mutating_arrays() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut a1 = [1, 2, 3, 4]; a1[1] = 22; a1[1] @@ -81,7 +82,7 @@ fn mutating_arrays() { #[test] fn mutate_in_new_scope() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut x = 0; x += 1; { @@ -95,7 +96,7 @@ fn mutate_in_new_scope() { #[test] fn for_loop() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut x = 0; for i in 0 .. 6 { x += i; @@ -108,7 +109,7 @@ fn for_loop() { #[test] fn for_loop_u16() { - let program = "fn main() -> pub u16 { + let program = "comptime fn main() -> pub u16 { let mut x = 0; for i in 0 .. 6 { x += i; @@ -121,7 +122,7 @@ fn for_loop_u16() { #[test] fn for_loop_with_break() { - let program = "unconstrained fn main() -> pub u32 { + let program = "unconstrained comptime fn main() -> pub u32 { let mut x = 0; for i in 0 .. 
6 { if i == 4 { @@ -137,7 +138,7 @@ fn for_loop_with_break() { #[test] fn for_loop_with_continue() { - let program = "unconstrained fn main() -> pub u64 { + let program = "unconstrained comptime fn main() -> pub u64 { let mut x = 0; for i in 0 .. 6 { if i == 4 { @@ -153,7 +154,7 @@ fn for_loop_with_continue() { #[test] fn assert() { - let program = "fn main() { + let program = "comptime fn main() { assert(1 == 1); }"; let result = interpret(program, vec!["main".into()]); @@ -162,7 +163,7 @@ fn assert() { #[test] fn assert_fail() { - let program = "fn main() { + let program = "comptime fn main() { assert(1 == 2); }"; let result = interpret_expect_error(program, vec!["main".into()]); @@ -171,7 +172,7 @@ fn assert_fail() { #[test] fn lambda() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let f = |x: u8| x + 1; f(1) }"; @@ -182,11 +183,11 @@ fn lambda() { #[test] fn non_deterministic_recursion() { let program = " - fn main() -> pub u64 { + comptime fn main() -> pub u64 { fib(10) } - fn fib(x: u64) -> u64 { + comptime fn fib(x: u64) -> u64 { if x <= 1 { x } else { @@ -196,3 +197,18 @@ fn non_deterministic_recursion() { let result = interpret(program, vec!["main".into(), "fib".into()]); assert_eq!(result, Value::U64(55)); } + +#[test] +fn generic_functions() { + let program = " + fn main() -> pub u8 { + apply(1, |x| x + 1) + } + + fn apply(x: T, f: fn[Env](T) -> U) -> U { + f(x) + } + "; + let result = interpret(program, vec!["main".into(), "apply".into()]); + assert!(matches!(result, Value::U8(2))); +} diff --git a/compiler/noirc_frontend/src/hir/comptime/value.rs b/compiler/noirc_frontend/src/hir/comptime/value.rs index a75d4fd2b24..c956cdb5796 100644 --- a/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -1,21 +1,22 @@ use std::{borrow::Cow, fmt::Display, rc::Rc}; use acvm::{AcirField, FieldElement}; +use chumsky::Parser; use im::Vector; use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use crate::{ - ast::{ - ArrayLiteral, BlockExpression, ConstructorExpression, Ident, IntegerBitSize, Signedness, - }, + ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness}, hir_def::expr::{HirArrayLiteral, HirConstructorExpression, HirIdent, HirLambda, ImplKind}, macros_api::{ Expression, ExpressionKind, HirExpression, HirLiteral, Literal, NodeInterner, Path, StructId, }, node_interner::{ExprId, FuncId}, - QuotedType, Shared, Type, + parser::{self, NoirParser, TopLevelStatement}, + token::{SpannedToken, Token, Tokens}, + QuotedType, Shared, Type, TypeBindings, }; use rustc_hash::FxHashMap as HashMap; @@ -35,14 +36,14 @@ pub enum Value { U32(u32), U64(u64), String(Rc), - Function(FuncId, Type), + Function(FuncId, Type, Rc), Closure(HirLambda, Vec, Type), Tuple(Vec), Struct(HashMap, Value>, Type), Pointer(Shared), Array(Vector, Type), Slice(Vector, Type), - Code(Rc), + Code(Rc), TypeDefinition(StructId), } @@ -64,7 +65,7 @@ impl Value { let length = Type::Constant(value.len() as u32); Type::String(Box::new(length)) } - Value::Function(_, typ) => return Cow::Borrowed(typ), + Value::Function(_, typ, _) => return Cow::Borrowed(typ), Value::Closure(_, _, typ) => return Cow::Borrowed(typ), Value::Tuple(fields) => { Type::Tuple(vecmap(fields, |field| field.get_type().into_owned())) @@ -72,7 +73,7 @@ impl Value { Value::Struct(_, typ) => return Cow::Borrowed(typ), Value::Array(_, typ) => return Cow::Borrowed(typ), Value::Slice(_, typ) => return Cow::Borrowed(typ), - 
Value::Code(_) => Type::Quoted(QuotedType::Expr), + Value::Code(_) => Type::Quoted(QuotedType::Quoted), Value::TypeDefinition(_) => Type::Quoted(QuotedType::TypeDefinition), Value::Pointer(element) => { let element = element.borrow().get_type().into_owned(); @@ -127,13 +128,14 @@ impl Value { ExpressionKind::Literal(Literal::Integer((value as u128).into(), false)) } Value::String(value) => ExpressionKind::Literal(Literal::Str(unwrap_rc(value))), - Value::Function(id, typ) => { + Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; let ident = HirIdent { location, id, impl_kind }; let expr_id = interner.push_expr(HirExpression::Ident(ident, None)); interner.push_expr_location(expr_id, location.span, location.file); interner.push_expr_type(expr_id, typ); + interner.store_instantiation_bindings(expr_id, unwrap_rc(bindings)); ExpressionKind::Resolved(expr_id) } Value::Closure(_lambda, _env, _typ) => { @@ -174,7 +176,22 @@ impl Value { try_vecmap(elements, |element| element.into_expression(interner, location))?; ExpressionKind::Literal(Literal::Slice(ArrayLiteral::Standard(elements))) } - Value::Code(block) => ExpressionKind::Block(unwrap_rc(block)), + Value::Code(tokens) => { + // Wrap the tokens in '{' and '}' so that we can parse statements as well. + let mut tokens_to_parse = tokens.as_ref().clone(); + tokens_to_parse.0.insert(0, SpannedToken::new(Token::LeftBrace, location.span)); + tokens_to_parse.0.push(SpannedToken::new(Token::RightBrace, location.span)); + + return match parser::expression().parse(tokens_to_parse) { + Ok(expr) => Ok(expr), + Err(mut errors) => { + let error = errors.swap_remove(0); + let file = location.file; + let rule = "an expression"; + Err(InterpreterError::FailedToParseMacro { error, file, tokens, rule }) + } + }; + } Value::Pointer(_) | Value::TypeDefinition(_) => { return Err(InterpreterError::CannotInlineMacro { value: self, location }) } @@ -231,10 +248,15 @@ impl Value { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } Value::String(value) => HirExpression::Literal(HirLiteral::Str(unwrap_rc(value))), - Value::Function(id, _typ) => { + Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; - HirExpression::Ident(HirIdent { location, id, impl_kind }, None) + let ident = HirIdent { location, id, impl_kind }; + let expr_id = interner.push_expr(HirExpression::Ident(ident, None)); + interner.push_expr_location(expr_id, location.span, location.file); + interner.push_expr_type(expr_id, typ); + interner.store_instantiation_bindings(expr_id, unwrap_rc(bindings)); + return Ok(expr_id); } Value::Closure(_lambda, _env, _typ) => { // TODO: How should a closure's environment be inlined? @@ -303,13 +325,31 @@ impl Value { _ => None, } } + + pub(crate) fn into_top_level_item(self, location: Location) -> IResult { + match self { + Value::Code(tokens) => parse_tokens(tokens, parser::top_level_item(), location.file), + value => Err(InterpreterError::CannotInlineMacro { value, location }), + } + } } /// Unwraps an Rc value without cloning the inner value if the reference count is 1. Clones otherwise. 
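The `unwrap_rc` helper documented above gets wider use now that token streams are shared behind `Rc`. A minimal standalone restatement of the pattern in plain Rust, independent of the compiler's types:

    use std::rc::Rc;

    /// Moves the value out when this is the only reference; clones otherwise.
    fn unwrap_or_clone<T: Clone>(rc: Rc<T>) -> T {
        Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone())
    }

    fn main() {
        let unique = Rc::new(vec![1, 2, 3]);
        assert_eq!(unwrap_or_clone(unique), vec![1, 2, 3]); // no clone needed: refcount was 1
    }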
-fn unwrap_rc(rc: Rc) -> T { +pub(crate) fn unwrap_rc(rc: Rc) -> T { Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()) } +fn parse_tokens(tokens: Rc, parser: impl NoirParser, file: fm::FileId) -> IResult { + match parser.parse(tokens.as_ref().clone()) { + Ok(expr) => Ok(expr), + Err(mut errors) => { + let error = errors.swap_remove(0); + let rule = "an expression"; + Err(InterpreterError::FailedToParseMacro { error, file, tokens, rule }) + } + } +} + impl Display for Value { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -328,7 +368,7 @@ impl Display for Value { Value::U32(value) => write!(f, "{value}"), Value::U64(value) => write!(f, "{value}"), Value::String(value) => write!(f, "{value}"), - Value::Function(_, _) => write!(f, "(function)"), + Value::Function(..) => write!(f, "(function)"), Value::Closure(_, _, _) => write!(f, "(closure)"), Value::Tuple(fields) => { let fields = vecmap(fields, ToString::to_string); @@ -351,7 +391,13 @@ impl Display for Value { let values = vecmap(values, ToString::to_string); write!(f, "&[{}]", values.join(", ")) } - Value::Code(block) => write!(f, "quote {block}"), + Value::Code(tokens) => { + write!(f, "quote {{")?; + for token in tokens.0.iter() { + write!(f, " {token}")?; + } + write!(f, " }}") + } Value::TypeDefinition(_) => write!(f, "(type definition)"), } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 216ed5fc545..37ece01c805 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -5,7 +5,7 @@ use crate::graph::CrateId; use crate::hir::comptime::{Interpreter, InterpreterError}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::{Type, TypeVariable}; +use crate::{ResolvedGeneric, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; use crate::hir::resolution::{ @@ -33,7 +33,6 @@ use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Span}; use std::collections::{BTreeMap, HashMap}; -use std::rc::Rc; use std::vec; #[derive(Default)] @@ -125,7 +124,7 @@ pub struct UnresolvedTraitImpl { pub trait_id: Option, pub impl_id: Option, pub resolved_object_type: Option, - pub resolved_generics: Vec<(Rc, TypeVariable, Span)>, + pub resolved_generics: Vec, // The resolved generic on the trait itself. E.g. it is the `` in // `impl Foo for Bar { ... }` @@ -154,6 +153,7 @@ pub struct DefCollector { pub(crate) items: CollectedItems, } +#[derive(Default)] pub struct CollectedItems { pub(crate) functions: Vec, pub(crate) types: BTreeMap, @@ -164,6 +164,18 @@ pub struct CollectedItems { pub(crate) trait_impls: Vec, } +impl CollectedItems { + pub fn is_empty(&self) -> bool { + self.functions.is_empty() + && self.types.is_empty() + && self.type_aliases.is_empty() + && self.traits.is_empty() + && self.globals.is_empty() + && self.impls.is_empty() + && self.trait_impls.is_empty() + } +} + /// Maps the type and the module id in which the impl is defined to the functions contained in that /// impl along with the generics declared on the impl itself. This also contains the Span /// of the object_type of the impl, used to issue an error if the object type fails to resolve. @@ -379,6 +391,7 @@ impl DefCollector { def_collector.items.traits, crate_id, )); + // Must resolve structs before we resolve globals. 
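A hedged example of why the comment above orders struct resolution before global resolution (Noir source as a Rust string; illustrative only, and it assumes struct-typed globals are accepted at this point):

    // Illustrative only: resolving ORIGIN's type annotation requires Point to
    // already be a resolved struct.
    const STRUCT_TYPED_GLOBAL: &str = "
        struct Point { x: Field, y: Field }

        global ORIGIN: Point = Point { x: 0, y: 0 };
    ";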
resolved_module.errors.extend(resolve_structs( context, @@ -447,7 +460,7 @@ impl DefCollector { resolved_module.type_check(context); if !cycles_present { - resolved_module.evaluate_comptime(&mut context.def_interner); + resolved_module.evaluate_comptime(&mut context.def_interner, crate_id); } resolved_module.errors @@ -546,10 +559,10 @@ impl ResolvedModule { } /// Evaluate all `comptime` expressions in this module - fn evaluate_comptime(&mut self, interner: &mut NodeInterner) { + fn evaluate_comptime(&mut self, interner: &mut NodeInterner, crate_id: CrateId) { if self.count_errors() == 0 { let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(interner, &mut scopes); + let mut interpreter = Interpreter::new(interner, &mut scopes, crate_id); for (_file, global) in &self.globals { if let Err(error) = interpreter.scan_global(*global) { diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 5c196324b7d..ab9de6c25c4 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -11,6 +11,7 @@ use crate::ast::{ NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Pattern, TraitImplItem, TraitItem, TypeImpl, }; +use crate::macros_api::NodeInterner; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -26,7 +27,7 @@ use super::{ }, errors::{DefCollectorErrorKind, DuplicateType}, }; -use crate::hir::def_map::{LocalModuleId, ModuleData, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleId}; use crate::hir::resolution::import::ImportDirective; use crate::hir::Context; @@ -105,35 +106,19 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, fm::FileId)> { let mut errors = vec![]; for global in globals { - let name = global.pattern.name_ident().clone(); - - let global_id = context.def_interner.push_empty_global( - name.clone(), - self.module_id, + let (global, error) = collect_global( + &mut context.def_interner, + &mut self.def_collector.def_map, + global, self.file_id, - global.attributes.clone(), - matches!(global.pattern, Pattern::Mutable { .. 
}), + self.module_id, ); - // Add the statement to the scope so its path can be looked up later - let result = self.def_collector.def_map.modules[self.module_id.0] - .declare_global(name, global_id); - - if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Global, - first_def, - second_def, - }; - errors.push((err.into(), self.file_id)); + if let Some(error) = error { + errors.push(error); } - self.def_collector.items.globals.push(UnresolvedGlobal { - file_id: self.file_id, - module_id: self.module_id, - global_id, - stmt_def: global, - }); + self.def_collector.items.globals.push(global); } errors } @@ -149,8 +134,9 @@ impl<'a> ModCollector<'a> { self_type: None, }; - for (method, _) in r#impl.methods { + for (mut method, _) in r#impl.methods { let func_id = context.def_interner.push_empty_fn(); + method.def.where_clause.extend(r#impl.where_clause.clone()); let location = Location::new(method.span(), self.file_id); context.def_interner.push_function(func_id, &method.def, module_id, location); unresolved_functions.push_fn(self.module_id, func_id, method); @@ -168,11 +154,16 @@ impl<'a> ModCollector<'a> { impls: Vec, krate: CrateId, ) { - for trait_impl in impls { + for mut trait_impl in impls { let trait_name = trait_impl.trait_name.clone(); - let mut unresolved_functions = - self.collect_trait_impl_function_overrides(context, &trait_impl, krate); + let mut unresolved_functions = collect_trait_impl_functions( + &mut context.def_interner, + &mut trait_impl, + krate, + self.file_id, + self.module_id, + ); let module = ModuleId { krate, local_id: self.module_id }; @@ -204,33 +195,6 @@ impl<'a> ModCollector<'a> { } } - fn collect_trait_impl_function_overrides( - &mut self, - context: &mut Context, - trait_impl: &NoirTraitImpl, - krate: CrateId, - ) -> UnresolvedFunctions { - let mut unresolved_functions = UnresolvedFunctions { - file_id: self.file_id, - functions: Vec::new(), - trait_id: None, - self_type: None, - }; - - let module = ModuleId { krate, local_id: self.module_id }; - - for item in &trait_impl.items { - if let TraitImplItem::Function(impl_method) = item { - let func_id = context.def_interner.push_empty_fn(); - let location = Location::new(impl_method.span(), self.file_id); - context.def_interner.push_function(func_id, &impl_method.def, module, location); - unresolved_functions.push_fn(self.module_id, func_id, impl_method.clone()); - } - } - - unresolved_functions - } - fn collect_functions( &mut self, context: &mut Context, @@ -308,11 +272,21 @@ impl<'a> ModCollector<'a> { struct_def: struct_definition, }; + let resolved_generics = context.resolve_generics( + &unresolved.struct_def.generics, + &mut definition_errors, + self.file_id, + ); + // Create the corresponding module for the struct namespace let id = match self.push_child_module(&name, self.file_id, false, false) { - Ok(local_id) => { - context.def_interner.new_struct(&unresolved, krate, local_id, self.file_id) - } + Ok(local_id) => context.def_interner.new_struct( + &unresolved, + resolved_generics, + krate, + local_id, + self.file_id, + ), Err(error) => { definition_errors.push((error.into(), self.file_id)); continue; @@ -356,7 +330,14 @@ impl<'a> ModCollector<'a> { type_alias_def: type_alias, }; - let type_alias_id = context.def_interner.push_type_alias(&unresolved); + let resolved_generics = context.resolve_generics( + &unresolved.type_alias_def.generics, + &mut errors, + self.file_id, + ); + + let type_alias_id = + 
context.def_interner.push_type_alias(&unresolved, resolved_generics); // Add the type alias to scope so its path can be looked up later let result = self.def_collector.def_map.modules[self.module_id.0] @@ -516,6 +497,9 @@ impl<'a> ModCollector<'a> { } } + let resolved_generics = + context.resolve_generics(&trait_definition.generics, &mut errors, self.file_id); + // And store the TraitId -> TraitType mapping somewhere it is reachable let unresolved = UnresolvedTrait { file_id: self.file_id, @@ -525,7 +509,8 @@ impl<'a> ModCollector<'a> { method_ids, fns_with_default_impl: unresolved_functions, }; - context.def_interner.push_empty_trait(trait_id, &unresolved); + context.def_interner.push_empty_trait(trait_id, &unresolved, resolved_generics); + self.def_collector.items.traits.insert(trait_id, unresolved); } errors @@ -761,6 +746,60 @@ fn is_native_field(str: &str) -> bool { } } +pub(crate) fn collect_trait_impl_functions( + interner: &mut NodeInterner, + trait_impl: &mut NoirTraitImpl, + krate: CrateId, + file_id: FileId, + local_id: LocalModuleId, +) -> UnresolvedFunctions { + let mut unresolved_functions = + UnresolvedFunctions { file_id, functions: Vec::new(), trait_id: None, self_type: None }; + + let module = ModuleId { krate, local_id }; + + for item in std::mem::take(&mut trait_impl.items) { + if let TraitImplItem::Function(impl_method) = item { + let func_id = interner.push_empty_fn(); + let location = Location::new(impl_method.span(), file_id); + interner.push_function(func_id, &impl_method.def, module, location); + unresolved_functions.push_fn(local_id, func_id, impl_method); + } + } + + unresolved_functions +} + +pub(crate) fn collect_global( + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + global: LetStatement, + file_id: FileId, + module_id: LocalModuleId, +) -> (UnresolvedGlobal, Option<(CompilationError, FileId)>) { + let name = global.pattern.name_ident().clone(); + + let global_id = interner.push_empty_global( + name.clone(), + module_id, + file_id, + global.attributes.clone(), + matches!(global.pattern, Pattern::Mutable { .. }), + ); + + // Add the statement to the scope so its path can be looked up later + let result = def_map.modules[module_id.0].declare_global(name, global_id); + + let error = result.err().map(|(first_def, second_def)| { + let err = + DefCollectorErrorKind::Duplicate { typ: DuplicateType::Global, first_def, second_def }; + (err.into(), file_id) + }); + + let global = UnresolvedGlobal { file_id, module_id, global_id, stmt_def: global }; + (global, error) +} + #[cfg(test)] mod tests { use super::*; diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index edeb463e10d..af2264c3843 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -1,4 +1,4 @@ -use crate::ast::{Ident, Path}; +use crate::ast::{Ident, Path, UnresolvedTypeData}; use crate::hir::resolution::import::PathResolutionError; use noirc_errors::CustomDiagnostic as Diagnostic; @@ -66,6 +66,8 @@ pub enum DefCollectorErrorKind { TraitImplOrphaned { span: Span }, #[error("macro error : {0:?}")] MacroError(MacroError), + #[error("The only supported types of numeric generics are integers, fields, and booleans")] + UnsupportedNumericGenericType { ident: Ident, typ: UnresolvedTypeData }, } /// An error struct that macro processors can return. 
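// A minimal, self-contained sketch (not taken from this diff) of the rule the new
// `UnsupportedNumericGenericType` variant encodes: a numeric generic may only be
// declared with an integer, `Field`, or `bool` type, and collection reports one
// error per ill-formed declaration. The names `SketchType`, `SketchKind`, and
// `numeric_generic_kind` are hypothetical simplifications of the real
// `UnresolvedTypeData`/`Kind` machinery and exist only for illustration.
#[derive(Debug, Clone, PartialEq)]
enum SketchType {
    Integer,
    Field,
    Bool,
    Other(String),
}

#[derive(Debug, Clone, PartialEq)]
enum SketchKind {
    Normal,
    Numeric(SketchType),
}

// Decide the kind of a `let N: typ` generic, or return a message mirroring the diagnostic below.
fn numeric_generic_kind(typ: SketchType) -> Result<SketchKind, String> {
    match typ {
        SketchType::Integer | SketchType::Field | SketchType::Bool => Ok(SketchKind::Numeric(typ)),
        SketchType::Other(name) => Err(format!(
            "{name} is not supported; the only supported types of numeric generics are integers, fields, and booleans"
        )),
    }
}

fn main() {
    // For example, a string-typed numeric generic is rejected while an integer one is accepted.
    assert!(numeric_generic_kind(SketchType::Other("str".into())).is_err());
    assert!(matches!(numeric_generic_kind(SketchType::Integer), Ok(SketchKind::Numeric(_))));
}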
@@ -228,6 +230,15 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { DefCollectorErrorKind::MacroError(macro_error) => { Diagnostic::simple_error(macro_error.primary_message.clone(), macro_error.secondary_message.clone().unwrap_or_default(), macro_error.span.unwrap_or_default()) }, + DefCollectorErrorKind::UnsupportedNumericGenericType { ident, typ } => { + let name = &ident.0.contents; + + Diagnostic::simple_error( + format!("{name} has a type of {typ}. The only supported types of numeric generics are integers, fields, and booleans"), + "Unsupported numeric generic type".to_string(), + ident.0.span(), + ) + } } } } diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 55dc22d6c5d..71fdc6b30d2 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -5,17 +5,21 @@ pub mod resolution; pub mod scope; pub mod type_check; +use crate::ast::UnresolvedGenerics; use crate::debug::DebugInstrumenter; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; use crate::parser::ParserError; -use crate::ParsedModule; +use crate::{Generics, Kind, ParsedModule, ResolvedGeneric, Type, TypeVariable}; +use def_collector::dc_crate::CompilationError; use def_map::{Contract, CrateDefMap}; -use fm::FileManager; +use fm::{FileId, FileManager}; +use iter_extended::vecmap; use noirc_errors::Location; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap}; +use std::rc::Rc; use self::def_map::TestFunction; @@ -80,7 +84,7 @@ impl Context<'_, '_> { } } - pub fn parsed_file_results(&self, file_id: fm::FileId) -> (ParsedModule, Vec<ParserError>) { + pub fn parsed_file_results(&self, file_id: FileId) -> (ParsedModule, Vec<ParserError>) { self.parsed_files.get(&file_id).expect("noir file wasn't parsed").clone() } @@ -256,4 +260,34 @@ impl Context<'_, '_> { pub fn module(&self, module_id: def_map::ModuleId) -> &def_map::ModuleData { module_id.module(&self.def_maps) } + + /// Generics need to be resolved before elaboration to distinguish + /// between normal and numeric generics. + /// This method is expected to be used during definition collection. + /// Each result is returned in a list rather than as a single result, so as to allow + /// definition collection to provide an error for each ill-formed numeric generic.
+ pub(crate) fn resolve_generics( + &mut self, + generics: &UnresolvedGenerics, + errors: &mut Vec<(CompilationError, FileId)>, + file_id: FileId, + ) -> Generics { + vecmap(generics, |generic| { + // Map the generic to a fresh type variable + let id = self.def_interner.next_type_variable_id(); + let type_var = TypeVariable::unbound(id); + let ident = generic.ident(); + let span = ident.0.span(); + + // Check for name collisions of this generic + let name = Rc::new(ident.0.contents.clone()); + + let kind = generic.kind().unwrap_or_else(|err| { + errors.push((err.into(), file_id)); + Kind::Numeric(Box::new(Type::Error)) + }); + + ResolvedGeneric { name, type_var, kind, span } + }) + } } diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index dad0149ec73..bf6de746791 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -98,6 +98,12 @@ pub enum ResolverError { NoPredicatesAttributeOnUnconstrained { ident: Ident }, #[error("#[fold] attribute is only allowed on constrained functions")] FoldAttributeOnUnconstrained { ident: Ident }, + #[error("The only supported types of numeric generics are integers, fields, and booleans")] + UnsupportedNumericGenericType { ident: Ident, typ: Type }, + #[error("Numeric generics should be explicit")] + UseExplicitNumericGeneric { ident: Ident }, + #[error("expected type, found numeric generic parameter")] + NumericGenericUsedForType { name: String, span: Span }, #[error("Invalid array length construction")] ArrayLengthInterpreter { error: InterpreterError }, #[error("The unquote operator '$' can only be used within a quote expression")] @@ -400,6 +406,31 @@ impl<'a> From<&'a ResolverError> for Diagnostic { diag.add_note("The `#[fold]` attribute specifies whether a constrained function should be treated as a separate circuit rather than inlined into the program entry point".to_owned()); diag } + ResolverError::UnsupportedNumericGenericType { ident , typ } => { + let name = &ident.0.contents; + + Diagnostic::simple_error( + format!("{name} has a type of {typ}. The only supported types of numeric generics are integers, fields, and booleans."), + "Unsupported numeric generic type".to_string(), + ident.0.span(), + ) + } + ResolverError::UseExplicitNumericGeneric { ident } => { + let name = &ident.0.contents; + + Diagnostic::simple_warning( + String::from("Noir now supports explicit numeric generics. 
Support for implicit numeric generics will be removed in the following release."), + format!("Numeric generic `{name}` should now be specified with `let {name}: `"), + ident.0.span(), + ) + } + ResolverError::NumericGenericUsedForType { name, span } => { + Diagnostic::simple_error( + format!("expected type, found numeric generic parameter {name}"), + String::from("not a type"), + *span, + ) + } ResolverError::ArrayLengthInterpreter { error } => Diagnostic::from(error), ResolverError::UnquoteUsedOutsideQuote { span } => { Diagnostic::simple_error( diff --git a/compiler/noirc_frontend/src/hir/resolution/functions.rs b/compiler/noirc_frontend/src/hir/resolution/functions.rs index e63de9b9173..fe46796ed24 100644 --- a/compiler/noirc_frontend/src/hir/resolution/functions.rs +++ b/compiler/noirc_frontend/src/hir/resolution/functions.rs @@ -1,8 +1,7 @@ -use std::{collections::BTreeMap, rc::Rc}; +use std::collections::BTreeMap; use fm::FileId; use iter_extended::vecmap; -use noirc_errors::Span; use crate::{ graph::CrateId, @@ -11,10 +10,10 @@ use crate::{ def_map::{CrateDefMap, ModuleId}, }, node_interner::{FuncId, NodeInterner, TraitImplId}, - Type, TypeVariable, + ResolvedGeneric, Type, }; -use super::{path_resolver::StandardPathResolver, resolver::Resolver}; +use super::{path_resolver::StandardPathResolver, Resolver}; #[allow(clippy::too_many_arguments)] pub(crate) fn resolve_function_set( @@ -24,7 +23,7 @@ pub(crate) fn resolve_function_set( mut unresolved_functions: UnresolvedFunctions, self_type: Option, trait_impl_id: Option, - impl_generics: Vec<(Rc, TypeVariable, Span)>, + impl_generics: Vec, errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { let file_id = unresolved_functions.file_id; diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index f3dae31e1e0..282ee8a23c2 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -88,15 +88,12 @@ pub fn resolve_import( import_directive: &ImportDirective, def_maps: &BTreeMap, ) -> Result { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - let module_scope = import_directive.module_id; let NamespaceResolution { module_id: resolved_module, namespace: resolved_namespace, mut error, - } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps, allow_contracts)?; + } = resolve_path_to_ns(import_directive, crate_id, crate_id, def_maps)?; let name = resolve_path_name(import_directive); @@ -129,20 +126,11 @@ pub fn resolve_import( }) } -fn allow_referencing_contracts( - def_maps: &BTreeMap, - krate: CrateId, - local_id: LocalModuleId, -) -> bool { - ModuleId { krate, local_id }.module(def_maps).is_contract -} - fn resolve_path_to_ns( import_directive: &ImportDirective, crate_id: CrateId, importing_crate: CrateId, def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { let import_path = &import_directive.path.segments; let def_map = &def_maps[&crate_id]; @@ -150,13 +138,7 @@ fn resolve_path_to_ns( match import_directive.path.kind { crate::ast::PathKind::Crate => { // Resolve from the root of the crate - resolve_path_from_crate_root( - crate_id, - importing_crate, - import_path, - def_maps, - allow_contracts, - ) + resolve_path_from_crate_root(crate_id, importing_crate, import_path, def_maps) } crate::ast::PathKind::Plain => { // There is a possibility that the import path is empty @@ -168,7 +150,6 @@ fn 
resolve_path_to_ns( import_path, import_directive.module_id, def_maps, - allow_contracts, ); } @@ -177,13 +158,7 @@ fn resolve_path_to_ns( let first_segment = import_path.first().expect("ice: could not fetch first segment"); if current_mod.find_name(first_segment).is_none() { // Resolve externally when first segment is unresolved - return resolve_external_dep( - def_map, - import_directive, - def_maps, - allow_contracts, - importing_crate, - ); + return resolve_external_dep(def_map, import_directive, def_maps, importing_crate); } resolve_name_in_module( @@ -192,17 +167,12 @@ fn resolve_path_to_ns( import_path, import_directive.module_id, def_maps, - allow_contracts, ) } - crate::ast::PathKind::Dep => resolve_external_dep( - def_map, - import_directive, - def_maps, - allow_contracts, - importing_crate, - ), + crate::ast::PathKind::Dep => { + resolve_external_dep(def_map, import_directive, def_maps, importing_crate) + } } } @@ -212,7 +182,6 @@ fn resolve_path_from_crate_root( import_path: &[Ident], def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { resolve_name_in_module( crate_id, @@ -220,7 +189,6 @@ fn resolve_path_from_crate_root( import_path, def_maps[&crate_id].root, def_maps, - allow_contracts, ) } @@ -230,7 +198,6 @@ fn resolve_name_in_module( import_path: &[Ident], starting_mod: LocalModuleId, def_maps: &BTreeMap, - allow_contracts: bool, ) -> NamespaceResolutionResult { let def_map = &def_maps[&krate]; let mut current_mod_id = ModuleId { krate, local_id: starting_mod }; @@ -293,10 +260,6 @@ fn resolve_name_in_module( return Err(PathResolutionError::Unresolved(current_segment.clone())); } - // Check if it is a contract and we're calling from a non-contract context - if current_mod.is_contract && !allow_contracts { - return Err(PathResolutionError::ExternalContractUsed(current_segment.clone())); - } current_ns = found_ns; } @@ -314,7 +277,6 @@ fn resolve_external_dep( current_def_map: &CrateDefMap, directive: &ImportDirective, def_maps: &BTreeMap, - allow_contracts: bool, importing_crate: CrateId, ) -> NamespaceResolutionResult { // Use extern_prelude to get the dep @@ -344,7 +306,7 @@ fn resolve_external_dep( is_prelude: false, }; - resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps, allow_contracts) + resolve_path_to_ns(&dep_directive, dep_module.krate, importing_crate, def_maps) } // Issue an error if the given private function is being called from a non-child module, or diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 97de66be817..6d547aaf0b7 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -17,7 +17,7 @@ use crate::hir_def::expr::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCapturedVar, HirCastExpression, HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, HirMemberAccess, - HirMethodCallExpression, HirPrefixExpression, HirQuoted, ImplKind, + HirMethodCallExpression, HirPrefixExpression, ImplKind, }; use crate::hir_def::function::FunctionBody; @@ -32,8 +32,9 @@ use crate::ast::{ ArrayLiteral, BinaryOpKind, BlockExpression, Expression, ExpressionKind, ForRange, FunctionDefinition, FunctionKind, FunctionReturnType, Ident, ItemVisibility, LValue, LetStatement, Literal, NoirFunction, NoirStruct, NoirTypeAlias, Param, Path, PathKind, Pattern, - Statement, 
StatementKind, TraitBound, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, - UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, Visibility, ERROR_IDENT, + Statement, StatementKind, TraitBound, UnaryOp, UnresolvedGeneric, UnresolvedGenerics, + UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, + Visibility, ERROR_IDENT, }; use crate::graph::CrateId; use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; @@ -47,7 +48,10 @@ use crate::node_interner::{ DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, StructId, TraitId, TraitImplId, TraitMethodId, TypeAliasId, }; -use crate::{Generics, Shared, StructType, Type, TypeAlias, TypeVariable, TypeVariableKind}; +use crate::{ + GenericTypeVars, Generics, Kind, ResolvedGeneric, Shared, StructType, Type, TypeAlias, + TypeVariable, TypeVariableKind, +}; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span, Spanned}; @@ -131,7 +135,7 @@ pub struct Resolver<'a> { /// unique type variables if we're resolving a struct. Empty otherwise. /// This is a Vec rather than a map to preserve the order a functions generics /// were declared in. - generics: Vec<(Rc, TypeVariable, Span)>, + generics: Vec, /// When resolving lambda expressions, we need to keep track of the variables /// that are captured. We do this in order to create the hidden environment @@ -223,7 +227,8 @@ impl<'a> Resolver<'a> { let mut new_generic_ident: Ident = format!("T{}_impl_{}", func_id, path.as_string()).into(); let mut new_generic_path = Path::from_ident(new_generic_ident.clone()); - while impl_trait_generics.contains(&new_generic_ident) + let new_generic = UnresolvedGeneric::from(new_generic_ident.clone()); + while impl_trait_generics.contains(&new_generic) || self.lookup_generic_or_global_type(&new_generic_path).is_some() { new_generic_ident = @@ -231,7 +236,7 @@ impl<'a> Resolver<'a> { new_generic_path = Path::from_ident(new_generic_ident.clone()); counter += 1; } - impl_trait_generics.insert(new_generic_ident.clone()); + impl_trait_generics.insert(UnresolvedGeneric::from(new_generic_ident.clone())); let is_synthesized = true; let new_generic_type_data = @@ -249,7 +254,7 @@ impl<'a> Resolver<'a> { }; parameter.typ.typ = new_generic_type_data; - func.def.generics.push(new_generic_ident); + func.def.generics.push(new_generic_ident.into()); func.def.where_clause.push(new_trait_constraint); } } @@ -591,7 +596,7 @@ impl<'a> Resolver<'a> { let env = Box::new(self.resolve_type_inner(*env)); match *env { - Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _, _) => { Type::Function(args, ret, env) } _ => { @@ -607,6 +612,7 @@ impl<'a> Resolver<'a> { Type::MutableReference(Box::new(self.resolve_type_inner(*element))) } Parenthesized(typ) => self.resolve_type_inner(*typ), + Resolved(id) => self.interner.get_quoted_type(id).clone(), }; if let Type::Struct(_, _) = resolved_type { @@ -621,8 +627,8 @@ impl<'a> Resolver<'a> { resolved_type } - fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { - self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + fn find_generic(&self, target_name: &str) -> Option<&ResolvedGeneric> { + self.generics.iter().find(|generic| generic.name.as_ref() == target_name) } fn resolve_named_type(&mut self, path: Path, args: Vec) -> Type { @@ -747,9 +753,15 @@ impl<'a> Resolver<'a> { fn lookup_generic_or_global_type(&mut self, 
path: &Path) -> Option { if path.segments.len() == 1 { let name = &path.last_segment().0.contents; - if let Some((name, var, _)) = self.find_generic(name) { - return Some(Type::NamedGeneric(var.clone(), name.clone())); - } + if let Some(generic) = self.find_generic(name) { + // We always insert a `TypeKind::Normal` as we do not support explicit numeric generics + // in the resolver + return Some(Type::NamedGeneric( + generic.type_var.clone(), + generic.name.clone(), + Kind::Normal, + )); + }; } // If we cannot find a local generic of the same name, try to look up a global @@ -848,14 +860,14 @@ impl<'a> Resolver<'a> { /// Return the current generics. /// Needed to keep referring to the same type variables across many /// methods in a single impl. - pub fn get_generics(&self) -> &[(Rc, TypeVariable, Span)] { + pub fn get_generics(&self) -> &[ResolvedGeneric] { &self.generics } /// Set the current generics that are in scope. /// Unlike add_generics, this function will not create any new type variables, /// opting to reuse the existing ones it is directly given. - pub fn set_generics(&mut self, generics: Vec<(Rc, TypeVariable, Span)>) { + pub fn set_generics(&mut self, generics: Vec) { self.generics = generics; } @@ -875,48 +887,79 @@ impl<'a> Resolver<'a> { // Map the generic to a fresh type variable let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - let span = generic.0.span(); + let ident = generic.ident(); + let span = ident.0.span(); // Check for name collisions of this generic - let name = Rc::new(generic.0.contents.clone()); + let name = Rc::new(ident.0.contents.clone()); - if let Some((_, _, first_span)) = self.find_generic(&name) { + let resolved_generic = ResolvedGeneric { + name: name.clone(), + type_var: typevar, + // We only support numeric generics in the elaborator + kind: Kind::Normal, + span, + }; + if let Some(generic) = self.find_generic(&name) { self.errors.push(ResolverError::DuplicateDefinition { - name: generic.0.contents.clone(), - first_span: *first_span, + name: ident.0.contents.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((name, typevar.clone(), span)); + self.generics.push(resolved_generic.clone()); } - typevar + resolved_generic }) } /// Add the given existing generics to scope. /// This is useful for adding the same generics to many items. E.g. apply impl generics /// to each function in the impl or trait generics to each item in the trait. 
- pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { - assert_eq!(names.len(), generics.len()); + pub fn add_existing_generics( + &mut self, + unresolved_generics: &UnresolvedGenerics, + generics: &GenericTypeVars, + ) { + assert_eq!(unresolved_generics.len(), generics.len()); - for (name, typevar) in names.iter().zip(generics) { - self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); + for (unresolved_generic, typevar) in unresolved_generics.iter().zip(generics) { + self.add_existing_generic( + unresolved_generic, + unresolved_generic.span(), + typevar.clone(), + ); } } - pub fn add_existing_generic(&mut self, name: &str, span: Span, typevar: TypeVariable) { + pub fn add_existing_generic( + &mut self, + unresolved_generic: &UnresolvedGeneric, + span: Span, + typevar: TypeVariable, + ) { + let name = &unresolved_generic.ident().0.contents; + // Check for name collisions of this generic - let rc_name = Rc::new(name.to_owned()); + let rc_name = Rc::new(name.clone()); - if let Some((_, _, first_span)) = self.find_generic(&rc_name) { + if let Some(generic) = self.find_generic(&rc_name) { self.errors.push(ResolverError::DuplicateDefinition { - name: name.to_owned(), - first_span: *first_span, + name: name.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((rc_name, typevar, span)); + let resolved_generic = ResolvedGeneric { + name: rc_name, + type_var: typevar.clone(), + kind: unresolved_generic + .kind() + .expect("ICE: Deprecated code should only support normal kinds"), + span, + }; + self.generics.push(resolved_generic); } } @@ -992,7 +1035,7 @@ impl<'a> Resolver<'a> { // indicate we should code generate in the same way. Thus, we unify the attributes into one flag here. let has_inline_attribute = has_no_predicates_attribute || should_fold; - let generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let generics = vecmap(&self.generics, |generic| generic.type_var.clone()); let mut parameters = vec![]; let mut parameter_types = vec![]; @@ -1053,8 +1096,8 @@ impl<'a> Resolver<'a> { let direct_generics = func.def.generics.iter(); let direct_generics = direct_generics - .filter_map(|generic| self.find_generic(&generic.0.contents)) - .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .filter_map(|generic| self.find_generic(&generic.ident().0.contents)) + .map(|ResolvedGeneric { name, type_var, .. }| (name.clone(), type_var.clone())) .collect(); FuncMeta { @@ -1071,6 +1114,7 @@ impl<'a> Resolver<'a> { trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), is_entry_point: self.is_entry_point_function(func), has_inline_attribute, + source_crate: self.path_resolver.module_id().krate, // These fields are only used by the elaborator all_generics: Vec::new(), @@ -1107,6 +1151,7 @@ impl<'a> Resolver<'a> { !func.def.is_unconstrained } + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this method in favor of explicit numeric generics fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { if self.generics.is_empty() { return; @@ -1119,12 +1164,12 @@ impl<'a> Resolver<'a> { // We can fail to find the generic in self.generics if it is an implicit one created // by the compiler. This can happen when, e.g. eliding array lengths using the slice // syntax [T]. - if let Some((name, _, span)) = - self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + if let Some(ResolvedGeneric { name, span, .. 
}) = + self.generics.iter().find(|generic| generic.name.as_ref() == &name_to_find) { let ident = Ident::new(name.to_string(), *span); let definition = DefinitionKind::GenericType(type_variable); - self.add_variable_decl_inner(ident, false, false, false, definition); + self.add_variable_decl_inner(ident.clone(), false, false, false, definition); } } } @@ -1150,7 +1195,7 @@ impl<'a> Resolver<'a> { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Quoted(_) | Type::Forall(_, _) => (), @@ -1161,7 +1206,7 @@ impl<'a> Resolver<'a> { } Type::Array(length, element_type) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(element_type, found); @@ -1186,7 +1231,7 @@ impl<'a> Resolver<'a> { Type::Struct(struct_type, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if struct_type.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1197,7 +1242,7 @@ impl<'a> Resolver<'a> { } Type::Alias(alias, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if alias.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1208,12 +1253,12 @@ impl<'a> Resolver<'a> { } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), Type::String(length) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } } Type::FmtString(length, fields) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(fields, found); @@ -1634,10 +1679,7 @@ impl<'a> Resolver<'a> { ExpressionKind::Parenthesized(sub_expr) => return self.resolve_expression(*sub_expr), // The quoted expression isn't resolved since we don't want errors if variables aren't defined - ExpressionKind::Quote(block, _) => { - let quoted = HirQuoted { quoted_block: block, unquoted_exprs: Vec::new() }; - HirExpression::Quote(quoted) - } + ExpressionKind::Quote(block) => HirExpression::Quote(block), ExpressionKind::Comptime(block, _) => { HirExpression::Comptime(self.resolve_block(block)) } @@ -1648,9 +1690,6 @@ impl<'a> Resolver<'a> { self.push_err(ResolverError::UnquoteUsedOutsideQuote { span: expr.span }); HirExpression::Literal(HirLiteral::Unit) } - ExpressionKind::UnquoteMarker(index) => { - unreachable!("UnquoteMarker({index}) remaining in runtime code") - } }; // If these lines are ever changed, make sure to change the early return @@ -1880,7 +1919,9 @@ impl<'a> Resolver<'a> { let constraint = TraitConstraint { typ: self.self_type.clone()?, - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); @@ -1908,7 +1949,9 @@ impl<'a> 
Resolver<'a> { the_trait.self_type_typevar.clone(), TypeVariableKind::Normal, ), - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs index 4c360731711..a6b732439b0 100644 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -4,7 +4,7 @@ use fm::FileId; use iter_extended::vecmap; use noirc_errors::Location; -use crate::ast::{ItemVisibility, Path, TraitItem}; +use crate::ast::{Ident, ItemVisibility, Path, TraitItem, UnresolvedGeneric}; use crate::{ graph::CrateId, hir::{ @@ -17,7 +17,7 @@ use crate::{ }, hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, node_interner::{FuncId, NodeInterner, TraitId}, - Generics, Shared, Type, TypeVariable, TypeVariableKind, + GenericTypeVars, Shared, Type, TypeVariableKind, }; use super::{ @@ -36,14 +36,18 @@ pub(crate) fn resolve_traits( crate_id: CrateId, ) -> Vec<(CompilationError, FileId)> { for (trait_id, unresolved_trait) in &traits { - context.def_interner.push_empty_trait(*trait_id, unresolved_trait); + context.def_interner.push_empty_trait(*trait_id, unresolved_trait, vec![]); } let mut all_errors = Vec::new(); for (trait_id, unresolved_trait) in traits { - let generics = vecmap(&unresolved_trait.trait_def.generics, |_| { - TypeVariable::unbound(context.def_interner.next_type_variable_id()) - }); + let file_id = context.def_maps[&crate_id].file_id(unresolved_trait.module_id); + let generics = context.resolve_generics( + &unresolved_trait.trait_def.generics, + &mut all_errors, + file_id, + ); + let generic_type_vars = generics.iter().map(|generic| generic.type_var.clone()).collect(); // Resolve order // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) @@ -51,8 +55,13 @@ pub(crate) fn resolve_traits( // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); // 3. 
Trait Methods - let (methods, errors) = - resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait, &generics); + let (methods, errors) = resolve_trait_methods( + context, + trait_id, + crate_id, + &unresolved_trait, + &generic_type_vars, + ); all_errors.extend(errors); @@ -93,7 +102,7 @@ fn resolve_trait_methods( trait_id: TraitId, crate_id: CrateId, unresolved_trait: &UnresolvedTrait, - trait_generics: &Generics, + trait_generics: &GenericTypeVars, ) -> (Vec, Vec<(CompilationError, FileId)>) { let interner = &mut context.def_interner; let def_maps = &mut context.def_maps; @@ -126,7 +135,11 @@ fn resolve_trait_methods( resolver.add_generics(generics); resolver.add_existing_generics(&unresolved_trait.trait_def.generics, trait_generics); - resolver.add_existing_generic("Self", name_span, self_typevar); + resolver.add_existing_generic( + &UnresolvedGeneric::Variable(Ident::from("Self")), + name_span, + self_typevar, + ); resolver.set_self_type(Some(self_type.clone())); let func_id = unresolved_trait.method_ids[&name.0.contents]; @@ -143,7 +156,7 @@ fn resolve_trait_methods( let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = vecmap(resolver.get_generics(), |(_, type_var, _)| type_var.clone()); + let generics = vecmap(resolver.get_generics(), |generic| generic.type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -464,7 +477,7 @@ pub(crate) fn resolve_trait_impls( methods: vecmap(&impl_methods, |(_, func_id)| *func_id), }); - let impl_generics = vecmap(impl_generics, |(_, type_variable, _)| type_variable); + let impl_generics = vecmap(impl_generics, |generic| generic.type_var); if let Err((prev_span, prev_file)) = interner.add_trait_implementation( self_type.clone(), diff --git a/compiler/noirc_frontend/src/hir/type_check/errors.rs b/compiler/noirc_frontend/src/hir/type_check/errors.rs index bd32ba2fce5..f18e8a9e843 100644 --- a/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -40,6 +40,8 @@ pub enum TypeCheckError { TypeMismatch { expected_typ: String, expr_typ: String, expr_span: Span }, #[error("Expected type {expected} is not the same as {actual}")] TypeMismatchWithSource { expected: Type, actual: Type, span: Span, source: Source }, + #[error("Expected type {expected_kind:?} is not the same as {expr_kind:?}")] + TypeKindMismatch { expected_kind: String, expr_kind: String, expr_span: Span }, #[error("Expected {expected:?} found {found:?}")] ArityMisMatch { expected: usize, found: usize, span: Span }, #[error("Return type in a function cannot be public")] @@ -143,7 +145,7 @@ pub enum TypeCheckError { }, #[error("Strings do not support indexed assignment")] StringIndexAssign { span: Span }, - #[error("Macro calls may only return Expr values")] + #[error("Macro calls may only return `Quoted` values")] MacroReturningNonExpr { typ: Type, span: Span }, } @@ -178,6 +180,13 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { *expr_span, ) } + TypeCheckError::TypeKindMismatch { expected_kind, expr_kind, expr_span } => { + Diagnostic::simple_error( + format!("Expected kind {expected_kind}, found kind {expr_kind}"), + String::new(), + *expr_span, + ) + } TypeCheckError::TraitMethodParameterTypeMismatch { method_name, expected_typ, actual_typ, parameter_index, parameter_span } => { Diagnostic::simple_error( format!("Parameter #{parameter_index} of method 
`{method_name}` must be of type {expected_typ}, not {actual_typ}"), @@ -338,8 +347,8 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { Diagnostic::simple_error(msg, "".into(), *span) }, TypeCheckError::MacroReturningNonExpr { typ, span } => Diagnostic::simple_error( - format!("Expected macro call to return an `Expr` but found a(n) {typ}"), - "Macro calls must return quoted expressions, otherwise there is no code to insert".into(), + format!("Expected macro call to return a `Quoted` but found a(n) `{typ}`"), + "Macro calls must return quoted values, otherwise there is no code to insert".into(), *span, ), } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 46e8db8f5ff..77861a6d8f8 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -307,13 +307,13 @@ impl<'interner> TypeChecker<'interner> { Type::Function(params, Box::new(lambda.return_type), Box::new(env_type)) } - HirExpression::Quote(_) => Type::Quoted(crate::QuotedType::Expr), + HirExpression::Quote(_) => Type::Quoted(crate::QuotedType::Quoted), HirExpression::Comptime(block) => self.check_block(block), // Unquote should be inserted & removed by the comptime interpreter. // Even if we allowed it here, we wouldn't know what type to give to the result. HirExpression::Unquote(block) => { - unreachable!("Unquote remaining during type checking {block}") + unreachable!("Unquote remaining during type checking {block:?}") } }; @@ -340,7 +340,7 @@ impl<'interner> TypeChecker<'interner> { // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime if is_current_func_constrained && is_unconstrained_call { for (typ, _, _) in args.iter() { - if matches!(&typ.follow_bindings(), Type::MutableReference(_)) { + if !typ.is_valid_for_unconstrained_boundary() { self.errors.push(TypeCheckError::ConstrainedReferenceToUnconstrained { span }); } } @@ -404,8 +404,8 @@ impl<'interner> TypeChecker<'interner> { for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { // Avoid binding t = t - if !arg.occurs(param.id()) { - bindings.insert(param.id(), (param.clone(), arg.clone())); + if !arg.occurs(param.type_var.id()) { + bindings.insert(param.type_var.id(), (param.type_var.clone(), arg.clone())); } } @@ -1025,7 +1025,7 @@ impl<'interner> TypeChecker<'interner> { }); None } - Type::NamedGeneric(_, _) => { + Type::NamedGeneric(_, _, _) => { let func_meta = self.interner.function_meta( &self.current_function.expect("unexpected method outside a function"), ); diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 98e1cd9c72a..1d3c7fcda9b 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -22,7 +22,7 @@ use crate::{ traits::TraitConstraint, }, node_interner::{ExprId, FuncId, GlobalId, NodeInterner}, - Type, TypeBindings, + Kind, Type, TypeBindings, }; pub use self::errors::Source; @@ -263,7 +263,7 @@ pub(crate) fn check_trait_impl_method_matches_declaration( // Substitute each generic on the trait with the corresponding generic on the impl for (generic, arg) in trait_info.generics.iter().zip(&impl_.trait_generics) { - bindings.insert(generic.id(), (generic.clone(), arg.clone())); + bindings.insert(generic.type_var.id(), (generic.type_var.clone(), arg.clone())); } // If this is None, the trait does not have the 
corresponding function. @@ -284,7 +284,7 @@ pub(crate) fn check_trait_impl_method_matches_declaration( for ((_, trait_fn_generic), (name, impl_fn_generic)) in trait_fn_meta.direct_generics.iter().zip(&meta.direct_generics) { - let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone()); + let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone(), Kind::Normal); bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg)); } @@ -561,6 +561,7 @@ pub mod test { all_generics: Vec::new(), parameter_idents: Vec::new(), function_body: FunctionBody::Resolved, + source_crate: CrateId::dummy_id(), }; interner.push_fn_meta(func_meta, func_id); @@ -716,13 +717,15 @@ pub mod test { let mut interner = NodeInterner::default(); interner.populate_dummy_operator_traits(); - assert_eq!( - errors.len(), - 0, - "expected 0 parser errors, but got {}, errors: {:?}", - errors.len(), - errors - ); + if !errors.iter().all(|error| error.is_warning()) { + assert_eq!( + errors.len(), + 0, + "expected 0 parser errors, but got {}, errors: {:?}", + errors.len(), + errors + ); + } let func_ids = btree_map(&func_namespace, |name| { (name.to_string(), interner.push_test_function_definition(name.into())) diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index 9547aef866b..8de4f118774 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -4,6 +4,7 @@ use noirc_errors::Location; use crate::ast::{BinaryOp, BinaryOpKind, Ident, UnaryOp}; use crate::node_interner::{DefinitionId, ExprId, FuncId, NodeInterner, StmtId, TraitMethodId}; +use crate::token::Tokens; use crate::Shared; use super::stmt::HirPattern; @@ -33,8 +34,8 @@ pub enum HirExpression { If(HirIfExpression), Tuple(Vec), Lambda(HirLambda), - Quote(HirQuoted), - Unquote(crate::ast::BlockExpression), + Quote(Tokens), + Unquote(Tokens), Comptime(HirBlockExpression), Error, } @@ -291,13 +292,3 @@ pub struct HirLambda { pub body: ExprId, pub captures: Vec, } - -#[derive(Debug, Clone)] -pub struct HirQuoted { - pub quoted_block: crate::ast::BlockExpression, - - /// Each expression here corresponds to a `ExpressionKind::UnquoteMarker(index)` in `quoted_block`. - /// The values of these expressions after evaluation will be inlined into the position - /// indicated by their corresponding UnquoteMarker with that index. - pub unquoted_exprs: Vec, -} diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index 53eabe21081..a4a9f855c62 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -7,9 +7,10 @@ use super::expr::{HirBlockExpression, HirExpression, HirIdent}; use super::stmt::HirPattern; use super::traits::TraitConstraint; use crate::ast::{FunctionKind, FunctionReturnType, Visibility}; +use crate::graph::CrateId; use crate::macros_api::BlockExpression; use crate::node_interner::{ExprId, NodeInterner, TraitImplId}; -use crate::{Type, TypeVariable}; +use crate::{ResolvedGeneric, Type, TypeVariable}; /// A Hir function is a block expression /// with a list of statements @@ -118,7 +119,7 @@ pub struct FuncMeta { /// from outer scopes, such as those introduced by an impl. /// This is stored when the FuncMeta is first created to later be used to set the current /// generics when the function's body is later resolved. 
- pub all_generics: Vec<(Rc<String>, TypeVariable, Span)>, + pub all_generics: Vec<ResolvedGeneric>, pub location: Location, @@ -145,6 +146,9 @@ pub struct FuncMeta { pub has_inline_attribute: bool, pub function_body: FunctionBody, + + /// The crate this function was defined in + pub source_crate: CrateId, } #[derive(Debug, Clone)] diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index ce36a22cf88..0a7797c2bfb 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -82,7 +82,7 @@ pub enum Type { /// NamedGenerics are the 'T' or 'U' in a user-defined generic function /// like `fn foo(...) {}`. Unlike TypeVariables, they cannot be bound over. - NamedGeneric(TypeVariable, Rc<String>), + NamedGeneric(TypeVariable, Rc<String>, Kind), /// A function with arguments, a return type and environment. /// the environment should be `Unit` by default, @@ -98,7 +98,7 @@ pub enum Type { /// but it makes handling them both easier. The TypeVariableId should /// never be bound over during type checking, but during monomorphization it /// will be and thus needs the full TypeVariable link. - Forall(Generics, Box<Type>), + Forall(GenericTypeVars, Box<Type>), /// A type-level integer. Included to let an Array's size type variable /// bind to an integer without special checks to bind it to a non-type. @@ -142,7 +142,7 @@ impl Type { | Type::Unit | Type::TypeVariable(_, _) | Type::TraitAsType(..) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::MutableReference(_) | Type::Forall(_, _) @@ -187,12 +187,34 @@ impl Type { } } +/// A Kind is the type of a Type. These are used since only certain kinds of types are allowed in +/// certain positions. +/// +/// For example, the type of a struct field or a function parameter is expected to be +/// a type of kind * (represented here as `Normal`). Types used in positions where a number +/// is expected (such as in an array length position) are expected to be of kind `Kind::Numeric`. +#[derive(PartialEq, Eq, Clone, Hash, Debug)] +pub enum Kind { + Normal, + Numeric(Box<Type>), +} + +impl std::fmt::Display for Kind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Kind::Normal => write!(f, "normal"), + Kind::Numeric(typ) => write!(f, "numeric {}", typ), + } + } +} + #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] pub enum QuotedType { Expr, - TypeDefinition, + Quoted, TopLevelItem, Type, + TypeDefinition, } /// A list of TypeVariableIds to bind to a type. Storing the @@ -221,7 +243,22 @@ pub struct StructType { } /// Corresponds to generic lists such as `` in the source program. -pub type Generics = Vec<TypeVariable>; +/// Used mainly for resolved types which no longer need information such +/// as names or kinds. +pub type GenericTypeVars = Vec<TypeVariable>; + +/// Corresponds to generic lists such as `` with additional +/// information gathered during name resolution that is necessary for correctly resolving types.
+pub type Generics = Vec; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ResolvedGeneric { + pub name: Rc, + pub type_var: TypeVariable, + pub kind: Kind, + pub span: Span, +} impl std::hash::Hash for StructType { fn hash(&self, state: &mut H) { @@ -270,7 +307,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); (typ.substitute(&substitutions), i) @@ -286,7 +323,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); vecmap(&self.fields, |(name, typ)| { @@ -295,15 +332,33 @@ impl StructType { }) } + /// Returns the name and raw types of each field of this type. + /// This will not substitute any generic arguments so a generic field like `x` + /// in `struct Foo { x: T }` will return a `("x", T)` pair. + /// + /// This method is almost never what is wanted for type checking or monomorphization, + /// prefer to use `get_fields` whenever possible. + pub fn get_fields_as_written(&self) -> Vec<(String, Type)> { + vecmap(&self.fields, |(name, typ)| (name.0.contents.clone(), typ.clone())) + } + pub fn field_names(&self) -> BTreeSet { self.fields.iter().map(|(name, _)| name.clone()).collect() } + /// Search the fields of a struct for any types with a `TypeKind::Numeric` + pub fn find_numeric_generics_in_fields(&self, found_names: &mut Vec) { + for (_, field) in self.fields.iter() { + field.find_numeric_type_vars(found_names); + } + } + /// True if the given index is the same index as a generic type of this struct /// which is expected to be a numeric generic. /// This is needed because we infer type kinds in Noir and don't have extensive kind checking. + /// TODO(https://github.com/noir-lang/noir/issues/5156): This is outdated and we should remove this implicit searching for numeric generics pub fn generic_is_numeric(&self, index_of_generic: usize) -> bool { - let target_id = self.generics[index_of_generic].0; + let target_id = self.generics[index_of_generic].type_var.id(); self.fields.iter().any(|(_, field)| field.contains_numeric_typevar(target_id)) } @@ -372,7 +427,7 @@ impl TypeAlias { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); self.typ.substitute(&substitutions) @@ -382,7 +437,7 @@ impl TypeAlias { /// which is expected to be a numeric generic. /// This is needed because we infer type kinds in Noir and don't have extensive kind checking. 
pub fn generic_is_numeric(&self, index_of_generic: usize) -> bool { - let target_id = self.generics[index_of_generic].0; + let target_id = self.generics[index_of_generic].type_var.id(); self.typ.contains_numeric_typevar(target_id) } } @@ -492,7 +547,7 @@ impl TypeVariable { TypeBinding::Unbound(id) => *id, }; - assert!(!typ.occurs(id)); + assert!(!typ.occurs(id), "{self:?} occurs within {typ:?}"); *self.1.borrow_mut() = TypeBinding::Bound(typ); } @@ -630,7 +685,7 @@ impl Type { fn contains_numeric_typevar(&self, target_id: TypeVariableId) -> bool { // True if the given type is a NamedGeneric with the target_id let named_generic_id_matches_target = |typ: &Type| { - if let Type::NamedGeneric(type_variable, _) = typ { + if let Type::NamedGeneric(type_variable, _, _) = typ { match &*type_variable.borrow() { TypeBinding::Bound(_) => { unreachable!("Named generics should not be bound until monomorphization") @@ -650,7 +705,7 @@ impl Type { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Forall(_, _) | Type::Quoted(_) => false, @@ -694,6 +749,85 @@ impl Type { } } + /// TODO(https://github.com/noir-lang/noir/issues/5156): Remove with explicit numeric generics + pub fn find_numeric_type_vars(&self, found_names: &mut Vec) { + // Return whether the named generic has a TypeKind::Numeric and save its name + let named_generic_is_numeric = |typ: &Type, found_names: &mut Vec| { + if let Type::NamedGeneric(_, name, Kind::Numeric { .. }) = typ { + found_names.push(name.to_string()); + true + } else { + false + } + }; + + match self { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Error + | Type::Constant(_) + | Type::Forall(_, _) + | Type::Quoted(_) => {} + + Type::TypeVariable(type_var, _) => { + if let TypeBinding::Bound(typ) = &*type_var.borrow() { + named_generic_is_numeric(typ, found_names); + } + } + + Type::NamedGeneric(_, _, _) => { + named_generic_is_numeric(self, found_names); + } + + Type::TraitAsType(_, _, args) => { + for arg in args.iter() { + arg.find_numeric_type_vars(found_names); + } + } + Type::Array(length, elem) => { + elem.find_numeric_type_vars(found_names); + named_generic_is_numeric(length, found_names); + } + Type::Slice(elem) => elem.find_numeric_type_vars(found_names), + Type::Tuple(fields) => { + for field in fields.iter() { + field.find_numeric_type_vars(found_names); + } + } + Type::Function(parameters, return_type, env) => { + for parameter in parameters.iter() { + parameter.find_numeric_type_vars(found_names); + } + return_type.find_numeric_type_vars(found_names); + env.find_numeric_type_vars(found_names); + } + Type::Struct(_, generics) => { + for generic in generics.iter() { + if !named_generic_is_numeric(generic, found_names) { + generic.find_numeric_type_vars(found_names); + } + } + } + Type::Alias(_, generics) => { + for generic in generics.iter() { + if !named_generic_is_numeric(generic, found_names) { + generic.find_numeric_type_vars(found_names); + } + } + } + Type::MutableReference(element) => element.find_numeric_type_vars(found_names), + Type::String(length) => { + named_generic_is_numeric(length, found_names); + } + Type::FmtString(length, elements) => { + elements.find_numeric_type_vars(found_names); + named_generic_is_numeric(length, found_names); + } + } + } + /// True if this type can be used as a parameter to `main` or a contract function. 
/// This is only false for unsized types like slices or slices that do not make sense /// as a program input such as named generics or mutable references. @@ -714,7 +848,7 @@ impl Type { Type::FmtString(_, _) | Type::TypeVariable(_, _) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::MutableReference(_) | Type::Forall(_, _) @@ -756,7 +890,7 @@ impl Type { | Type::Unit | Type::Constant(_) | Type::TypeVariable(_, _) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Error => true, Type::FmtString(_, _) @@ -788,12 +922,55 @@ impl Type { } } + /// Returns true if a value of this type can safely pass between constrained and + /// unconstrained functions (and vice-versa). + pub(crate) fn is_valid_for_unconstrained_boundary(&self) -> bool { + match self { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Constant(_) + | Type::Slice(_) + | Type::TypeVariable(_, _) + | Type::NamedGeneric(_, _, _) + | Type::Function(_, _, _) + | Type::FmtString(_, _) + | Type::Error => true, + + // Quoted objects only exist at compile-time where the only execution + // environment is the interpreter. In this environment, they are valid. + Type::Quoted(_) => true, + + Type::MutableReference(_) | Type::Forall(_, _) | Type::TraitAsType(..) => false, + + Type::Alias(alias, generics) => { + let alias = alias.borrow(); + alias.get_type(generics).is_valid_for_unconstrained_boundary() + } + + Type::Array(length, element) => { + length.is_valid_for_unconstrained_boundary() + && element.is_valid_for_unconstrained_boundary() + } + Type::String(length) => length.is_valid_for_unconstrained_boundary(), + Type::Tuple(elements) => { + elements.iter().all(|elem| elem.is_valid_for_unconstrained_boundary()) + } + Type::Struct(definition, generics) => definition + .borrow() + .get_fields(generics) + .into_iter() + .all(|(_, field)| field.is_valid_for_unconstrained_boundary()), + } + } + /// Returns the number of `Forall`-quantified type variables on this type. /// Returns 0 if this is not a Type::Forall pub fn generic_count(&self) -> usize { match self { Type::Forall(generics, _) => generics.len(), - Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _) => { + Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _, _) => { match &*type_variable.borrow() { TypeBinding::Bound(binding) => binding.generic_count(), TypeBinding::Unbound(_) => 0, @@ -822,12 +999,42 @@ impl Type { /// Return the generics and type within this `Type::Forall`. 
/// Panics if `self` is not `Type::Forall` - pub fn unwrap_forall(&self) -> (Cow, &Type) { + pub fn unwrap_forall(&self) -> (Cow, &Type) { match self { Type::Forall(generics, typ) => (Cow::Borrowed(generics), typ.as_ref()), - other => (Cow::Owned(Generics::new()), other), + other => (Cow::Owned(GenericTypeVars::new()), other), } } + + // TODO(https://github.com/noir-lang/noir/issues/5156): Bring back this method when we remove implicit numeric generics + // It has been commented out as to not trigger clippy for an unused method + // pub(crate) fn kind(&self) -> Kind { + // match self { + // Type::NamedGeneric(_, _, kind) => kind.clone(), + // Type::Constant(_) => Kind::Numeric(Box::new(Type::Integer( + // Signedness::Unsigned, + // IntegerBitSize::ThirtyTwo, + // ))), + // Type::FieldElement + // | Type::Array(_, _) + // | Type::Slice(_) + // | Type::Integer(_, _) + // | Type::Bool + // | Type::String(_) + // | Type::FmtString(_, _) + // | Type::Unit + // | Type::Tuple(_) + // | Type::Struct(_, _) + // | Type::Alias(_, _) + // | Type::TypeVariable(_, _) + // | Type::TraitAsType(_, _, _) + // | Type::Function(_, _, _) + // | Type::MutableReference(_) + // | Type::Forall(_, _) + // | Type::Quoted(_) + // | Type::Error => Kind::Normal, + // } + // } } impl std::fmt::Display for Type { @@ -907,7 +1114,7 @@ impl std::fmt::Display for Type { } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name) => match &*binding.borrow() { + Type::NamedGeneric(binding, name, _) => match &*binding.borrow() { TypeBinding::Bound(binding) => binding.fmt(f), TypeBinding::Unbound(_) if name.is_empty() => write!(f, "_"), TypeBinding::Unbound(_) => write!(f, "{name}"), @@ -966,9 +1173,10 @@ impl std::fmt::Display for QuotedType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { QuotedType::Expr => write!(f, "Expr"), - QuotedType::TypeDefinition => write!(f, "TypeDefinition"), + QuotedType::Quoted => write!(f, "Quoted"), QuotedType::TopLevelItem => write!(f, "TopLevelItem"), QuotedType::Type => write!(f, "Type"), + QuotedType::TypeDefinition => write!(f, "TypeDefinition"), } } } @@ -1135,8 +1343,7 @@ impl Type { TypeBinding::Unbound(id) => *id, }; - let this = self.substitute(bindings); - + let this = self.substitute(bindings).follow_bindings(); if let Some(binding) = this.get_inner_type_variable() { match &*binding.borrow() { TypeBinding::Bound(typ) => return typ.try_bind_to(var, bindings), @@ -1158,7 +1365,7 @@ impl Type { fn get_inner_type_variable(&self) -> Option> { match self { - Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.1.clone()), + Type::TypeVariable(var, _) | Type::NamedGeneric(var, _, _) => Some(var.1.clone()), _ => None, } } @@ -1269,7 +1476,7 @@ impl Type { } } - (NamedGeneric(binding, _), other) | (other, NamedGeneric(binding, _)) + (NamedGeneric(binding, _, _), other) | (other, NamedGeneric(binding, _, _)) if !binding.borrow().is_unbound() => { if let TypeBinding::Bound(link) = &*binding.borrow() { @@ -1279,7 +1486,7 @@ impl Type { } } - (NamedGeneric(binding_a, name_a), NamedGeneric(binding_b, name_b)) => { + (NamedGeneric(binding_a, name_a, _), NamedGeneric(binding_b, name_b, _)) => { // Bound NamedGenerics are caught by the check above assert!(binding_a.borrow().is_unbound()); assert!(binding_b.borrow().is_unbound()); @@ -1535,6 +1742,15 @@ impl Type { } } + fn type_variable_id(&self) -> Option { + match self { + Type::TypeVariable(variable, _) | Type::NamedGeneric(variable, _, _) => { + 
Some(variable.0) + } + _ => None, + } + } + /// Substitute any type variables found within this type with the /// given bindings if found. If a type variable is not found within /// the given TypeBindings, it is unchanged. @@ -1569,18 +1785,29 @@ impl Type { return self.clone(); } + let recur_on_binding = |id, replacement: &Type| { + // Prevent recuring forever if there's a `T := T` binding + if replacement.type_variable_id() == Some(id) { + replacement.clone() + } else { + replacement.substitute_helper(type_bindings, substitute_bound_typevars) + } + }; + let substitute_binding = |binding: &TypeVariable| { // Check the id first to allow substituting to // type variables that have already been bound over. // This is needed for monomorphizing trait impl methods. match type_bindings.get(&binding.0) { - Some((_, binding)) if substitute_bound_typevars => binding.clone(), + Some((_, replacement)) if substitute_bound_typevars => { + recur_on_binding(binding.0, replacement) + } _ => match &*binding.borrow() { TypeBinding::Bound(binding) => { binding.substitute_helper(type_bindings, substitute_bound_typevars) } TypeBinding::Unbound(id) => match type_bindings.get(id) { - Some((_, binding)) => binding.clone(), + Some((_, replacement)) => recur_on_binding(binding.0, replacement), None => self.clone(), }, }, @@ -1606,7 +1833,7 @@ impl Type { let fields = fields.substitute_helper(type_bindings, substitute_bound_typevars); Type::FmtString(Box::new(size), Box::new(fields)) } - Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { + Type::NamedGeneric(binding, _, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } // Do not substitute_helper fields, it can lead to infinite recursion @@ -1684,7 +1911,7 @@ impl Type { generic_args.iter().any(|arg| arg.occurs(target_id)) } Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), - Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { + Type::NamedGeneric(binding, _, _) | Type::TypeVariable(binding, _) => { match &*binding.borrow() { TypeBinding::Bound(binding) => binding.occurs(target_id), TypeBinding::Unbound(id) => *id == target_id, @@ -1739,7 +1966,7 @@ impl Type { def.borrow().get_type(args).follow_bindings() } Tuple(args) => Tuple(vecmap(args, |arg| arg.follow_bindings())), - TypeVariable(var, _) | NamedGeneric(var, _) => { + TypeVariable(var, _) | NamedGeneric(var, _, _) => { if let TypeBinding::Bound(typ) = &*var.borrow() { return typ.follow_bindings(); } @@ -1768,7 +1995,7 @@ impl Type { } } - pub fn from_generics(generics: &Generics) -> Vec { + pub fn from_generics(generics: &GenericTypeVars) -> Vec { vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) } } @@ -1965,7 +2192,14 @@ impl std::fmt::Debug for Type { } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name) => write!(f, "{}{:?}", name, binding), + Type::NamedGeneric(binding, name, kind) => match kind { + Kind::Normal => { + write!(f, "{} -> {:?}", name, binding) + } + Kind::Numeric(typ) => { + write!(f, "({} : {}) -> {:?}", name, typ, binding) + } + }, Type::Constant(x) => x.fmt(f), Type::Forall(typevars, typ) => { let typevars = vecmap(typevars, |var| format!("{:?}", var)); diff --git a/compiler/noirc_frontend/src/lexer/errors.rs b/compiler/noirc_frontend/src/lexer/errors.rs index 73c75af4cd7..2452e034c1c 100644 --- a/compiler/noirc_frontend/src/lexer/errors.rs +++ b/compiler/noirc_frontend/src/lexer/errors.rs @@ -27,6 +27,10 @@ pub enum 
LexerErrorKind { "'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character." )] InvalidEscape { escaped: char, span: Span }, + #[error("Invalid quote delimiter `{delimiter}`, valid delimiters are `{{`, `[`, and `(`")] + InvalidQuoteDelimiter { delimiter: SpannedToken }, + #[error("Expected `{end_delim}` to close this {start_delim}")] + UnclosedQuote { start_delim: SpannedToken, end_delim: Token }, } impl From for ParserError { @@ -47,6 +51,8 @@ impl LexerErrorKind { LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, LexerErrorKind::InvalidEscape { span, .. } => *span, + LexerErrorKind::InvalidQuoteDelimiter { delimiter } => delimiter.to_span(), + LexerErrorKind::UnclosedQuote { start_delim, .. } => start_delim.to_span(), } } @@ -92,6 +98,12 @@ impl LexerErrorKind { ("Unterminated string literal".to_string(), "Unterminated string literal".to_string(), *span), LexerErrorKind::InvalidEscape { escaped, span } => (format!("'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character."), "Invalid escape sequence".to_string(), *span), + LexerErrorKind::InvalidQuoteDelimiter { delimiter } => { + (format!("Invalid quote delimiter `{delimiter}`"), "Valid delimiters are `{`, `[`, and `(`".to_string(), delimiter.to_span()) + }, + LexerErrorKind::UnclosedQuote { start_delim, end_delim } => { + ("Unclosed `quote` expression".to_string(), format!("Expected a `{end_delim}` to close this `{start_delim}`"), start_delim.to_span()) + } } } } diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index 3d052e22e36..d2bc3d0f519 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -145,6 +145,7 @@ impl<'a> Lexer<'a> { Some('"') => self.eat_string_literal(), Some('f') => self.eat_format_string_or_alpha_numeric(), Some('r') => self.eat_raw_string_or_alpha_numeric(), + Some('q') => self.eat_quote_or_alpha_numeric(), Some('#') => self.eat_attribute(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(ch) => { @@ -310,14 +311,25 @@ impl<'a> Lexer<'a> { //XXX(low): Can increase performance if we use iterator semantic and utilize some of the methods on String. See below // https://doc.rust-lang.org/stable/std/primitive.str.html#method.rsplit fn eat_word(&mut self, initial_char: char) -> SpannedTokenResult { - let start = self.position; + let (start, word, end) = self.lex_word(initial_char); + self.lookup_word_token(word, start, end) + } + /// Lex the next word in the input stream. 
Returns (start position, word, end position) + fn lex_word(&mut self, initial_char: char) -> (Position, String, Position) { + let start = self.position; let word = self.eat_while(Some(initial_char), |ch| { ch.is_ascii_alphabetic() || ch.is_numeric() || ch == '_' }); + (start, word, self.position) + } - let end = self.position; - + fn lookup_word_token( + &self, + word: String, + start: Position, + end: Position, + ) -> SpannedTokenResult { // Check if word either an identifier or a keyword if let Some(keyword_token) = Keyword::lookup_keyword(&word) { return Ok(keyword_token.into_span(start, end)); @@ -509,6 +521,50 @@ impl<'a> Lexer<'a> { } } + fn eat_quote_or_alpha_numeric(&mut self) -> SpannedTokenResult { + let (start, word, end) = self.lex_word('q'); + if word != "quote" { + return self.lookup_word_token(word, start, end); + } + + let delimiter = self.next_token()?; + let (start_delim, end_delim) = match delimiter.token() { + Token::LeftBrace => (Token::LeftBrace, Token::RightBrace), + Token::LeftBracket => (Token::LeftBracket, Token::RightBracket), + Token::LeftParen => (Token::LeftParen, Token::RightParen), + _ => return Err(LexerErrorKind::InvalidQuoteDelimiter { delimiter }), + }; + + let mut tokens = Vec::new(); + + // Keep track of each nested delimiter we need to close. + let mut nested_delimiters = vec![delimiter]; + + while !nested_delimiters.is_empty() { + let token = self.next_token()?; + + if *token.token() == start_delim { + nested_delimiters.push(token.clone()); + } else if *token.token() == end_delim { + nested_delimiters.pop(); + } else if *token.token() == Token::EOF { + let start_delim = + nested_delimiters.pop().expect("If this were empty, we wouldn't be looping"); + return Err(LexerErrorKind::UnclosedQuote { start_delim, end_delim }); + } + + tokens.push(token); + } + + // Pop the closing delimiter from the token stream + if !tokens.is_empty() { + tokens.pop(); + } + + let end = self.position; + Ok(Token::Quote(Tokens(tokens)).into_span(start, end)) + } + fn parse_comment(&mut self, start: u32) -> SpannedTokenResult { let doc_style = match self.peek_char() { Some('!') => { @@ -604,6 +660,8 @@ impl<'a> Iterator for Lexer<'a> { #[cfg(test)] mod tests { + use iter_extended::vecmap; + use super::*; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; @@ -1232,4 +1290,45 @@ mod tests { } } } + + #[test] + fn test_quote() { + // cases is a vector of pairs of (test string, expected # of tokens in token stream) + let cases = vec![ + ("quote {}", 0), + ("quote { a.b }", 3), + ("quote { ) ( }", 2), // invalid syntax is fine in a quote + ("quote { { } }", 2), // Nested `{` and `}` shouldn't close the quote as long as they are matched. + ("quote { 1 { 2 { 3 { 4 { 5 } 4 4 } 3 3 } 2 2 } 1 1 }", 21), + ("quote [ } } ]", 2), // In addition to `{}`, `[]`, and `()` can also be used as delimiters. + ("quote [ } foo[] } ]", 5), + ("quote ( } () } )", 4), + ]; + + for (source, expected_stream_length) in cases { + let mut tokens = vecmap(Lexer::new(source), |result| result.unwrap().into_token()); + + // All examples should be a single TokenStream token followed by an EOF token. + assert_eq!(tokens.len(), 2, "Unexpected token count: {tokens:?}"); + + tokens.pop(); + match tokens.pop().unwrap() { + Token::Quote(stream) => assert_eq!(stream.0.len(), expected_stream_length), + other => panic!("test_quote test failure! 
Expected a single TokenStream token, got {other} for input `{source}`") + } + } + } + + #[test] + fn test_unclosed_quote() { + let cases = vec!["quote {", "quote { { }", "quote [ []", "quote (((((((())))"]; + + for source in cases { + // `quote` is not itself a keyword so if the token stream fails to + // parse we don't expect any valid tokens from the quote construct + for token in Lexer::new(source) { + assert!(token.is_err(), "Expected Err, found {token:?}"); + } + } + } } diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index ec589a816f6..f98343ba52f 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -2,7 +2,10 @@ use acvm::{acir::AcirField, FieldElement}; use noirc_errors::{Position, Span, Spanned}; use std::{fmt, iter::Map, vec::IntoIter}; -use crate::lexer::errors::LexerErrorKind; +use crate::{ + lexer::errors::LexerErrorKind, + node_interner::{ExprId, QuotedTypeId}, +}; /// Represents a token in noir's grammar - a word, number, /// or symbol that can be used in noir's syntax. This is the @@ -23,6 +26,8 @@ pub enum BorrowedToken<'input> { Attribute(Attribute), LineComment(&'input str, Option), BlockComment(&'input str, Option), + Quote(&'input Tokens), + QuotedType(QuotedTypeId), /// < Less, /// <= @@ -94,6 +99,11 @@ pub enum BorrowedToken<'input> { Whitespace(&'input str), + /// This is an implementation detail on how macros are implemented by quoting token streams. + /// This token marks where an unquote operation is performed. The ExprId argument is the + /// resolved variable which is being unquoted at this position in the token stream. + UnquoteMarker(ExprId), + /// An invalid character is one that is not in noir's language or grammar. /// /// We don't report invalid tokens in the source as errors until parsing to @@ -117,6 +127,13 @@ pub enum Token { Attribute(Attribute), LineComment(String, Option), BlockComment(String, Option), + // A `quote { ... }` along with the tokens in its token stream. + Quote(Tokens), + /// A quoted type resulting from a `Type` object in noir code being + /// spliced into a macro's token stream. We preserve the original type + /// to avoid having to tokenize it, re-parse it, and re-resolve it which + /// may change the underlying type. + QuotedType(QuotedTypeId), /// < Less, /// <= @@ -188,6 +205,11 @@ pub enum Token { Whitespace(String), + /// This is an implementation detail on how macros are implemented by quoting token streams. + /// This token marks where an unquote operation is performed. The ExprId argument is the + /// resolved variable which is being unquoted at this position in the token stream. + UnquoteMarker(ExprId), + /// An invalid character is one that is not in noir's language or grammar. 
/// /// We don't report invalid tokens in the source as errors until parsing to @@ -209,6 +231,8 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::Attribute(ref a) => BorrowedToken::Attribute(a.clone()), Token::LineComment(ref s, _style) => BorrowedToken::LineComment(s, *_style), Token::BlockComment(ref s, _style) => BorrowedToken::BlockComment(s, *_style), + Token::Quote(stream) => BorrowedToken::Quote(stream), + Token::QuotedType(id) => BorrowedToken::QuotedType(*id), Token::IntType(ref i) => BorrowedToken::IntType(i.clone()), Token::Less => BorrowedToken::Less, Token::LessEqual => BorrowedToken::LessEqual, @@ -246,6 +270,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::EOF => BorrowedToken::EOF, Token::Invalid(c) => BorrowedToken::Invalid(*c), Token::Whitespace(ref s) => BorrowedToken::Whitespace(s), + Token::UnquoteMarker(id) => BorrowedToken::UnquoteMarker(*id), } } @@ -255,7 +280,7 @@ pub enum DocStyle { Inner, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct SpannedToken(Spanned); impl PartialEq for Token { @@ -321,6 +346,15 @@ impl fmt::Display for Token { Token::Attribute(ref a) => write!(f, "{a}"), Token::LineComment(ref s, _style) => write!(f, "//{s}"), Token::BlockComment(ref s, _style) => write!(f, "/*{s}*/"), + Token::Quote(ref stream) => { + write!(f, "quote {{")?; + for token in stream.0.iter() { + write!(f, " {token}")?; + } + write!(f, "}}") + } + // Quoted types only have an ID so there is nothing to display + Token::QuotedType(_) => write!(f, "(type)"), Token::IntType(ref i) => write!(f, "{i}"), Token::Less => write!(f, "<"), Token::LessEqual => write!(f, "<="), @@ -358,6 +392,7 @@ impl fmt::Display for Token { Token::EOF => write!(f, "end of input"), Token::Invalid(c) => write!(f, "{c}"), Token::Whitespace(ref s) => write!(f, "{s}"), + Token::UnquoteMarker(_) => write!(f, "(UnquoteMarker)"), } } } @@ -370,6 +405,9 @@ pub enum TokenKind { Literal, Keyword, Attribute, + Quote, + QuotedType, + UnquoteMarker, } impl fmt::Display for TokenKind { @@ -380,13 +418,16 @@ impl fmt::Display for TokenKind { TokenKind::Literal => write!(f, "literal"), TokenKind::Keyword => write!(f, "keyword"), TokenKind::Attribute => write!(f, "attribute"), + TokenKind::Quote => write!(f, "quote"), + TokenKind::QuotedType => write!(f, "quoted type"), + TokenKind::UnquoteMarker => write!(f, "macro result"), } } } impl Token { pub fn kind(&self) -> TokenKind { - match *self { + match self { Token::Ident(_) => TokenKind::Ident, Token::Int(_) | Token::Bool(_) @@ -395,7 +436,10 @@ impl Token { | Token::FmtStr(_) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::Attribute(_) => TokenKind::Attribute, - ref tok => TokenKind::Token(tok.clone()), + Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, + Token::Quote(_) => TokenKind::Quote, + Token::QuotedType(_) => TokenKind::QuotedType, + tok => TokenKind::Token(tok.clone()), } } @@ -859,7 +903,7 @@ pub enum Keyword { Mod, Mut, Pub, - Quote, + Quoted, Return, ReturnData, String, @@ -907,7 +951,7 @@ impl fmt::Display for Keyword { Keyword::Mod => write!(f, "mod"), Keyword::Mut => write!(f, "mut"), Keyword::Pub => write!(f, "pub"), - Keyword::Quote => write!(f, "quote"), + Keyword::Quoted => write!(f, "Quoted"), Keyword::Return => write!(f, "return"), Keyword::ReturnData => write!(f, "return_data"), Keyword::String => write!(f, "str"), @@ -958,7 +1002,7 @@ impl Keyword { "mod" => Keyword::Mod, "mut" => 
Keyword::Mut, "pub" => Keyword::Pub, - "quote" => Keyword::Quote, + "Quoted" => Keyword::Quoted, "return" => Keyword::Return, "return_data" => Keyword::ReturnData, "str" => Keyword::String, @@ -984,6 +1028,7 @@ impl Keyword { } } +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Tokens(pub Vec); type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; diff --git a/compiler/noirc_frontend/src/monomorphization/errors.rs b/compiler/noirc_frontend/src/monomorphization/errors.rs index 2db570540d6..df61c138c02 100644 --- a/compiler/noirc_frontend/src/monomorphization/errors.rs +++ b/compiler/noirc_frontend/src/monomorphization/errors.rs @@ -6,6 +6,7 @@ use crate::hir::comptime::InterpreterError; pub enum MonomorphizationError { UnknownArrayLength { location: Location }, TypeAnnotationsNeeded { location: Location }, + InternalError { message: &'static str, location: Location }, InterpreterError(InterpreterError), } @@ -13,6 +14,7 @@ impl MonomorphizationError { fn location(&self) -> Location { match self { MonomorphizationError::UnknownArrayLength { location } + | MonomorphizationError::InternalError { location, .. } | MonomorphizationError::TypeAnnotationsNeeded { location } => *location, MonomorphizationError::InterpreterError(error) => error.get_location(), } @@ -36,6 +38,7 @@ impl MonomorphizationError { } MonomorphizationError::TypeAnnotationsNeeded { .. } => "Type annotations needed", MonomorphizationError::InterpreterError(error) => return (&error).into(), + MonomorphizationError::InternalError { message, .. } => message, }; let location = self.location(); diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 821ae7e7c4c..0ed043914f3 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -610,6 +610,7 @@ impl<'interner> Monomorphizer<'interner> { }) .transpose()? .map(Box::new); + Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message)) } HirStatement::Assign(assign) => self.assign(assign), @@ -889,7 +890,11 @@ impl<'interner> Monomorphizer<'interner> { DefinitionKind::Local(_) => match self.lookup_captured_expr(ident.id) { Some(expr) => expr, None => { - let ident = self.local_ident(&ident)?.unwrap(); + let Some(ident) = self.local_ident(&ident)? else { + let location = self.interner.id_location(expr_id); + let message = "ICE: Variable not found during monomorphization"; + return Err(MonomorphizationError::InternalError { location, message }); + }; ast::Expression::Ident(ident) } }, @@ -941,7 +946,7 @@ impl<'interner> Monomorphizer<'interner> { HirType::TraitAsType(..) 
=> { unreachable!("All TraitAsType should be replaced before calling convert_type"); } - HirType::NamedGeneric(binding, _) => { + HirType::NamedGeneric(binding, _, _) => { if let TypeBinding::Bound(binding) = &*binding.borrow() { return Self::convert_type(binding, location); } @@ -1271,19 +1276,19 @@ impl<'interner> Monomorphizer<'interner> { } "modulus_le_bits" => { let bits = FieldElement::modulus().to_radix_le(2); - Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) + Some(self.modulus_slice_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bits" => { let bits = FieldElement::modulus().to_radix_be(2); - Some(self.modulus_array_literal(bits, IntegerBitSize::One, location)) + Some(self.modulus_slice_literal(bits, IntegerBitSize::One, location)) } "modulus_be_bytes" => { let bytes = FieldElement::modulus().to_bytes_be(); - Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) + Some(self.modulus_slice_literal(bytes, IntegerBitSize::Eight, location)) } "modulus_le_bytes" => { let bytes = FieldElement::modulus().to_bytes_le(); - Some(self.modulus_array_literal(bytes, IntegerBitSize::Eight, location)) + Some(self.modulus_slice_literal(bytes, IntegerBitSize::Eight, location)) } _ => None, }; @@ -1292,7 +1297,7 @@ impl<'interner> Monomorphizer<'interner> { None } - fn modulus_array_literal( + fn modulus_slice_literal( &self, bytes: Vec, arr_elem_bits: IntegerBitSize, @@ -1306,10 +1311,9 @@ impl<'interner> Monomorphizer<'interner> { Expression::Literal(Literal::Integer((byte as u128).into(), int_type.clone(), location)) }); - let typ = Type::Array(bytes_as_expr.len() as u32, Box::new(int_type)); - + let typ = Type::Slice(Box::new(int_type)); let arr_literal = ArrayLiteral { typ, contents: bytes_as_expr }; - Expression::Literal(Literal::Array(arr_literal)) + Expression::Literal(Literal::Slice(arr_literal)) } fn queue_function( @@ -1813,13 +1817,13 @@ fn unwrap_struct_type(typ: &HirType) -> Vec<(String, HirType)> { } } -fn perform_instantiation_bindings(bindings: &TypeBindings) { +pub fn perform_instantiation_bindings(bindings: &TypeBindings) { for (var, binding) in bindings.values() { var.force_bind(binding.clone()); } } -fn undo_instantiation_bindings(bindings: TypeBindings) { +pub fn undo_instantiation_bindings(bindings: TypeBindings) { for (id, (var, _)) in bindings { var.unbind(id); } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 915093df514..17531d09eac 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -32,9 +32,9 @@ use crate::hir_def::{ stmt::HirStatement, }; use crate::token::{Attributes, SecondaryAttribute}; -use crate::{ - Generics, Shared, TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, -}; +use crate::GenericTypeVars; +use crate::Generics; +use crate::{Shared, TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind}; /// An arbitrary number to limit the recursion depth when searching for trait impls. /// This is needed to stop recursing for cases such as `impl Foo for T where T: Eq` @@ -176,6 +176,12 @@ pub struct NodeInterner { /// Stores the [Location] of a [Type] reference pub(crate) type_ref_locations: Vec<(Type, Location)>, + + /// In Noir's metaprogramming, a noir type has the type `Type`. When these are spliced + /// into `quoted` expressions, we preserve the original type by assigning it a unique id + /// and creating a `Token::QuotedType(id)` from this id. 
We cannot create a token holding + /// the actual type since types do not implement Send or Sync. + quoted_types: noirc_arena::Arena, } /// A dependency in the dependency graph may be a type or a definition. @@ -303,7 +309,7 @@ impl StmtId { } } -#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] +#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone, PartialOrd, Ord)] pub struct ExprId(Index); impl ExprId { @@ -472,6 +478,9 @@ pub struct GlobalInfo { pub value: Option, } +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct QuotedTypeId(noirc_arena::Index); + impl Default for NodeInterner { fn default() -> Self { let mut interner = NodeInterner { @@ -506,6 +515,7 @@ impl Default for NodeInterner { primitive_methods: HashMap::new(), type_alias_ref: Vec::new(), type_ref_locations: Vec::new(), + quoted_types: Default::default(), }; // An empty block expression is used often, we add this into the `node` on startup @@ -547,7 +557,12 @@ impl NodeInterner { self.definition_to_type.insert(definition_id, typ); } - pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { + pub fn push_empty_trait( + &mut self, + type_id: TraitId, + unresolved_trait: &UnresolvedTrait, + generics: Generics, + ) { let self_type_typevar_id = self.next_type_variable_id(); let new_trait = Trait { @@ -555,13 +570,7 @@ impl NodeInterner { name: unresolved_trait.trait_def.name.clone(), crate_id: unresolved_trait.crate_id, location: Location::new(unresolved_trait.trait_def.span, unresolved_trait.file_id), - generics: vecmap(&unresolved_trait.trait_def.generics, |_| { - // Temporary type variable ids before the trait is resolved to its actual ids. - // This lets us record how many arguments the type expects so that other types - // can refer to it with generic arguments before the generic parameters themselves - // are resolved. - TypeVariable::unbound(TypeVariableId(0)) - }), + generics, self_type_typevar_id, self_type_typevar: TypeVariable::unbound(self_type_typevar_id), methods: Vec::new(), @@ -576,6 +585,7 @@ impl NodeInterner { pub fn new_struct( &mut self, typ: &UnresolvedStruct, + generics: Generics, krate: CrateId, local_id: LocalModuleId, file_id: FileId, @@ -585,13 +595,6 @@ impl NodeInterner { // Fields will be filled in later let no_fields = Vec::new(); - let generics = vecmap(&typ.struct_def.generics, |_| { - // Temporary type variable ids before the struct is resolved to its actual ids. - // This lets us record how many arguments the type expects so that other types - // can refer to it with generic arguments before the generic parameters themselves - // are resolved. 
- TypeVariable::unbound(TypeVariableId(0)) - }); let location = Location::new(typ.struct_def.span, file_id); let new_struct = StructType::new(struct_id, name, location, no_fields, generics); @@ -600,7 +603,11 @@ impl NodeInterner { struct_id } - pub fn push_type_alias(&mut self, typ: &UnresolvedTypeAlias) -> TypeAliasId { + pub fn push_type_alias( + &mut self, + typ: &UnresolvedTypeAlias, + generics: Generics, + ) -> TypeAliasId { let type_id = TypeAliasId(self.type_aliases.len()); self.type_aliases.push(Shared::new(TypeAlias::new( @@ -608,7 +615,7 @@ impl NodeInterner { typ.type_alias_def.name.clone(), Location::new(typ.type_alias_def.span, typ.file_id), Type::Error, - vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))), + generics, ))); type_id @@ -629,6 +636,15 @@ impl NodeInterner { f(value); } + pub fn update_struct_attributes( + &mut self, + type_id: StructId, + f: impl FnOnce(&mut StructAttributes), + ) { + let value = self.struct_attributes.get_mut(&type_id).unwrap(); + f(value); + } + pub fn set_type_alias(&mut self, type_id: TypeAliasId, typ: Type, generics: Generics) { let type_alias_type = &mut self.type_aliases[type_id.0]; type_alias_type.borrow_mut().set_type_and_generics(typ, generics); @@ -1407,7 +1423,7 @@ impl NodeInterner { trait_id: TraitId, trait_generics: Vec, impl_id: TraitImplId, - impl_generics: Generics, + impl_generics: GenericTypeVars, trait_impl: Shared, ) -> Result<(), (Span, FileId)> { self.trait_implementations.insert(impl_id, trait_impl.clone()); @@ -1738,6 +1754,14 @@ impl NodeInterner { cycle } + + pub fn push_quoted_type(&mut self, typ: Type) -> QuotedTypeId { + QuotedTypeId(self.quoted_types.insert(typ)) + } + + pub fn get_quoted_type(&self, id: QuotedTypeId) -> &Type { + &self.quoted_types[id.0] + } } impl Methods { @@ -1825,7 +1849,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Unit => Some(Unit), Type::Tuple(_) => Some(Tuple), Type::Function(_, _, _) => Some(Function), - Type::NamedGeneric(_, _) => Some(Generic), + Type::NamedGeneric(_, _, _) => Some(Generic), Type::Quoted(quoted) => Some(Quoted(*quoted)), Type::MutableReference(element) => get_type_method_key(element), Type::Alias(alias, _) => get_type_method_key(&alias.borrow().typ), diff --git a/compiler/noirc_frontend/src/noir_parser.lalrpop b/compiler/noirc_frontend/src/noir_parser.lalrpop index 5768e3c1d17..5bf48a764d6 100644 --- a/compiler/noirc_frontend/src/noir_parser.lalrpop +++ b/compiler/noirc_frontend/src/noir_parser.lalrpop @@ -80,7 +80,6 @@ extern { "mod" => BorrowedToken::Keyword(noir_token::Keyword::Mod), "mut" => BorrowedToken::Keyword(noir_token::Keyword::Mut), "pub" => BorrowedToken::Keyword(noir_token::Keyword::Pub), - "quote" => BorrowedToken::Keyword(noir_token::Keyword::Quote), "return" => BorrowedToken::Keyword(noir_token::Keyword::Return), "return_data" => BorrowedToken::Keyword(noir_token::Keyword::ReturnData), "str" => BorrowedToken::Keyword(noir_token::Keyword::String), diff --git a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs index af3d4caa145..41ea9f88c19 100644 --- a/compiler/noirc_frontend/src/parser/errors.rs +++ b/compiler/noirc_frontend/src/parser/errors.rs @@ -133,7 +133,7 @@ impl std::fmt::Display for ParserError { } else { let expected = expected.iter().map(ToString::to_string).collect::>().join(", "); - write!(f, "Unexpected {}, expected one of {}{}", self.found, expected, reason_str) + write!(f, "Unexpected {:?}, expected one of {}{}", self.found, expected, reason_str) } } 
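The `quoted_types` arena and the `push_quoted_type`/`get_quoted_type` methods added to the interner above follow a common interning pattern: store the resolved type once and thread a small, copyable id (here `QuotedTypeId`) through the token stream instead of the value itself. Below is a minimal, self-contained sketch of that pattern; the names (`TypeArena`, `MyType`, `QuotedId`) are illustrative only and are not the real `noirc_arena`/`NodeInterner` API.

```rust
// Sketch of id-based interning: hand out ids, keep the values in one place.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct QuotedId(usize);

#[derive(Debug, Clone, PartialEq, Eq)]
enum MyType {
    Field,
    Array(usize, Box<MyType>),
}

#[derive(Default)]
struct TypeArena {
    types: Vec<MyType>,
}

impl TypeArena {
    /// Store a type and return a small id that can be embedded in a token
    /// (e.g. `Token::QuotedType(id)`), so the type itself never has to be
    /// re-tokenized, re-parsed, or moved through the token stream.
    fn push_quoted_type(&mut self, typ: MyType) -> QuotedId {
        let id = QuotedId(self.types.len());
        self.types.push(typ);
        id
    }

    /// Resolve an id back to the original, already-resolved type.
    fn get_quoted_type(&self, id: QuotedId) -> &MyType {
        &self.types[id.0]
    }
}

fn main() {
    let mut arena = TypeArena::default();
    let id = arena.push_quoted_type(MyType::Array(3, Box::new(MyType::Field)));
    assert_eq!(arena.get_quoted_type(id), &MyType::Array(3, Box::new(MyType::Field)));
}
```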
} diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index 9e60383afee..d7a282dbfc7 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -22,10 +22,10 @@ use chumsky::primitive::Container; pub use errors::ParserError; pub use errors::ParserErrorReason; use noirc_errors::Span; -pub use parser::parse_program; +pub use parser::{expression, parse_program, top_level_item}; #[derive(Debug, Clone)] -pub(crate) enum TopLevelStatement { +pub enum TopLevelStatement { Function(NoirFunction), Module(ModuleDeclaration), Import(UseTree), @@ -45,7 +45,7 @@ pub trait NoirParser: Parser + Sized + Clone { impl NoirParser for P where P: Parser + Clone {} // ExprParser just serves as a type alias for NoirParser + Clone -trait ExprParser: NoirParser {} +pub trait ExprParser: NoirParser {} impl
ExprParser for P where P: NoirParser {} fn parenthesized(parser: P) -> impl NoirParser @@ -197,7 +197,7 @@ fn parameter_name_recovery() -> impl NoirParser { } fn top_level_statement_recovery() -> impl NoirParser { - none_of([Token::Semicolon, Token::RightBrace, Token::EOF]) + none_of([Token::RightBrace, Token::EOF]) .repeated() .ignore_then(one_of([Token::Semicolon])) .map(|_| TopLevelStatement::Error) diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index e8838c58772..afeee889ede 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -23,7 +23,7 @@ //! prevent other parsers from being tried afterward since there is no longer an error. Thus, they should //! be limited to cases like the above `fn` example where it is clear we shouldn't back out of the //! current parser to try alternative parsers in a `choice` expression. -use self::primitives::{keyword, mutable_reference, variable}; +use self::primitives::{keyword, macro_quote_marker, mutable_reference, variable}; use self::types::{generic_type_args, maybe_comp_time, parse_type}; use super::{ @@ -191,6 +191,11 @@ fn module() -> impl NoirParser { }) } +/// This parser is used for parsing top level statements in macros +pub fn top_level_item() -> impl NoirParser { + top_level_statement(module()) +} + /// top_level_statement: function_definition /// | struct_definition /// | trait_definition @@ -225,11 +230,20 @@ fn implementation() -> impl NoirParser { keyword(Keyword::Impl) .ignore_then(function::generics()) .then(parse_type().map_with_span(|typ, span| (typ, span))) + .then(where_clause()) .then_ignore(just(Token::LeftBrace)) .then(spanned(function::function_definition(true)).repeated()) .then_ignore(just(Token::RightBrace)) - .map(|((generics, (object_type, type_span)), methods)| { - TopLevelStatement::Impl(TypeImpl { generics, object_type, type_span, methods }) + .map(|args| { + let ((other_args, where_clause), methods) = args; + let (generics, (object_type, type_span)) = other_args; + TopLevelStatement::Impl(TypeImpl { + generics, + object_type, + type_span, + where_clause, + methods, + }) }) } @@ -691,7 +705,7 @@ fn optional_visibility() -> impl NoirParser { }) } -fn expression() -> impl ExprParser { +pub fn expression() -> impl ExprParser { recursive(|expr| { expression_with_precedence( Precedence::Lowest, @@ -1074,11 +1088,12 @@ where }, lambdas::lambda(expr_parser.clone()), block(statement.clone()).map(ExpressionKind::Block), - comptime_expr(statement.clone()), - quote(statement), + comptime_expr(statement), + quote(), unquote(expr_parser.clone()), variable(), literal(), + macro_quote_marker(), )) .map_with_span(Expression::new) .or(parenthesized(expr_parser.clone()).map_with_span(|sub_expr, span| { @@ -1101,19 +1116,18 @@ where .labelled(ParsingRuleLabel::Atom) } -fn quote<'a, P>(statement: P) -> impl NoirParser + 'a -where - P: NoirParser + 'a, -{ - keyword(Keyword::Quote).ignore_then(spanned(block(statement))).validate( - |(block, block_span), span, emit| { - emit(ParserError::with_reason( - ParserErrorReason::ExperimentalFeature("quoted expressions"), - span, - )); - ExpressionKind::Quote(block, block_span) - }, - ) +fn quote() -> impl NoirParser { + token_kind(TokenKind::Quote).validate(|token, span, emit| { + let tokens = match token { + Token::Quote(tokens) => tokens, + _ => unreachable!("token_kind(Quote) should guarantee parsing only a quote token"), + }; + emit(ParserError::with_reason( + 
ParserErrorReason::ExperimentalFeature("quoted expressions"), + span, + )); + ExpressionKind::Quote(tokens) + }) } /// unquote: '$' variable @@ -1642,4 +1656,19 @@ mod test { check_cases_with_errors(&cases[..], block(fresh_statement())); } + + #[test] + fn test_quote() { + let cases = vec![ + "quote {}", + "quote { a.b }", + "quote { ) ( }", // invalid syntax is fine in a quote + "quote { { } }", // Nested `{` and `}` shouldn't close the quote as long as they are matched. + "quote { 1 { 2 { 3 { 4 { 5 } 4 4 } 3 3 } 2 2 } 1 1 }", + ]; + parse_all(quote(), cases); + + let failing = vec!["quote {}}", "quote a", "quote { { { } } } }"]; + parse_all_failing(quote(), failing); + } } diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs index 4db5637f6a7..3e686ee4c85 100644 --- a/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/compiler/noirc_frontend/src/parser/parser/function.rs @@ -5,11 +5,14 @@ use super::{ self_parameter, where_clause, NoirParser, }; use crate::ast::{ - FunctionDefinition, FunctionReturnType, Ident, ItemVisibility, NoirFunction, Param, Visibility, + FunctionDefinition, FunctionReturnType, ItemVisibility, NoirFunction, Param, Visibility, }; -use crate::parser::labels::ParsingRuleLabel; use crate::parser::spanned; use crate::token::{Keyword, Token}; +use crate::{ + ast::{UnresolvedGeneric, UnresolvedGenerics}, + parser::labels::ParsingRuleLabel, +}; use chumsky::prelude::*; @@ -76,16 +79,31 @@ fn function_modifiers() -> impl NoirParser<(bool, ItemVisibility, bool)> { }) } +pub(super) fn numeric_generic() -> impl NoirParser { + keyword(Keyword::Let) + .ignore_then(ident()) + .then_ignore(just(Token::Colon)) + .then(parse_type()) + .map(|(ident, typ)| UnresolvedGeneric::Numeric { ident, typ }) +} + +pub(super) fn generic_type() -> impl NoirParser { + ident().map(UnresolvedGeneric::Variable) +} + +pub(super) fn generic() -> impl NoirParser { + generic_type().or(numeric_generic()) +} + /// non_empty_ident_list: ident ',' non_empty_ident_list /// | ident /// /// generics: '<' non_empty_ident_list '>' /// | %empty -pub(super) fn generics() -> impl NoirParser> { - ident() +pub(super) fn generics() -> impl NoirParser { + generic() .separated_by(just(Token::Comma)) .allow_trailing() - .at_least(1) .delimited_by(just(Token::Less), just(Token::Greater)) .or_not() .map(|opt| opt.unwrap_or_default()) @@ -193,6 +211,7 @@ mod test { // fn func_name(x: impl Eq) {} with error Expected an end of input but found end of input // "fn func_name(x: impl Eq) {}", "fn func_name(x: impl Eq, y : T) where T: SomeTrait + Eq {}", + "fn func_name(x: [Field; N]) {}", ], ); @@ -209,6 +228,11 @@ mod test { // A leading plus is not allowed. 
"fn func_name(f: Field, y : T) where T: + SomeTrait {}", "fn func_name(f: Field, y : T) where T: TraitX + {}", + // Test ill-formed numeric generics + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", ], ); } diff --git a/compiler/noirc_frontend/src/parser/parser/primitives.rs b/compiler/noirc_frontend/src/parser/parser/primitives.rs index 9da19c0a185..88f9e591aba 100644 --- a/compiler/noirc_frontend/src/parser/parser/primitives.rs +++ b/compiler/noirc_frontend/src/parser/parser/primitives.rs @@ -94,6 +94,13 @@ pub(super) fn variable_no_turbofish() -> impl NoirParser { path().map(|path| ExpressionKind::Variable(path, None)) } +pub(super) fn macro_quote_marker() -> impl NoirParser { + token_kind(TokenKind::UnquoteMarker).map(|token| match token { + Token::UnquoteMarker(expr_id) => ExpressionKind::Resolved(expr_id), + other => unreachable!("Non-unquote-marker parsed as an unquote marker: {other:?}"), + }) +} + #[cfg(test)] mod test { use crate::parser::parser::{ diff --git a/compiler/noirc_frontend/src/parser/parser/types.rs b/compiler/noirc_frontend/src/parser/parser/types.rs index a961b32f9d7..493ebd1fb2f 100644 --- a/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/compiler/noirc_frontend/src/parser/parser/types.rs @@ -1,3 +1,4 @@ +use super::primitives::token_kind; use super::{ expression_with_precedence, keyword, nothing, parenthesized, path, NoirParser, ParserError, ParserErrorReason, Precedence, @@ -6,7 +7,7 @@ use crate::ast::{Recoverable, UnresolvedType, UnresolvedTypeData, UnresolvedType use crate::QuotedType; use crate::parser::labels::ParsingRuleLabel; -use crate::token::{Keyword, Token}; +use crate::token::{Keyword, Token, TokenKind}; use chumsky::prelude::*; use noirc_errors::Span; @@ -26,7 +27,9 @@ pub(super) fn parse_type_inner<'a>( expr_type(), type_definition_type(), top_level_item_type(), + type_of_quoted_types(), quoted_type(), + resolved_type(), format_string_type(recursive_type_parser.clone()), named_type(recursive_type_parser.clone()), named_trait(recursive_type_parser.clone()), @@ -93,11 +96,27 @@ fn top_level_item_type() -> impl NoirParser { } /// This is the type `Type` - the type of a quoted noir type. -fn quoted_type() -> impl NoirParser { +fn type_of_quoted_types() -> impl NoirParser { keyword(Keyword::TypeType) .map_with_span(|_, span| UnresolvedTypeData::Quoted(QuotedType::Type).with_span(span)) } +/// This is the type of a quoted, unparsed token stream. +fn quoted_type() -> impl NoirParser { + keyword(Keyword::Quoted) + .map_with_span(|_, span| UnresolvedTypeData::Quoted(QuotedType::Quoted).with_span(span)) +} + +/// This is the type of an already resolved type. +/// The only way this can appear in the token input is if an already resolved `Type` object +/// was spliced into a macro's token stream via the `$` operator. 
+fn resolved_type() -> impl NoirParser { + token_kind(TokenKind::QuotedType).map_with_span(|token, span| match token { + Token::QuotedType(id) => UnresolvedTypeData::Resolved(id).with_span(span), + _ => unreachable!("token_kind(QuotedType) guarantees we parse a quoted type"), + }) +} + pub(super) fn string_type() -> impl NoirParser { keyword(Keyword::String) .ignore_then(type_expression().delimited_by(just(Token::Less), just(Token::Greater))) diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 1acf1fbf3f2..9251eb3db6b 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -14,6 +14,7 @@ use fm::FileId; use iter_extended::vecmap; use noirc_errors::Location; +use crate::hir::comptime::InterpreterError; use crate::hir::def_collector::dc_crate::CompilationError; use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; use crate::hir::def_map::ModuleData; @@ -49,7 +50,10 @@ pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, F }); } -pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { +pub(crate) fn get_program( + src: &str, + use_legacy: bool, +) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); let fm = FileManager::new(root); @@ -81,7 +85,7 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation &mut context, program.clone().into_sorted(), root_file_id, - false, + use_legacy, &[], // No macro processors )); } @@ -89,7 +93,7 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation } pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 + get_program(src, false).2 } #[test] @@ -832,7 +836,7 @@ fn check_trait_as_type_as_two_fn_parameters() { } fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); + let (program, context, _errors) = get_program(src, false); let interner = context.def_interner; let mut all_captures: Vec> = Vec::new(); for func in program.into_sorted().functions { @@ -1194,7 +1198,7 @@ fn resolve_fmt_strings() { } fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); + let (_program, mut context, _errors) = get_program(src, false); let main_func_id = context.def_interner.find_function("main").unwrap(); let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); assert!(format!("{}", program) == expected); @@ -1325,14 +1329,20 @@ fn for_loop_over_array() { hello(array); } "#; - assert_eq!(get_program_errors(src).len(), 0); + let errors = get_program_errors(src); + assert_eq!(get_program_errors(src).len(), 1); + + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { .. 
}) + )); } // Regression for #4545 #[test] fn type_aliases_in_main() { let src = r#" - type Outer = [u8; N]; + type Outer = [u8; N]; fn main(_arg: Outer<1>) {} "#; assert_eq!(get_program_errors(src).len(), 0); @@ -1444,3 +1454,445 @@ fn specify_method_types_with_turbofish() { let errors = get_program_errors(src); assert_eq!(errors.len(), 0); } + +#[test] +fn struct_numeric_generic_in_function() { + let src = r#" + struct Foo { + inner: u64 + } + + fn bar() { } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. }), + )); +} + +#[test] +fn struct_numeric_generic_in_struct() { + let src = r#" + struct Foo { + inner: u64 + } + + struct Bar { } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::DefinitionError( + DefCollectorErrorKind::UnsupportedNumericGenericType { .. } + ), + )); +} + +#[test] +fn bool_numeric_generic() { + let src = r#" + fn read() -> Field { + if N { + 0 + } else { + 1 + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. }), + )); +} + +#[test] +fn numeric_generic_binary_operation_type_mismatch() { + let src = r#" + fn foo() -> bool { + let mut check: bool = true; + check = N; + check + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. }), + )); +} + +#[test] +fn bool_generic_as_loop_bound() { + let src = r#" + fn read() { + let mut fields = [0; N]; + for i in 0..N { + fields[i] = i + 1; + } + assert(fields[0] == 1); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 2); + + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. }), + )); + + let CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, expr_typ, .. + }) = &errors[1].0 + else { + panic!("Got an error other than a type mismatch"); + }; + + assert_eq!(expected_typ, "Field"); + assert_eq!(expr_typ, "bool"); +} + +#[test] +fn numeric_generic_in_function_signature() { + let src = r#" + fn foo(arr: [Field; N]) -> [Field; N] { arr } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_as_struct_field_type() { + let src = r#" + struct Foo { + a: Field, + b: N, + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); +} + +#[test] +fn normal_generic_as_array_length() { + let src = r#" + struct Foo { + a: Field, + b: [Field; N], + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + // TODO(https://github.com/noir-lang/noir/issues/5156): This should be switched to a hard type error rather than + // the `UseExplicitNumericGeneric` once implicit numeric generics are removed. + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { .. 
}), + )); +} + +#[test] +fn numeric_generic_as_param_type() { + let src = r#" + fn foo(x: I) -> I { + let _q: I = 5; + x + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 3); + // Error from the parameter type + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); + // Error from the let statement annotated type + assert!(matches!( + errors[1].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); + // Error from the return type + assert!(matches!( + errors[2].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); +} + +#[test] +fn numeric_generic_used_in_nested_type_fail() { + let src = r#" + struct Foo { + a: Field, + b: Bar, + } + struct Bar { + inner: N + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); +} + +#[test] +fn normal_generic_used_in_nested_array_length_fail() { + let src = r#" + struct Foo { + a: Field, + b: Bar, + } + struct Bar { + inner: [Field; N] + } + "#; + let errors = get_program_errors(src); + // TODO(https://github.com/noir-lang/noir/issues/5156): This should be switched to a hard type error once implicit numeric generics are removed. + assert_eq!(errors.len(), 0); +} + +#[test] +fn numeric_generic_used_in_nested_type_pass() { + // The order of these structs should not be changed to make sure + // that we are accurately resolving all struct generics before struct fields + let src = r#" + struct NestedNumeric { + a: Field, + b: InnerNumeric + } + struct InnerNumeric { + inner: [u64; N], + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_trait() { + let src = r#" + struct MyType { + a: Field, + b: Field, + c: Field, + d: T, + } + + impl Deserialize for MyType { + fn deserialize(fields: [Field; N], other: T) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2], d: other } + } + } + + trait Deserialize { + fn deserialize(fields: [Field; N], other: T) -> Self; + } + "#; + let errors = get_program_errors(src); + // We want to make sure that `N` in `impl Deserialize` does + // not trigger `expected type, found numeric generic parameter N` as the trait + // does in fact expect a numeric generic. + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_in_trait_impl_with_extra_impl_generics() { + let src = r#" + trait Default { + fn default() -> Self; + } + + struct MyType { + a: Field, + b: Field, + c: Field, + d: T, + } + + // Make sure that `T` is placed before `N` as we want to test that the order of the generics is correctly maintained. + // `N` is used first in the trait impl generics (`Deserialize for MyType`). + // We want to make sure that the compiler correctly accounts for that `N` has a numeric kind + // while `T` has a normal kind. 
+ impl Deserialize for MyType where T: Default { + fn deserialize(fields: [Field; N]) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2], d: T::default() } + } + } + + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_where_clause() { + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + let mut fields: [Field; N] = [0; N]; + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_turbofish() { + let src = r#" + fn double() -> u32 { + // Used as an expression + N * 2 + } + + fn double_numeric_generics_test() { + // Example usage of a numeric generic arguments. + assert(double::<9>() == 18); + assert(double::<7 + 8>() == 30); + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn constant_used_with_numeric_generic() { + let src = r#" + struct ValueNote { + value: Field, + } + + trait Serialize { + fn serialize(self) -> [Field; N]; + } + + impl Serialize<1> for ValueNote { + fn serialize(self) -> [Field; 1] { + [self.value] + } + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn normal_generic_used_when_numeric_expected_in_where_clause() { + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + T::deserialize([0, 1]) + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }), + )); + + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + let mut fields: [Field; N] = [0; N]; + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::VariableNotDeclared { .. }), + )); +} + +// TODO(https://github.com/noir-lang/noir/issues/5156): Remove this test once we ban implicit numeric generics +#[test] +fn implicit_numeric_generics_elaborator() { + let src = r#" + struct BoundedVec { + storage: [T; MaxLen], + len: u64, + } + + impl BoundedVec { + + // Test that we have an implicit numeric generic for "Len" as well as "MaxLen" + pub fn extend_from_bounded_vec(&mut self, _vec: BoundedVec) { + // We do this to avoid an unused variable warning on `self` + let _ = self.len; + for _ in 0..Len { } + } + + pub fn push(&mut self, elem: T) { + assert(self.len < MaxLen, "push out of bounds"); + self.storage[self.len] = elem; + self.len += 1; + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 4); + + for error in errors.iter() { + if let CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { ident }) = + &errors[0].0 + { + assert!(matches!(ident.0.contents.as_str(), "MaxLen" | "Len")); + } else { + panic!("Expected ResolverError::UseExplicitNumericGeneric but got {:?}", error); + } + } +} + +#[test] +fn quote_code_fragments() { + // This test ensures we can quote (and unquote/splice) code fragments + // which by themselves are not valid code. 
They only need to be valid + // by the time they are unquoted into the macro's call site. + let src = r#" + fn main() { + comptime { + concat!(quote { assert( }, quote { false); }); + } + } + + comptime fn concat(a: Quoted, b: Quoted) -> Quoted { + quote { $a $b } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + use InterpreterError::FailingConstraint; + assert!(matches!(&errors[0].0, CompilationError::InterpreterError(FailingConstraint { .. }))); +} diff --git a/deny.toml b/deny.toml index db7e53cad24..2d6d3e658b5 100644 --- a/deny.toml +++ b/deny.toml @@ -73,7 +73,7 @@ exceptions = [ { allow = ["CC0-1.0"], name = "tiny-keccak" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, - + { allow = ["CDDL-1.0"], name = "inferno" }, ] [[licenses.clarify]] diff --git a/docs/docs/how_to/how-to-oracles.md b/docs/docs/how_to/how-to-oracles.md index 2d2ed5c94b9..d6834c09c84 100644 --- a/docs/docs/how_to/how-to-oracles.md +++ b/docs/docs/how_to/how-to-oracles.md @@ -141,10 +141,10 @@ server.addMethod("resolve_function_call", async (params) => { if params.function !== "getSqrt" { throw Error("Unexpected foreign call") }; - const values = params.inputs[0].Array.map((field) => { + const values = params.inputs[0].map((field) => { return `${Math.sqrt(parseInt(field, 16))}`; }); - return { values: [{ Array: values }] }; + return { values }; }); ``` @@ -236,9 +236,9 @@ const foreignCallHandler = async (name, input) => { // notice that the "inputs" parameter contains *all* the inputs // in this case we to make the RPC request with the first parameter "numbers", which would be input[0] const oracleReturn = await client.request(name, [ - { Array: input[0].map((i) => i.toString("hex")) }, + input[0].map((i) => i.toString("hex")), ]); - return [oracleReturn.values[0].Array]; + return { values: oracleReturn }; }; // the rest of your NoirJS code diff --git a/docs/docs/noir/concepts/assert.md b/docs/docs/noir/concepts/assert.md index bcff613a695..2132de42072 100644 --- a/docs/docs/noir/concepts/assert.md +++ b/docs/docs/noir/concepts/assert.md @@ -1,9 +1,9 @@ --- title: Assert Function description: - Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or - comparison expression that follows to be true, and what happens if the expression is false at - runtime. + Learn about the `assert` and `static_assert` functions in Noir, which can be used to explicitly + constrain the predicate or comparison expression that follows to be true, and what happens if + the expression is false at runtime or compile-time, respectively. keywords: [Noir programming language, assert statement, predicate expression, comparison expression] sidebar_position: 4 --- @@ -43,3 +43,36 @@ let s = myStruct { myField: y }; assert(s.myField == x, s); ``` +There is also a special `static_assert` function that behaves like `assert`, +but that runs at compile-time. 
+ +```rust +fn main(xs: [Field; 3]) { + let x = 2 + 2; + let y = 4; + static_assert(x == y, "expected 2 + 2 to equal 4"); + + // This passes since the length of `xs` is known at compile-time + static_assert(xs.len() == 3, "expected the input to have 3 elements"); +} +``` + +This function fails when passed a dynamic (run-time) argument: + +```rust +fn main(x : Field, y : Field) { + // this fails because `x` is not known at compile-time + static_assert(x == 2, "expected x to be known at compile-time and equal to 2"); + + let mut example_slice = &[]; + if y == 4 { + example_slice = example_slice.push_back(0); + } + + // This fails because the length of `example_slice` is not known at + // compile-time + let error_message = "expected an empty slice, known at compile-time"; + static_assert(example_slice.len() == 0, error_message); +} +``` + diff --git a/docs/docs/noir/concepts/data_types/arrays.md b/docs/docs/noir/concepts/data_types/arrays.md index 95d749053e2..9a4ab5d3c1f 100644 --- a/docs/docs/noir/concepts/data_types/arrays.md +++ b/docs/docs/noir/concepts/data_types/arrays.md @@ -199,7 +199,7 @@ fn main() { ### reduce -Same as fold, but uses the first element as starting element. +Same as fold, but uses the first element as the starting element. ```rust fn reduce(self, f: fn(T, T) -> T) -> T diff --git a/docs/docs/noir/concepts/data_types/slices.mdx b/docs/docs/noir/concepts/data_types/slices.mdx index dff08d63ffb..d619117b799 100644 --- a/docs/docs/noir/concepts/data_types/slices.mdx +++ b/docs/docs/noir/concepts/data_types/slices.mdx @@ -191,3 +191,108 @@ fn main() { assert(array[1] == slice[1]); } ``` + +### map + +Applies a function to each element of the slice, returning a new slice containing the mapped elements. + +```rust +fn map(self, f: fn(T) -> U) -> [U] +``` + +example + +```rust +let a = &[1, 2, 3]; +let b = a.map(|a| a * 2); // b is now &[2, 4, 6] +``` + +### fold + +Applies a function to each element of the slice, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the slice, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = &[1]; +let a2 = &[1, 2]; +let a3 = &[1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let folded = slice.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as the starting element. 
+ +```rust +fn reduce(self, f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let reduced = slice.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let all = slice.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 5]; + let any = slice.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/docs/noir/concepts/traits.md b/docs/docs/noir/concepts/traits.md index df7cb9ebda0..51305b38c16 100644 --- a/docs/docs/noir/concepts/traits.md +++ b/docs/docs/noir/concepts/traits.md @@ -147,7 +147,7 @@ fn main() { ### Generic Trait Implementations With Where Clauses -Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +Where clauses can be placed on trait implementations themselves to restrict generics in a similar way. For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. For example, here is the implementation for array equality: @@ -169,6 +169,22 @@ impl Eq for [T; N] where T: Eq { } ``` +Where clauses can also be placed on struct implementations. +For example, here is a method utilizing a generic type that implements the equality trait. + +```rust +struct Foo { + a: u32, + b: T, +} + +impl Foo where T: Eq { + fn eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.eq(other.b) + } +} +``` + ## Generic Traits Traits themselves can also be generic by placing the generic arguments after the trait name. 
These generics are in diff --git a/docs/docs/tooling/testing.md b/docs/docs/tooling/testing.md index d3e0c522473..866677da567 100644 --- a/docs/docs/tooling/testing.md +++ b/docs/docs/tooling/testing.md @@ -42,7 +42,7 @@ fn test_add() { } ``` -You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: +You can be more specific and make it fail with a specific reason by using `should_fail_with = ""`: ```rust fn main(african_swallow_avg_speed : Field) { @@ -58,5 +58,22 @@ fn test_king_arthur() { fn test_bridgekeeper() { main(32); } - ``` + +The string given to `should_fail_with` doesn't need to exactly match the failure reason, it just needs to be a substring of it: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "airspeed velocity")] +fn test_bridgekeeper() { + main(32); +} +``` \ No newline at end of file diff --git a/examples/.gitignore b/examples/.gitignore new file mode 100644 index 00000000000..c4fdcad6719 --- /dev/null +++ b/examples/.gitignore @@ -0,0 +1,2 @@ +target +proofs \ No newline at end of file diff --git a/examples/codegen_verifier/.gitignore b/examples/codegen_verifier/.gitignore index c0d62c447d3..ae13a24752c 100644 --- a/examples/codegen_verifier/.gitignore +++ b/examples/codegen_verifier/.gitignore @@ -1,4 +1,3 @@ out cache -target src/contract.sol \ No newline at end of file diff --git a/examples/prove_and_verify/proofs/proof b/examples/prove_and_verify/proofs/proof deleted file mode 100644 index 01d5ad27686..00000000000 Binary files a/examples/prove_and_verify/proofs/proof and /dev/null differ diff --git a/examples/prove_and_verify/prove_and_verify.sh b/examples/prove_and_verify/prove_and_verify.sh index 01ee6c70738..df3ec3ff97a 100755 --- a/examples/prove_and_verify/prove_and_verify.sh +++ b/examples/prove_and_verify/prove_and_verify.sh @@ -11,4 +11,4 @@ $BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz # TODO: backend should automatically generate vk if necessary. 
$BACKEND write_vk -b ./target/hello_world.json -$BACKEND verify -v ./target/vk -p ./proofs/proof \ No newline at end of file +$BACKEND verify -k ./target/vk -p ./proofs/proof \ No newline at end of file diff --git a/noir_stdlib/src/aes128.nr b/noir_stdlib/src/aes128.nr index e6e2a5e4997..7b0876b86f3 100644 --- a/noir_stdlib/src/aes128.nr +++ b/noir_stdlib/src/aes128.nr @@ -1,4 +1,4 @@ #[foreign(aes128_encrypt)] // docs:start:aes128 -pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} +pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} // docs:end:aes128 diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 6fba197dd05..ad9c7093d07 100644 --- a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -2,7 +2,7 @@ use crate::cmp::Ord; // TODO: Once we fully move to the new SSA pass this module can be removed and replaced // by the methods in the `slice` module -impl [T; N] { +impl [T; N] { #[builtin(array_len)] pub fn len(self) -> u32 {} @@ -110,7 +110,7 @@ impl [T; N] { // helper function used to look up the position of a value in an array of Field // Note that function returns 0 if the value is not found -unconstrained fn find_index(a: [u32; N], find: u32) -> u32 { +unconstrained fn find_index(a: [u32; N], find: u32) -> u32 { let mut result = 0; for i in 0..a.len() { if a[i] == find { diff --git a/noir_stdlib/src/cmp.nr b/noir_stdlib/src/cmp.nr index 457b2cfa167..bdd5e2bc5ec 100644 --- a/noir_stdlib/src/cmp.nr +++ b/noir_stdlib/src/cmp.nr @@ -18,7 +18,7 @@ impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } -impl Eq for [T; N] where T: Eq { +impl Eq for [T; N] where T: Eq { fn eq(self, other: [T; N]) -> bool { let mut result = true; for i in 0 .. self.len() { @@ -38,7 +38,7 @@ impl Eq for [T] where T: Eq { } } -impl Eq for str { +impl Eq for str { fn eq(self, other: str) -> bool { let self_bytes = self.as_bytes(); let other_bytes = other.as_bytes(); @@ -203,7 +203,7 @@ impl Ord for bool { } } -impl Ord for [T; N] where T: Ord { +impl Ord for [T; N] where T: Ord { // The first non-equal element of both arrays determines // the ordering for the whole array. 
fn cmp(self, other: [T; N]) -> Ordering { diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index 6fde9e70f4d..c218ecd2348 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,11 +1,11 @@ use crate::{cmp::Eq, convert::From}; -struct BoundedVec { +struct BoundedVec { storage: [T; MaxLen], len: u32, } -impl BoundedVec { +impl BoundedVec { pub fn new() -> Self { let zeroed = crate::unsafe::zeroed(); BoundedVec { storage: [zeroed; MaxLen], len: 0 } @@ -61,7 +61,7 @@ impl BoundedVec { self.storage } - pub fn extend_from_array(&mut self, array: [T; Len]) { + pub fn extend_from_array(&mut self, array: [T; Len]) { let new_len = self.len + array.len(); assert(new_len <= MaxLen, "extend_from_array out of bounds"); for i in 0..array.len() { @@ -79,7 +79,7 @@ impl BoundedVec { self.len = new_len; } - pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) { + pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) { let append_len = vec.len(); let new_len = self.len + append_len; assert(new_len <= MaxLen, "extend_from_bounded_vec out of bounds"); @@ -94,7 +94,7 @@ impl BoundedVec { self.len = new_len; } - pub fn from_array(array: [T; Len]) -> Self { + pub fn from_array(array: [T; Len]) -> Self { assert(Len <= MaxLen, "from array out of bounds"); let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array(array); @@ -134,7 +134,7 @@ impl BoundedVec { } } -impl Eq for BoundedVec where T: Eq { +impl Eq for BoundedVec where T: Eq { fn eq(self, other: BoundedVec) -> bool { // TODO: https://github.com/noir-lang/noir/issues/4837 // @@ -145,7 +145,7 @@ impl Eq for BoundedVec where T: Eq { } } -impl From<[T; Len]> for BoundedVec { +impl From<[T; Len]> for BoundedVec { fn from(array: [T; Len]) -> BoundedVec { BoundedVec::from_array(array) } diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr index 84e94166869..8324583632f 100644 --- a/noir_stdlib/src/collections/map.nr +++ b/noir_stdlib/src/collections/map.nr @@ -15,7 +15,7 @@ global MAX_LOAD_FACTOR_DEN0MINATOR = 4; // Size of the underlying table must be known at compile time. // It is advised to select capacity N as a power of two, or a prime number // because utilized probing scheme is best tailored for it. -struct HashMap { +struct HashMap { _table: [Slot; N], // Amount of valid elements in the map. @@ -77,7 +77,7 @@ impl Slot { // While conducting lookup, we iterate attempt from 0 to N - 1 due to heuristic, // that if we have went that far without finding desired, // it is very unlikely to be after - performance will be heavily degraded. -impl HashMap { +impl HashMap { // Creates a new instance of HashMap with specified BuildHasher. // docs:start:with_hasher pub fn with_hasher(_build_hasher: B) -> Self @@ -424,7 +424,7 @@ impl HashMap { // equal sets of key-value entries, // thus one is a subset of the other and vice versa. 
// docs:start:eq -impl Eq for HashMap +impl Eq for HashMap where K: Eq + Hash, V: Eq, @@ -460,7 +460,7 @@ where } // docs:start:default -impl Default for HashMap +impl Default for HashMap where B: BuildHasher + Default, H: Hasher + Default diff --git a/noir_stdlib/src/compat.nr b/noir_stdlib/src/compat.nr index 30b7f73f130..06da8150767 100644 --- a/noir_stdlib/src/compat.nr +++ b/noir_stdlib/src/compat.nr @@ -1,18 +1,7 @@ -global BN254_MODULUS_BE_BYTES: [u8; 32] = [ +global BN254_MODULUS_BE_BYTES: [u8] = &[ 48, 100, 78, 114, 225, 49, 160, 41, 184, 80, 69, 182, 129, 129, 88, 93, 40, 51, 232, 72, 121, 185, 112, 145, 67, 225, 245, 147, 240, 0, 0, 1 ]; pub fn is_bn254() -> bool { - // TODO: refactor this once https://github.com/noir-lang/noir/issues/5245 is resolved. - let modulus_bytes = crate::field::modulus_be_bytes(); - if modulus_bytes.len() == 32 { - let mut modulus_bytes_array: [u8; 32] = [0; 32]; - for i in 0..32 { - modulus_bytes_array[i] = modulus_bytes[i]; - } - - modulus_bytes_array == BN254_MODULUS_BE_BYTES - } else { - false - } + crate::field::modulus_be_bytes() == BN254_MODULUS_BE_BYTES } diff --git a/noir_stdlib/src/default.nr b/noir_stdlib/src/default.nr index bd2f1ce0cd2..0acb3966034 100644 --- a/noir_stdlib/src/default.nr +++ b/noir_stdlib/src/default.nr @@ -17,7 +17,7 @@ impl Default for i64 { fn default() -> i64 { 0 } } impl Default for () { fn default() -> () { () } } impl Default for bool { fn default() -> bool { false } } -impl Default for [T; N] where T: Default { +impl Default for [T; N] where T: Default { fn default() -> [T; N] { [T::default(); N] } diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index 7dc756781c0..12b48d66b9d 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -114,7 +114,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } @@ -124,7 +124,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -315,7 +315,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } @@ -325,7 +325,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index 9dd324f3085..3ad3af41cff 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -134,7 +134,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). 
// If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } @@ -144,7 +144,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - pub fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + pub fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -336,7 +336,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -363,7 +363,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 506fe89313a..aaf66f903cc 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -132,7 +132,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } @@ -142,7 +142,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -340,7 +340,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -367,7 +367,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... 
+ n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir_stdlib/src/ecdsa_secp256k1.nr b/noir_stdlib/src/ecdsa_secp256k1.nr index f84e2221f57..8a70184dca8 100644 --- a/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir_stdlib/src/ecdsa_secp256k1.nr @@ -1,6 +1,6 @@ #[foreign(ecdsa_secp256k1)] // docs:start:ecdsa_secp256k1 -pub fn verify_signature( +pub fn verify_signature( public_key_x: [u8; 32], public_key_y: [u8; 32], signature: [u8; 64], diff --git a/noir_stdlib/src/ecdsa_secp256r1.nr b/noir_stdlib/src/ecdsa_secp256r1.nr index 76e68aeeafa..8772fa7c2ca 100644 --- a/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir_stdlib/src/ecdsa_secp256r1.nr @@ -1,6 +1,6 @@ #[foreign(ecdsa_secp256r1)] // docs:start:ecdsa_secp256r1 -pub fn verify_signature( +pub fn verify_signature( public_key_x: [u8; 32], public_key_y: [u8; 32], signature: [u8; 64], diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr index cd8c421e136..c5617094c0a 100644 --- a/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir_stdlib/src/embedded_curve_ops.nr @@ -52,6 +52,14 @@ struct EmbeddedCurveScalar { hi: Field, } +impl EmbeddedCurveScalar { + #[field(bn254)] + fn from_field(scalar: Field) -> EmbeddedCurveScalar { + let (a,b) = crate::field::bn254::decompose(scalar); + EmbeddedCurveScalar { lo: a, hi: b } + } +} + // Computes a multi scalar multiplication over the embedded curve. // For bn254, We have Grumpkin and Baby JubJub. // For bls12-381, we have JubJub and Bandersnatch. @@ -60,7 +68,7 @@ struct EmbeddedCurveScalar { // underlying proof system. #[foreign(multi_scalar_mul)] // docs:start:multi_scalar_mul -pub fn multi_scalar_mul( +pub fn multi_scalar_mul( points: [EmbeddedCurvePoint; N], scalars: [EmbeddedCurveScalar; N] ) -> [Field; 3] diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 6c295d127ab..493430c99a4 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -5,49 +5,87 @@ mod poseidon2; use crate::default::Default; use crate::uint128::U128; use crate::sha256::{digest, sha256_var}; -use crate::embedded_curve_ops::EmbeddedCurvePoint; +use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul}; #[foreign(sha256)] // docs:start:sha256 -pub fn sha256(input: [u8; N]) -> [u8; 32] +pub fn sha256(input: [u8; N]) -> [u8; 32] // docs:end:sha256 {} #[foreign(blake2s)] // docs:start:blake2s -pub fn blake2s(input: [u8; N]) -> [u8; 32] +pub fn blake2s(input: [u8; N]) -> [u8; 32] // docs:end:blake2s {} #[foreign(blake3)] // docs:start:blake3 -pub fn blake3(input: [u8; N]) -> [u8; 32] +pub fn blake3(input: [u8; N]) -> [u8; 32] // docs:end:blake3 {} // docs:start:pedersen_commitment -pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { +pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { // docs:end:pedersen_commitment - pedersen_commitment_with_separator(input, 0) + let value = pedersen_commitment_with_separator(input, 0); + if (value.x == 0) & (value.y == 0) { + EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true } + } else { + EmbeddedCurvePoint { x: value.x, y: value.y, is_infinite: false } + } } -#[foreign(pedersen_commitment)] -pub fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} +fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { + let mut points 
= [EmbeddedCurveScalar { lo: 0, hi: 0 }; N]; + for i in 0..N { + points[i] = EmbeddedCurveScalar::from_field(input[i]); + } + let generators = derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator); + let values = multi_scalar_mul(generators, points); + EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool } +} -pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { +pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { let values = __pedersen_commitment_with_separator(input, separator); EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: false } } // docs:start:pedersen_hash -pub fn pedersen_hash(input: [Field; N]) -> Field +pub fn pedersen_hash(input: [Field; N]) -> Field // docs:end:pedersen_hash { pedersen_hash_with_separator(input, 0) } +#[field(bn254)] +fn derive_generators( + domain_separator_bytes: [u8; M], + starting_index: u32 +) -> [EmbeddedCurvePoint; N] { + crate::assert_constant(domain_separator_bytes); + crate::assert_constant(starting_index); + __derive_generators(domain_separator_bytes, starting_index) +} + +#[builtin(derive_pedersen_generators)] +#[field(bn254)] +fn __derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] {} + +fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { + let v1 = pedersen_commitment_with_separator(input, separator); + let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0); + multi_scalar_mul( + [length_generator[0], v1], + [EmbeddedCurveScalar { lo: N as Field, hi: 0 }, EmbeddedCurveScalar { lo: 1, hi: 0 }] + )[0] +} + #[foreign(pedersen_hash)] -pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} +pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} + +#[foreign(pedersen_commitment)] +fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} pub fn hash_to_field(inputs: [Field]) -> Field { let mut sum = 0; @@ -62,12 +100,12 @@ pub fn hash_to_field(inputs: [Field]) -> Field { #[foreign(keccak256)] // docs:start:keccak256 -pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] // docs:end:keccak256 {} #[foreign(poseidon2_permutation)] -pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} +pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} @@ -172,7 +210,7 @@ impl Hash for U128 { } } -impl Hash for [T; N] where T: Hash { +impl Hash for [T; N] where T: Hash { fn hash(self, state: &mut H) where H: Hasher{ for elem in self { elem.hash(state); @@ -222,3 +260,11 @@ impl Hash for (A, B, C, D, E) where A: Hash, B: Hash, C: Hash, D: self.4.hash(state); } } + +#[test] +fn assert_pedersen_noir() { + // TODO: make this a fuzzer test once fuzzer supports curve-specific blackbox functions. 
+ let input = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + assert_eq(pedersen_hash_with_separator(input, 4), pedersen_hash_with_separator_noir(input, 4)); + assert_eq(pedersen_commitment_with_separator(input, 4), pedersen_commitment_with_separator_noir(input, 4)); +} diff --git a/noir_stdlib/src/hash/mimc.nr b/noir_stdlib/src/hash/mimc.nr index e90bacb75c2..de4475d9446 100644 --- a/noir_stdlib/src/hash/mimc.nr +++ b/noir_stdlib/src/hash/mimc.nr @@ -6,7 +6,7 @@ use crate::default::Default; // You must use constants generated for the native field // Rounds number should be ~ log(p)/log(exp) // For 254 bit primes, exponent 7 and 91 rounds seems to be recommended -fn mimc(x: Field, k: Field, constants: [Field; N], exp: Field) -> Field { +fn mimc(x: Field, k: Field, constants: [Field; N], exp: Field) -> Field { //round 0 let mut t = x + k; let mut h = t.pow_32(exp); @@ -117,7 +117,7 @@ global MIMC_BN254_CONSTANTS: [Field; MIMC_BN254_ROUNDS] = [ //mimc implementation with hardcoded parameters for BN254 curve. #[field(bn254)] #[no_predicates] -pub fn mimc_bn254(array: [Field; N]) -> Field { +pub fn mimc_bn254(array: [Field; N]) -> Field { let exponent = 7; let mut r = 0; for elem in array { diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index c4b5f0fcb6f..963808f6053 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -6,7 +6,7 @@ use crate::default::Default; // A config struct defining the parameters of the Poseidon instance to use. // // A thorough writeup of this method (along with an unoptimized method) can be found at: https://spec.filecoin.io/algorithms/crypto/poseidon/ -struct PoseidonConfig { +struct PoseidonConfig { // State width, should be equal to `T` t: Field, // Number of full rounds. should be even @@ -28,7 +28,7 @@ struct PoseidonConfig { sparse_mds: [Field; X], } -pub fn config( +pub fn config( t: Field, rf: u8, rp: u8, @@ -40,14 +40,17 @@ pub fn config( ) -> PoseidonConfig { // Input checks assert_eq(rf & 1, 0); - assert_eq((t as u8) * rf + rp, N); - assert_eq(t, T); + assert_eq((t as u8) * rf + rp, N as u8); + assert_eq(t, T as Field); assert(alpha != 0); PoseidonConfig { t, rf, rp, alpha, round_constants, mds, presparse_mds, sparse_mds } } -pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; T]) -> [Field; T] { +pub fn permute( + pos_conf: PoseidonConfig, + mut state: [Field; T] +) -> [Field; T] { let PoseidonConfig {t, rf, rp, alpha, round_constants, mds, presparse_mds, sparse_mds } = pos_conf; for i in 0..state.len() { @@ -109,7 +112,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; T] } // Performs matrix multiplication on a vector -fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { +fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { let mut out = [0; N]; for i in 0..N { @@ -122,7 +125,7 @@ fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { } // Corresponding absorption. 
-fn absorb( +fn absorb( pos_conf: PoseidonConfig, // Initial state; usually [0; O] mut state: [Field; T], @@ -152,7 +155,7 @@ fn absorb( state } -fn sigma(x: [Field; O]) -> [Field; O] { +fn sigma(x: [Field; O]) -> [Field; O] { let mut y = x; for i in 0..O { let t = y[i]; diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 9d3accb1ebd..6800fac421d 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -7,7 +7,7 @@ use crate::hash::poseidon::{PoseidonConfig, absorb}; // Variable-length Poseidon-128 sponge as suggested in second bullet point of §3 of https://eprint.iacr.org/2019/458.pdf #[field(bn254)] #[no_predicates] -pub fn sponge(msg: [Field; N]) -> Field { +pub fn sponge(msg: [Field; N]) -> Field { absorb(consts::x5_5_config(), [0; 5], 4, 1, msg)[1] } diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr index 4a68925255a..08cf68d1f82 100644 --- a/noir_stdlib/src/hash/poseidon2.nr +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -12,7 +12,7 @@ struct Poseidon2 { impl Poseidon2 { #[no_predicates] - pub fn hash(input: [Field; N], message_size: u32) -> Field { + pub fn hash(input: [Field; N], message_size: u32) -> Field { if message_size == N { Poseidon2::hash_internal(input, N, false) } else { @@ -95,7 +95,7 @@ impl Poseidon2 { result } - fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { let two_pow_64 = 18446744073709551616; let iv : Field = (in_len as Field) * two_pow_64; let mut sponge = Poseidon2::new(iv); diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index ad47171fa46..65da7e6e9ab 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -26,6 +26,7 @@ mod prelude; mod uint128; mod bigint; mod runtime; +mod meta; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident @@ -47,6 +48,11 @@ pub fn verify_proof(verification_key: [Field], proof: [Field], public_inputs: // Useful for debugging for-loop bounds. #[builtin(assert_constant)] pub fn assert_constant(x: T) {} + +// Asserts that the given value is both true and known at compile-time +#[builtin(static_assert)] +pub fn static_assert(predicate: bool, message: str) {} + // from_field and as_field are private since they are not valid for every type. // `as` should be the default for users to cast between primitive types, and in the future // traits can be used to work with generic types. diff --git a/noir_stdlib/src/merkle.nr b/noir_stdlib/src/merkle.nr index 9b15fe7313d..17e539ab9b7 100644 --- a/noir_stdlib/src/merkle.nr +++ b/noir_stdlib/src/merkle.nr @@ -2,7 +2,7 @@ // Currently we assume that it is a binary tree, so depth k implies a width of 2^k // XXX: In the future we can add an arity parameter // Returns the merkle root of the tree from the provided leaf, its hashpath, using a pedersen hash function. 
-pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { +pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { let n = hash_path.len(); let index_bits = index.to_le_bits(n as u32); let mut current = leaf; diff --git a/noir_stdlib/src/meta.nr b/noir_stdlib/src/meta.nr new file mode 100644 index 00000000000..1825888130b --- /dev/null +++ b/noir_stdlib/src/meta.nr @@ -0,0 +1 @@ +mod type_def; diff --git a/noir_stdlib/src/meta/type_def.nr b/noir_stdlib/src/meta/type_def.nr new file mode 100644 index 00000000000..b9354485921 --- /dev/null +++ b/noir_stdlib/src/meta/type_def.nr @@ -0,0 +1,16 @@ +impl TypeDefinition { + /// Return a syntactic version of this type definition as a type. + /// For example, `as_type(quote { type Foo { ... } })` would return `Foo` + #[builtin(type_def_as_type)] + fn as_type(self) -> Quoted {} + + /// Return each generic on this type. The names of these generics are unchanged + /// so users may need to keep name collisions in mind if this is used directly in a macro. + #[builtin(type_def_generics)] + fn generics(self) -> [Quoted] {} + + /// Returns (name, type) pairs of each field in this type. Each type is as-is + /// with any generic arguments unchanged. + #[builtin(type_def_fields)] + fn fields(self) -> [(Quoted, Quoted)] {} +} diff --git a/noir_stdlib/src/option.nr b/noir_stdlib/src/option.nr index c94a1cf836e..df020e75615 100644 --- a/noir_stdlib/src/option.nr +++ b/noir_stdlib/src/option.nr @@ -57,7 +57,7 @@ impl Option { } /// Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value - fn expect(self, message: fmtstr) -> T { + fn expect(self, message: fmtstr) -> T { assert(self.is_some(), message); self._value } diff --git a/noir_stdlib/src/schnorr.nr b/noir_stdlib/src/schnorr.nr index c63915061cb..24ca514025c 100644 --- a/noir_stdlib/src/schnorr.nr +++ b/noir_stdlib/src/schnorr.nr @@ -1,6 +1,6 @@ #[foreign(schnorr_verify)] // docs:start:schnorr_verify -pub fn verify_signature( +pub fn verify_signature( public_key_x: Field, public_key_y: Field, signature: [u8; 64], diff --git a/noir_stdlib/src/sha256.nr b/noir_stdlib/src/sha256.nr index 6057876951d..96ea8bb82c3 100644 --- a/noir_stdlib/src/sha256.nr +++ b/noir_stdlib/src/sha256.nr @@ -16,8 +16,8 @@ fn msg_u8_to_u32(msg: [u8; 64]) -> [u32; 16] { msg32 } // SHA-256 hash function -pub fn digest(msg: [u8; N]) -> [u8; 32] { - sha256_var(msg, N) +pub fn digest(msg: [u8; N]) -> [u8; 32] { + sha256_var(msg, N as u64) } fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { @@ -39,12 +39,12 @@ fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { // Variable size SHA-256 hash #[no_predicates] -pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { +pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { let mut msg_block: [u8; 64] = [0; 64]; let mut h: [u32; 8] = [1779033703, 3144134277, 1013904242, 2773480762, 1359893119, 2600822924, 528734635, 1541459225]; // Intermediate hash, starting with the canonical initial value let mut i: u64 = 0; // Message byte pointer for k in 0..N { - if k < message_size { + if k as u64 < message_size { // Populate msg_block msg_block[i] = msg[k]; i = i + 1; diff --git a/noir_stdlib/src/sha512.nr b/noir_stdlib/src/sha512.nr index 4e46840ebb7..993f328341f 100644 --- a/noir_stdlib/src/sha512.nr +++ b/noir_stdlib/src/sha512.nr @@ -88,7 +88,7 @@ fn msg_u8_to_u64(msg: [u8; 128]) -> [u64; 16] { } // SHA-512 hash function 
#[no_predicates] -pub fn digest(msg: [u8; N]) -> [u8; 64] { +pub fn digest(msg: [u8; N]) -> [u8; 64] { let mut msg_block: [u8; 128] = [0; 128]; // noir-fmt:ignore let mut h: [u64; 8] = [7640891576956012808, 13503953896175478587, 4354685564936845355, 11912009170470909681, 5840696475078001361, 11170449401992604703, 2270897969802886507, 6620516959819538809]; // Intermediate hash, starting with the canonical initial value diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index bf05ae0cf64..1a40abcf704 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -44,7 +44,7 @@ impl [T] { self } - pub fn as_array(self) -> [T; N] { + pub fn as_array(self) -> [T; N] { assert(self.len() == N); let mut array = [crate::unsafe::zeroed(); N]; @@ -53,4 +53,53 @@ impl [T] { } array } + + // Apply a function to each element of the slice, returning a new slice + // containing the mapped elements. + pub fn map(self, f: fn[Env](T) -> U) -> [U] { + let mut ret = &[]; + for elem in self { + ret = ret.push_back(f(elem)); + } + ret + } + + // Apply a function to each element of the slice and an accumulator value, + // returning the final accumulated value. This function is also sometimes + // called `foldl`, `fold_left`, `reduce`, or `inject`. + pub fn fold(self, mut accumulator: U, f: fn[Env](U, T) -> U) -> U { + for elem in self { + accumulator = f(accumulator, elem); + } + accumulator + } + + // Apply a function to each element of the slice and an accumulator value, + // returning the final accumulated value. Unlike fold, reduce uses the first + // element of the given slice as its starting accumulator value. + pub fn reduce(self, f: fn[Env](T, T) -> T) -> T { + let mut accumulator = self[0]; + for i in 1..self.len() { + accumulator = f(accumulator, self[i]); + } + accumulator + } + + // Returns true if all elements in the slice satisfy the predicate + pub fn all(self, predicate: fn[Env](T) -> bool) -> bool { + let mut ret = true; + for elem in self { + ret &= predicate(elem); + } + ret + } + + // Returns true if any element in the slice satisfies the predicate + pub fn any(self, predicate: fn[Env](T) -> bool) -> bool { + let mut ret = false; + for elem in self { + ret |= predicate(elem); + } + ret + } } diff --git a/noir_stdlib/src/string.nr b/noir_stdlib/src/string.nr index 12b5a1e75ec..5f8f3de775d 100644 --- a/noir_stdlib/src/string.nr +++ b/noir_stdlib/src/string.nr @@ -1,5 +1,5 @@ use crate::collections::vec::Vec; -impl str { +impl str { /// Converts the given string into a byte array #[builtin(str_as_bytes)] pub fn as_bytes(self) -> [u8; N] {} diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr index e6a7e03fefc..f8db6079193 100644 --- a/noir_stdlib/src/test.nr +++ b/noir_stdlib/src/test.nr @@ -1,5 +1,5 @@ #[oracle(create_mock)] -unconstrained fn create_mock_oracle(name: str) -> Field {} +unconstrained fn create_mock_oracle(name: str) -> Field {} #[oracle(set_mock_params)] unconstrained fn set_mock_params_oracle
<P>
(id: Field, params: P) {} @@ -21,7 +21,7 @@ struct OracleMock { } impl OracleMock { - unconstrained pub fn mock(name: str) -> Self { + unconstrained pub fn mock(name: str) -> Self { Self { id: create_mock_oracle(name) } } diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index 829ab09ee1e..e99818bafa0 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -66,7 +66,7 @@ impl U128 { bytes } - pub fn from_hex(hex: str) -> U128 { + pub fn from_hex(hex: str) -> U128 { let N = N as u32; let bytes = hex.as_bytes(); // string must starts with "0x" diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh index c3ed476200a..b0d55b6ff1d 100755 --- a/scripts/install_bb.sh +++ b/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.41.0" +VERSION="0.43.0" BBUP_PATH=~/.bb/bbup diff --git a/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr b/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr index cb853e48c30..2d38f5b1063 100644 --- a/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr +++ b/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr @@ -9,4 +9,4 @@ fn main( s: Field ) -> pub bool { eddsa_poseidon_verify(pub_key_x, pub_key_y, s, r8_x, r8_y, msg) -} +} \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr b/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr index 1c9bbfe61bf..5fc9e313179 100644 --- a/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr +++ b/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr @@ -9,4 +9,4 @@ fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { } results -} +} \ No newline at end of file diff --git a/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr b/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr index 3edb47e9f72..3e319d2b025 100644 --- a/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr +++ b/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr @@ -9,4 +9,4 @@ fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { } results -} +} \ No newline at end of file diff --git a/test_programs/benchmarks/bench_sha256/src/main.nr b/test_programs/benchmarks/bench_sha256/src/main.nr index fc873fb4afb..c94d359239d 100644 --- a/test_programs/benchmarks/bench_sha256/src/main.nr +++ b/test_programs/benchmarks/bench_sha256/src/main.nr @@ -1,4 +1,3 @@ -use dep::std; fn main(input: [u8; 2]) -> pub [u8; 32] { std::hash::sha256(input) diff --git a/test_programs/benchmarks/bench_sha256_100/src/main.nr b/test_programs/benchmarks/bench_sha256_100/src/main.nr index 6df856a83fc..48b7fbac93c 100644 --- a/test_programs/benchmarks/bench_sha256_100/src/main.nr +++ b/test_programs/benchmarks/bench_sha256_100/src/main.nr @@ -1,3 +1,4 @@ + global SIZE = 100; fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { @@ -7,4 +8,4 @@ fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { } results -} +} \ No newline at end of file diff --git a/test_programs/benchmarks/bench_sha256_30/src/main.nr b/test_programs/benchmarks/bench_sha256_30/src/main.nr index 220c1cfbbed..37c742f9667 100644 --- a/test_programs/benchmarks/bench_sha256_30/src/main.nr +++ b/test_programs/benchmarks/bench_sha256_30/src/main.nr @@ -1,3 +1,4 @@ + global SIZE = 30; fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { @@ -7,4 +8,4 @@ fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { } results -} +} \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_array/Nargo.toml 
b/test_programs/compile_failure/assert_constant_dynamic_array/Nargo.toml new file mode 100644 index 00000000000..6171770b62b --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_array/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_array" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_array/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_array/src/main.nr new file mode 100644 index 00000000000..43da3ef0eaa --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_array/src/main.nr @@ -0,0 +1,5 @@ +fn main( + dynamic_one: Field, // == 1 +) { + assert_constant([dynamic_one]); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_plus/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_plus/Nargo.toml new file mode 100644 index 00000000000..e5583e53126 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_plus/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_plus" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_plus/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_plus/src/main.nr new file mode 100644 index 00000000000..f8a377092a2 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_plus/src/main.nr @@ -0,0 +1,5 @@ +fn main( + dynamic_one: Field, // == 1 +) { + assert_constant(dynamic_one + 1 == 3); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_slice/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_slice/Nargo.toml new file mode 100644 index 00000000000..d1d068a79b8 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_slice/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_slice/src/main.nr new file mode 100644 index 00000000000..f07002d7d4c --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_slice/src/main.nr @@ -0,0 +1,5 @@ +fn main( + dynamic_one: Field, // == 1 +) { + assert_constant(&[dynamic_one]); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_array/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_struct_array/Nargo.toml new file mode 100644 index 00000000000..18781f4d57d --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_array/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_struct_array" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_array/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_struct_array/src/main.nr new file mode 100644 index 00000000000..b9f4dceafc0 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_array/src/main.nr @@ -0,0 +1,12 @@ +struct Foo { + field: Field, + array: [Field; 3], + slice: [Field], +} + +fn main( + dynamic_one: Field, // == 1 +) { + let foo_dynamic_array = Foo { field: 0, array: 
[dynamic_one, 2, 3], slice: &[] }; + assert_constant(foo_dynamic_array); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_field/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_struct_field/Nargo.toml new file mode 100644 index 00000000000..173ea44101a --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_field/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_struct_field" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_field/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_struct_field/src/main.nr new file mode 100644 index 00000000000..e7986c93b6b --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_field/src/main.nr @@ -0,0 +1,12 @@ +struct Foo { + field: Field, + array: [Field; 3], + slice: [Field], +} + +fn main( + dynamic_one: Field, // == 1 +) { + let foo_dynamic = Foo { field: dynamic_one, array: [1, 2, 3], slice: &[] }; + assert_constant(foo_dynamic); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_slice/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_struct_slice/Nargo.toml new file mode 100644 index 00000000000..426f4826a0b --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_struct_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_struct_slice/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_struct_slice/src/main.nr new file mode 100644 index 00000000000..c775b563928 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_struct_slice/src/main.nr @@ -0,0 +1,12 @@ +struct Foo { + field: Field, + array: [Field; 3], + slice: [Field], +} + +fn main( + dynamic_one: Field, // == 1 +) { + let foo_dynamic_slice = Foo { field: 0, array: [1, 2, 3], slice: &[dynamic_one] }; + assert_constant(foo_dynamic_slice); +} diff --git a/test_programs/compile_failure/assert_constant_dynamic_tuple/Nargo.toml b/test_programs/compile_failure/assert_constant_dynamic_tuple/Nargo.toml new file mode 100644 index 00000000000..de7b2031300 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_tuple/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_dynamic_tuple" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/assert_constant_dynamic_tuple/src/main.nr b/test_programs/compile_failure/assert_constant_dynamic_tuple/src/main.nr new file mode 100644 index 00000000000..579a5a0991f --- /dev/null +++ b/test_programs/compile_failure/assert_constant_dynamic_tuple/src/main.nr @@ -0,0 +1,5 @@ +fn main( + dynamic_one: Field, // == 1 +) { + assert_constant((dynamic_one, 2)); +} diff --git a/test_programs/compile_failure/assert_constant_false/Nargo.toml b/test_programs/compile_failure/assert_constant_false/Nargo.toml new file mode 100644 index 00000000000..cb8d59f4293 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_false/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant_false" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of 
file diff --git a/test_programs/compile_failure/assert_constant_false/src/main.nr b/test_programs/compile_failure/assert_constant_false/src/main.nr new file mode 100644 index 00000000000..f4e98cfec37 --- /dev/null +++ b/test_programs/compile_failure/assert_constant_false/src/main.nr @@ -0,0 +1,3 @@ +fn main() { + std::static_assert(false, ""); +} diff --git a/test_programs/compile_failure/builtin_function_declaration/Nargo.toml b/test_programs/compile_failure/builtin_function_declaration/Nargo.toml index 3835292a6ba..80312a7aec1 100644 --- a/test_programs/compile_failure/builtin_function_declaration/Nargo.toml +++ b/test_programs/compile_failure/builtin_function_declaration/Nargo.toml @@ -2,6 +2,6 @@ name = "builtin_function_declaration" type = "bin" authors = [""] -compiler_version = ">=0.23.0" +compiler_version = ">=0.31.0" -[dependencies] +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/builtin_function_declaration/src/main.nr b/test_programs/compile_failure/builtin_function_declaration/src/main.nr index ed376557371..473b5405691 100644 --- a/test_programs/compile_failure/builtin_function_declaration/src/main.nr +++ b/test_programs/compile_failure/builtin_function_declaration/src/main.nr @@ -7,4 +7,4 @@ fn to_le_bits(_x: Field, _bit_size: u32) -> [u1] {} fn main(x: Field) -> pub u1 { let bits = to_le_bits(x, 100); bits[0] -} +} \ No newline at end of file diff --git a/test_programs/compile_failure/negate_unsigned/src/main.nr b/test_programs/compile_failure/negate_unsigned/src/main.nr index 4d3c5abe5a4..af4802a4ce6 100644 --- a/test_programs/compile_failure/negate_unsigned/src/main.nr +++ b/test_programs/compile_failure/negate_unsigned/src/main.nr @@ -1,3 +1,4 @@ + fn main() { let var = -1 as u8; std::println(var); diff --git a/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml b/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml new file mode 100644 index 00000000000..4ea2b75aa85 --- /dev/null +++ b/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "non_comptime_local_fn_call" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr b/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr new file mode 100644 index 00000000000..7cbf4213425 --- /dev/null +++ b/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr @@ -0,0 +1,9 @@ +fn main() { + comptime { + let _a = id(3); + } +} + +fn id(x: Field) -> Field { + x +} \ No newline at end of file diff --git a/test_programs/compile_failure/orphaned_trait_impl/src/main.nr b/test_programs/compile_failure/orphaned_trait_impl/src/main.nr index dd04aa454b2..dfd88d8f074 100644 --- a/test_programs/compile_failure/orphaned_trait_impl/src/main.nr +++ b/test_programs/compile_failure/orphaned_trait_impl/src/main.nr @@ -1,4 +1,4 @@ -impl crate1::MyTrait for crate2::MyStruct { +impl dep::crate1::MyTrait for dep::crate2::MyStruct { } fn main(x: Field, y: pub Field) { diff --git a/test_programs/compile_failure/regression_5008/Nargo.toml b/test_programs/compile_failure/regression_5008/Nargo.toml new file mode 100644 index 00000000000..920c00660cf --- /dev/null +++ b/test_programs/compile_failure/regression_5008/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5008" +type = "bin" +authors = [""] +compiler_version = ">=0.28.0" + +[dependencies] diff --git 
a/test_programs/compile_failure/regression_5008/src/main.nr b/test_programs/compile_failure/regression_5008/src/main.nr new file mode 100644 index 00000000000..6d9645ee6eb --- /dev/null +++ b/test_programs/compile_failure/regression_5008/src/main.nr @@ -0,0 +1,17 @@ +struct Bar { + value: Field, +} + +struct Foo{ + bar: &mut Bar, +} + +impl Foo { + unconstrained fn crash_fn(self) {} +} + +fn main() { + let foo = Foo { bar: &mut Bar { value: 0 } }; + + foo.crash_fn(); +} diff --git a/test_programs/compile_failure/restricted_bit_sizes/src/main.nr b/test_programs/compile_failure/restricted_bit_sizes/src/main.nr index a3fea13cc3a..4298c2052a7 100644 --- a/test_programs/compile_failure/restricted_bit_sizes/src/main.nr +++ b/test_programs/compile_failure/restricted_bit_sizes/src/main.nr @@ -1,5 +1,3 @@ -use std::assert_constant; - fn main() -> pub u63 { 5 } diff --git a/test_programs/compile_failure/static_assert_dynamic_array_len/Nargo.toml b/test_programs/compile_failure/static_assert_dynamic_array_len/Nargo.toml new file mode 100644 index 00000000000..1c6c13ce225 --- /dev/null +++ b/test_programs/compile_failure/static_assert_dynamic_array_len/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "static_assert_dynamic_array_len" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/static_assert_dynamic_array_len/src/main.nr b/test_programs/compile_failure/static_assert_dynamic_array_len/src/main.nr new file mode 100644 index 00000000000..9e4665f8ad8 --- /dev/null +++ b/test_programs/compile_failure/static_assert_dynamic_array_len/src/main.nr @@ -0,0 +1,5 @@ +fn main( + dynamic_one: Field, // == 1 +) { + std::static_assert([1, 2, dynamic_one].len() == 4, ""); +} diff --git a/test_programs/compile_failure/static_assert_dynamic_slice/Nargo.toml b/test_programs/compile_failure/static_assert_dynamic_slice/Nargo.toml new file mode 100644 index 00000000000..45e39ae24a8 --- /dev/null +++ b/test_programs/compile_failure/static_assert_dynamic_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "static_assert_dynamic_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/static_assert_dynamic_slice/src/main.nr b/test_programs/compile_failure/static_assert_dynamic_slice/src/main.nr new file mode 100644 index 00000000000..6621b8a1fb3 --- /dev/null +++ b/test_programs/compile_failure/static_assert_dynamic_slice/src/main.nr @@ -0,0 +1,15 @@ +fn main( + dynamic_one: Field, // == 1 + dynamic_two: Field, // == 2 +) { + // length unknown at compile time + let mut dynamic_built_slice_pair = &[]; + if dynamic_one == 1 { + dynamic_built_slice_pair = dynamic_built_slice_pair.push_back(dynamic_one); + } + if dynamic_two == 2 { + dynamic_built_slice_pair = dynamic_built_slice_pair.push_back(dynamic_two); + } + + std::static_assert(dynamic_built_slice_pair.len() == 3, ""); +} diff --git a/test_programs/compile_failure/static_assert_plus/Nargo.toml b/test_programs/compile_failure/static_assert_plus/Nargo.toml new file mode 100644 index 00000000000..fdb90c0dc1c --- /dev/null +++ b/test_programs/compile_failure/static_assert_plus/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "static_assert_plus" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/static_assert_plus/src/main.nr 
b/test_programs/compile_failure/static_assert_plus/src/main.nr new file mode 100644 index 00000000000..2241743ddb0 --- /dev/null +++ b/test_programs/compile_failure/static_assert_plus/src/main.nr @@ -0,0 +1,7 @@ +fn main() { + let x = 2; + let y = 3; + let xy = x + y; + + std::static_assert(xy == 6, "2 + 3 != 6"); +} diff --git a/test_programs/compile_failure/turbofish_generic_count/src/main.nr b/test_programs/compile_failure/turbofish_generic_count/src/main.nr index 4091b2f0581..a5f46adb6a5 100644 --- a/test_programs/compile_failure/turbofish_generic_count/src/main.nr +++ b/test_programs/compile_failure/turbofish_generic_count/src/main.nr @@ -1,3 +1,4 @@ + struct Bar { one: Field, two: Field, diff --git a/test_programs/compile_success_contract/abi_attribute/Nargo.toml b/test_programs/compile_success_contract/abi_attribute/Nargo.toml new file mode 100644 index 00000000000..9fcd61c235e --- /dev/null +++ b/test_programs/compile_success_contract/abi_attribute/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "abi_attribute" +type = "contract" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_contract/abi_attribute/src/main.nr b/test_programs/compile_success_contract/abi_attribute/src/main.nr new file mode 100644 index 00000000000..0c376b54686 --- /dev/null +++ b/test_programs/compile_success_contract/abi_attribute/src/main.nr @@ -0,0 +1,9 @@ +contract Foo { + #[abi(foo)] + global foo: Field = 42; + + #[abi(bar)] + struct Bar { + inner: Field + } +} \ No newline at end of file diff --git a/test_programs/compile_success_contract/recursive_method/Nargo.toml b/test_programs/compile_success_contract/recursive_method/Nargo.toml new file mode 100644 index 00000000000..8142e5b3278 --- /dev/null +++ b/test_programs/compile_success_contract/recursive_method/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "recursive_method" +type = "contract" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_contract/recursive_method/src/main.nr b/test_programs/compile_success_contract/recursive_method/src/main.nr new file mode 100644 index 00000000000..6fd4bf3338d --- /dev/null +++ b/test_programs/compile_success_contract/recursive_method/src/main.nr @@ -0,0 +1,6 @@ +contract Foo { + #[recursive] + fn contract_entrypoint() -> pub Field { + 1 + } +} diff --git a/test_programs/compile_success_empty/assert_constant/Nargo.toml b/test_programs/compile_success_empty/assert_constant/Nargo.toml new file mode 100644 index 00000000000..f18d4828fdf --- /dev/null +++ b/test_programs/compile_success_empty/assert_constant/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_constant" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/assert_constant/src/main.nr b/test_programs/compile_success_empty/assert_constant/src/main.nr new file mode 100644 index 00000000000..6910a2d17b2 --- /dev/null +++ b/test_programs/compile_success_empty/assert_constant/src/main.nr @@ -0,0 +1,59 @@ +use std::static_assert; + +global GLOBAL_ONE = 1; +global GLOBAL_TWO = 2; +global GLOBAL_THREE = GLOBAL_ONE + GLOBAL_TWO; + +// contents known at compile time +// length known at compile time +global GLOBAL_ARRAY_PAIR = [GLOBAL_ONE, GLOBAL_TWO]; +global GLOBAL_SLICE_PAIR = &[GLOBAL_ONE, GLOBAL_TWO]; + +struct Foo { + field: Field, + array: [Field; 3], + slice: [Field], +} + +fn main( + dynamic_one: Field, // == 1 + dynamic_two: Field // == 2 +) { + // contents unknown at compile 
time + // length known at compile time + let dynamic_array_pair = [dynamic_one, dynamic_two]; + let dynamic_slice_pair = &[dynamic_one, dynamic_two]; + + assert_constant(true); + assert_constant(false); + + assert_constant(2 == 2); + assert_constant(1 + 1 == 2); + + // assert_constant doesn't check for true + assert_constant(1 + 1 == 3); + + let local_one = 1; + let local_two = 2; + let local_three = local_one + local_two; + let local_array_pair = [local_one, local_two]; + let local_slice_pair = &[local_one, local_two]; + + assert_constant(local_one); + assert_constant(local_three); + assert_constant(local_array_pair); + assert_constant(local_slice_pair); + + assert_constant(GLOBAL_ONE); + assert_constant(GLOBAL_THREE); + assert_constant(GLOBAL_ARRAY_PAIR); + assert_constant(GLOBAL_SLICE_PAIR); + + assert_constant([1, 2, dynamic_one].len() == 4); + + static_assert(dynamic_array_pair.len() == 2, ""); + static_assert(dynamic_slice_pair.len() == 2, ""); + + let foo = Foo { field: 0, array: [1, 2, 3], slice: &[] }; + assert_constant(foo); +} diff --git a/test_programs/compile_success_empty/comptime_type_definition/Nargo.toml b/test_programs/compile_success_empty/comptime_type_definition/Nargo.toml new file mode 100644 index 00000000000..099545a9e71 --- /dev/null +++ b/test_programs/compile_success_empty/comptime_type_definition/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_type_definition" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/comptime_type_definition/src/main.nr b/test_programs/compile_success_empty/comptime_type_definition/src/main.nr new file mode 100644 index 00000000000..025f6a0b0bf --- /dev/null +++ b/test_programs/compile_success_empty/comptime_type_definition/src/main.nr @@ -0,0 +1,13 @@ +fn main() {} + +#[my_comptime_fn] +struct MyType { + field1: [A; 10], + field2: (B, C), +} + +comptime fn my_comptime_fn(typ: TypeDefinition) { + let _ = typ.as_type(); + assert_eq(typ.generics().len(), 3); + assert_eq(typ.fields().len(), 2); +} diff --git a/test_programs/execution_success/regression_5202/Nargo.toml b/test_programs/compile_success_empty/derive_impl/Nargo.toml similarity index 59% rename from test_programs/execution_success/regression_5202/Nargo.toml rename to test_programs/compile_success_empty/derive_impl/Nargo.toml index da3da06a306..26a6020a6b1 100644 --- a/test_programs/execution_success/regression_5202/Nargo.toml +++ b/test_programs/compile_success_empty/derive_impl/Nargo.toml @@ -1,8 +1,7 @@ [package] -name = "regression_5202" +name = "derive_impl" type = "bin" authors = [""] compiler_version = ">=0.30.0" [dependencies] -fraction = { path = "fraction" } diff --git a/test_programs/compile_success_empty/derive_impl/src/main.nr b/test_programs/compile_success_empty/derive_impl/src/main.nr new file mode 100644 index 00000000000..9636e4c7383 --- /dev/null +++ b/test_programs/compile_success_empty/derive_impl/src/main.nr @@ -0,0 +1,49 @@ +comptime fn derive_default(typ: TypeDefinition) -> Quoted { + let generics: [Quoted] = typ.generics(); + assert_eq( + generics.len(), 0, "derive_default: Deriving Default on generic types is currently unimplemented" + ); + + let type_name = typ.as_type(); + let fields = typ.fields(); + + let fields = join(make_field_exprs(fields)); + + quote { + impl Default for $type_name { + fn default() -> Self { + Self { $fields } + } + } + } +} + +#[derive_default] +struct Foo { + x: Field, + y: Bar, +} + +#[derive_default] +struct Bar 
{} + +comptime fn make_field_exprs(fields: [(Quoted, Quoted)]) -> [Quoted] { + let mut result = &[]; + for my_field in fields { + let name = my_field.0; + result = result.push_back(quote { $name: Default::default(), }); + } + result +} + +comptime fn join(slice: [Quoted]) -> Quoted { + let mut result = quote {}; + for elem in slice { + result = quote { $result $elem }; + } + result +} + +fn main() { + let _foo: Foo = Default::default(); +} diff --git a/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml b/test_programs/compile_success_empty/impl_where_clause/Nargo.toml similarity index 62% rename from test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml rename to test_programs/compile_success_empty/impl_where_clause/Nargo.toml index ef9bdce2640..58e774846cf 100644 --- a/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml +++ b/test_programs/compile_success_empty/impl_where_clause/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "impl_with_where_clause" +name = "impl_where_clause" type = "bin" authors = [""] diff --git a/test_programs/compile_success_empty/impl_where_clause/src/main.nr b/test_programs/compile_success_empty/impl_where_clause/src/main.nr new file mode 100644 index 00000000000..2f3223efaae --- /dev/null +++ b/test_programs/compile_success_empty/impl_where_clause/src/main.nr @@ -0,0 +1,34 @@ +struct MyStruct { + a: u32, + b: T, +} + +struct InnerStruct { + a: Field, + b: Field, +} + +trait MyEq { + fn my_eq(self, other: Self) -> bool; +} + +impl MyEq for InnerStruct { + fn my_eq(self, other: InnerStruct) -> bool { + (self.a == other.a) & (self.b == other.b) + } +} + +impl MyStruct where T: MyEq { + fn my_eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.my_eq(other.b) + } +} + +fn main() { + let inner = InnerStruct { a: 1, b: 2 }; + let my_struct = MyStruct { a: 5, b: inner }; + assert(my_struct.my_eq(my_struct)); + + let mut my_struct_new = MyStruct { a: 5, b: InnerStruct { a: 10, b: 15 } }; + assert(my_struct_new.my_eq(my_struct_new)); +} diff --git a/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/test_programs/compile_success_empty/intrinsic_die/src/main.nr index c6e269c155d..17aaf02c283 100644 --- a/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -1,6 +1,5 @@ // This test checks that we perform dead-instruction-elimination on intrinsic functions. fn main(x: Field) { - let hash = std::hash::pedersen_commitment([x]); let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: g1_x, y: g1_y, is_infinite: false }; diff --git a/test_programs/compile_success_empty/macros/src/main.nr b/test_programs/compile_success_empty/macros/src/main.nr index f466226d575..587c2c4c077 100644 --- a/test_programs/compile_success_empty/macros/src/main.nr +++ b/test_programs/compile_success_empty/macros/src/main.nr @@ -1,4 +1,4 @@ -comptime fn my_macro(x: Field, y: Field) -> Expr { +comptime fn my_macro(x: Field, y: Field) -> Quoted { // Current version of macros in Noir are not hygienic // so we can quote a and b here and expect them to resolve // to the a and b in main at the callsite of my_macro. 
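The `my_macro` signature change above (returning `Quoted` rather than `Expr`) follows the same pattern as the new `derive_impl` test: comptime functions assemble their output by splicing quoted fragments together with `$` interpolation. A minimal sketch of that pattern is shown below; the helper name and the spliced assertion are hypothetical illustrations, not code taken from this diff.

```rust
// Sketch only (hypothetical helper, not part of this diff): builds one
// `Quoted` value by splicing a quoted `assert` per field name, in the same
// style as the `join`/`make_field_exprs` helpers in the derive_impl test.
comptime fn make_assertions(names: [Quoted]) -> Quoted {
    let mut result = quote {};
    for name in names {
        // `$result` and `$name` interpolate previously quoted tokens.
        result = quote { $result assert($name != 0); };
    }
    result
}
```

As with `derive_default` above, the returned `Quoted` value only becomes code at the site where the macro attribute is applied.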
diff --git a/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml b/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml new file mode 100644 index 00000000000..980e5db588a --- /dev/null +++ b/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "numeric_generics_explicit" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr b/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr new file mode 100644 index 00000000000..7c4f7761ff6 --- /dev/null +++ b/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr @@ -0,0 +1,111 @@ +// Regression test that a global of the same name does not trigger a duplicate definition error +global N = 1000; + +fn main() { + let a = id([1, 2]); + let b = id([1, 2, 3]); + + let itWorks1 = MyStruct { data: a }; + assert(itWorks1.data[1] == 2); + let itWorks2 = MyStruct { data: b }; + assert(itWorks2.data[1] == 2); + + let c = [1, 2]; + let itAlsoWorks = MyStruct { data: c }; + assert(itAlsoWorks.data[1] == 2); + + assert(foo(itWorks2).data[0] == itWorks2.data[0] + 1); + + double_numeric_generics_test(); + + let my_type = PublicStorage::read::(); + assert(my_type.a == 1); + assert(my_type.b == 2); + assert(my_type.c == 3); + + let foo = baz::<10>(); + assert(foo.data == [1; 10]); } + +// Used in the signature of a function +fn id(x: [Field; I]) -> [Field; I] { + x +} + +// Used as a field of a struct +struct MyStruct { + data: [Field; S], +} + +// Used in an impl +impl MyStruct { + fn insert(mut self: Self, index: Field, elem: Field) -> Self { + // Regression test for numeric generics on impls + assert(index as u32 < S); + + self.data[index] = elem; + self + } +} + +fn foo(mut s: MyStruct<2+1>) -> MyStruct<10/2-2> { + s.data[0] = s.data[0] + 1; + s +} + +fn baz() -> MyStruct { + MyStruct { data: [1; N] } +} + +fn double() -> u32 { + // Used as an expression + N * 2 +} + +fn double_numeric_generics_test() { + // Example usage of numeric generic arguments.
+ assert(double::<9>() == 18); + assert(double::<123>() == 246); + assert(double::<7 + 8>() == 30); +} + +struct MyType { + a: Field, + b: Field, + c: Field, +} + +impl Deserialize for MyType { + fn deserialize(fields: [Field; N]) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2] } + } +} + +trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; +} + +struct PublicStorage {} + +impl PublicStorage { + fn read() -> T where T: Deserialize { + // Used as a type within a function body + let mut fields: [Field; N] = [0; N]; + // Used as a loop bound + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } +} + +// Check that we can thread numeric generics into nested structs +// and also that we can handle nested structs with numeric generics +// which are declared after the parent struct +struct NestedNumeric { + a: Field, + b: InnerNumeric +} +struct InnerNumeric { + inner: [u32; N], +} diff --git a/test_programs/compile_success_empty/reexports/src/main.nr b/test_programs/compile_success_empty/reexports/src/main.nr index 0fd65a33564..ed469ff77d0 100644 --- a/test_programs/compile_success_empty/reexports/src/main.nr +++ b/test_programs/compile_success_empty/reexports/src/main.nr @@ -1,4 +1,4 @@ -use reexporting_lib::{FooStruct, MyStruct, lib}; +use dep::reexporting_lib::{FooStruct, MyStruct, lib}; fn main() { let x: FooStruct = MyStruct { inner: 0 }; diff --git a/test_programs/compile_success_empty/regression_4635/src/main.nr b/test_programs/compile_success_empty/regression_4635/src/main.nr index 23918e30785..350b60ba3f7 100644 --- a/test_programs/compile_success_empty/regression_4635/src/main.nr +++ b/test_programs/compile_success_empty/regression_4635/src/main.nr @@ -8,7 +8,7 @@ impl FromField for Field { } } -trait Deserialize { +trait Deserialize { fn deserialize(fields: [Field; N]) -> Self; } diff --git a/test_programs/compile_success_empty/static_assert/Nargo.toml b/test_programs/compile_success_empty/static_assert/Nargo.toml new file mode 100644 index 00000000000..5dabd7803e3 --- /dev/null +++ b/test_programs/compile_success_empty/static_assert/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "static_assert" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/static_assert/src/main.nr b/test_programs/compile_success_empty/static_assert/src/main.nr new file mode 100644 index 00000000000..e61d9388ceb --- /dev/null +++ b/test_programs/compile_success_empty/static_assert/src/main.nr @@ -0,0 +1,46 @@ +use std::static_assert; + +global GLOBAL_ONE = 1; +global GLOBAL_TWO = 2; +global GLOBAL_THREE = GLOBAL_ONE + GLOBAL_TWO; + +// contents known at compile time +// length known at compile time +global GLOBAL_ARRAY_PAIR = [GLOBAL_ONE, GLOBAL_TWO]; +global GLOBAL_SLICE_PAIR = &[GLOBAL_ONE, GLOBAL_TWO]; + +struct Foo { + field: Field, + array: [Field; 3], + slice: [Field], +} + +fn main( + dynamic_one: Field, // == 1 + dynamic_two: Field // == 2 +) { + // contents unknown at compile time + // length known at compile time + let dynamic_array_pair = [dynamic_one, dynamic_two]; + let dynamic_slice_pair = &[dynamic_one, dynamic_two]; + + static_assert(true, ""); + + static_assert(1 + 1 == 2, ""); + + let x = 2; + let y = 3; + let xy = x + y; + static_assert(xy == 5, ""); + + static_assert(3 == GLOBAL_THREE, ""); + + static_assert(GLOBAL_ARRAY_PAIR.len() == 2, ""); + static_assert(GLOBAL_SLICE_PAIR.len() == 2, ""); + + static_assert(dynamic_array_pair.len() ==
2, ""); + static_assert(dynamic_slice_pair.len() == 2, ""); + + assert_constant([1, 2, dynamic_one].len() == 4); + static_assert([1, 2, dynamic_one].len() == 3, ""); +} diff --git a/test_programs/compile_success_empty/trait_generics/src/main.nr b/test_programs/compile_success_empty/trait_generics/src/main.nr index 30b2e79d579..15591f2f2ea 100644 --- a/test_programs/compile_success_empty/trait_generics/src/main.nr +++ b/test_programs/compile_success_empty/trait_generics/src/main.nr @@ -15,7 +15,7 @@ trait MyInto { fn into(self) -> T; } -impl MyInto<[U; N]> for [T; N] where T: MyInto { +impl MyInto<[U; N]> for [T; N] where T: MyInto { fn into(self) -> [U; N] { self.map(|x: T| x.into()) } @@ -29,7 +29,7 @@ impl MyInto for Field { /// Serialize example -trait Serializable { +trait Serializable { fn serialize(self) -> [Field; N]; } diff --git a/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml b/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml new file mode 100644 index 00000000000..672569634ea --- /dev/null +++ b/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "trait_impl_with_where_clause" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr b/test_programs/compile_success_empty/trait_impl_with_where_clause/src/main.nr similarity index 100% rename from test_programs/compile_success_empty/impl_with_where_clause/src/main.nr rename to test_programs/compile_success_empty/trait_impl_with_where_clause/src/main.nr diff --git a/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr b/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr index a4207794a8a..ab0ae9a48b8 100644 --- a/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr +++ b/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr @@ -1,2 +1,2 @@ -use library::ReExportMeFromAnotherLib; +use dep::library::ReExportMeFromAnotherLib; fn main(_x: ReExportMeFromAnotherLib) {} diff --git a/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr b/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr index e3a1539ea65..8e84662ed03 100644 --- a/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr +++ b/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr @@ -1,2 +1,2 @@ // Re-export -use library2::ReExportMeFromAnotherLib; +use dep::library2::ReExportMeFromAnotherLib; diff --git a/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr b/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr index 012e823b297..7c6cae4932e 100644 --- a/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr +++ b/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr @@ -1,3 +1,4 @@ + fn main(x: Field) { let a: Field = x / 0; std::println(a); diff --git a/test_programs/execution_failure/regression_5202/Nargo.toml b/test_programs/execution_failure/regression_5202/Nargo.toml new file mode 100644 index 00000000000..dcdc044836e --- /dev/null +++ b/test_programs/execution_failure/regression_5202/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5202" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] diff --git a/test_programs/execution_failure/regression_5202/src/main.nr 
b/test_programs/execution_failure/regression_5202/src/main.nr new file mode 100644 index 00000000000..45ffafca4c7 --- /dev/null +++ b/test_programs/execution_failure/regression_5202/src/main.nr @@ -0,0 +1,41 @@ +trait ToField { + fn to_field(self) -> Field; +} + +impl ToField for bool { fn to_field(self) -> Field { self as Field } } + +unconstrained fn get_unconstrained_option() -> Option { + Option::some(13) +} + +unconstrained fn should_i_assert() -> bool { + false +} + +fn get_magical_boolean() -> bool { + let option = get_unconstrained_option(); + + let pre_assert = option.is_some().to_field(); + + if should_i_assert() { + // Note that `should_i_assert` is unconstrained, so Noir should not be able to infer + // any behavior from the contents of this block. In this case it is actually false, so the + // assertion below is not even executed (if it did it'd fail since the values are not equal). + + assert_eq(option, Option::some(42)); /// <- this seems to be the trigger for the bug + } + + // In my testing, the `option` value exhibits weird behavior from this point on, as if it had been mutated + let post_assert = option.is_some().to_field(); + + // The following expression should be true, but I can get it to evaluate to false depending on how I call it + pre_assert == post_assert +} + +fn main() { + let magic = get_magical_boolean(); + + // One of these asserts should fail. Before #5202, they would both pass + assert_eq(magic, true); + assert_eq(magic, false); +} diff --git a/test_programs/execution_success/regression_5202/fraction/Nargo.toml b/test_programs/execution_success/as_witness/Nargo.toml similarity index 55% rename from test_programs/execution_success/regression_5202/fraction/Nargo.toml rename to test_programs/execution_success/as_witness/Nargo.toml index 82e929d0bc7..18f3f99b5b5 100644 --- a/test_programs/execution_success/regression_5202/fraction/Nargo.toml +++ b/test_programs/execution_success/as_witness/Nargo.toml @@ -1,6 +1,6 @@ [package] -name = "fraction" -type = "lib" +name = "as_witness" +type = "bin" authors = [""] [dependencies] diff --git a/test_programs/execution_success/as_witness/Prover.toml b/test_programs/execution_success/as_witness/Prover.toml new file mode 100644 index 00000000000..cd8a5b5e03c --- /dev/null +++ b/test_programs/execution_success/as_witness/Prover.toml @@ -0,0 +1 @@ +a = 42 \ No newline at end of file diff --git a/test_programs/execution_success/as_witness/src/main.nr b/test_programs/execution_success/as_witness/src/main.nr new file mode 100644 index 00000000000..a24f4af7669 --- /dev/null +++ b/test_programs/execution_success/as_witness/src/main.nr @@ -0,0 +1,5 @@ +// Simple example of calling `std::as_witness` on a value +fn main(a: Field) -> pub Field { + std::as_witness(a); + a +} diff --git a/test_programs/execution_success/diamond_deps_0/src/main.nr b/test_programs/execution_success/diamond_deps_0/src/main.nr index 690d6fc9fc8..ca95c6e0aa8 100644 --- a/test_programs/execution_success/diamond_deps_0/src/main.nr +++ b/test_programs/execution_success/diamond_deps_0/src/main.nr @@ -1,6 +1,6 @@ -use dep1::call_dep1_then_dep2; -use dep2::call_dep2; -use dep2::RESOLVE_THIS; +use dep::dep1::call_dep1_then_dep2; +use dep::dep2::call_dep2; +use dep::dep2::RESOLVE_THIS; fn main(x: Field, y: pub Field) -> pub Field { call_dep1_then_dep2(x, y) + call_dep2(x, y) + RESOLVE_THIS diff --git a/test_programs/execution_success/hashmap/src/main.nr b/test_programs/execution_success/hashmap/src/main.nr index 8cf70cc5970..56b13d6779b 100644 ---
a/test_programs/execution_success/hashmap/src/main.nr +++ b/test_programs/execution_success/hashmap/src/main.nr @@ -3,7 +3,6 @@ mod utils; use std::collections::map::HashMap; use std::hash::BuildHasherDefault; use std::hash::poseidon2::Poseidon2Hasher; -use std::cmp::Eq; use utils::cut; diff --git a/test_programs/execution_success/modulus/src/main.nr b/test_programs/execution_success/modulus/src/main.nr index c7d6a2e2c7d..1627cc0dba2 100644 --- a/test_programs/execution_success/modulus/src/main.nr +++ b/test_programs/execution_success/modulus/src/main.nr @@ -3,6 +3,11 @@ fn main(bn254_modulus_be_bytes: [u8; 32], bn254_modulus_be_bits: [u1; 254]) { // NOTE: The constraints used in this circuit will only work when testing nargo with the plonk bn254 backend assert(modulus_size == 254); + assert_reverse( + std::field::modulus_be_bytes(), + std::field::modulus_le_bytes() + ); + let modulus_be_byte_array = std::field::modulus_be_bytes(); for i in 0..32 { assert(modulus_be_byte_array[i] == bn254_modulus_be_bytes[i]); @@ -21,3 +26,9 @@ fn main(bn254_modulus_be_bytes: [u8; 32], bn254_modulus_be_bits: [u1; 254]) { assert(modulus_le_bits[i] == bn254_modulus_be_bits[253 - i]); } } + +fn assert_reverse(forwards: [u8], backwards: [u8]) { + for i in 0..32 { + assert_eq(forwards[i], backwards[31 - i]); + } +} diff --git a/test_programs/execution_success/regression_4088/src/main.nr b/test_programs/execution_success/regression_4088/src/main.nr index 9e4d7892fc3..12a7afca68c 100644 --- a/test_programs/execution_success/regression_4088/src/main.nr +++ b/test_programs/execution_success/regression_4088/src/main.nr @@ -1,4 +1,4 @@ -trait Serialize { +trait Serialize { fn serialize(self) -> [Field; N]; } @@ -12,7 +12,7 @@ impl Serialize<1> for ValueNote { } } -fn check(serialized_note: [Field; N]) { +fn check(serialized_note: [Field; N]) { assert(serialized_note[0] == 0); } diff --git a/test_programs/execution_success/regression_4124/src/main.nr b/test_programs/execution_success/regression_4124/src/main.nr index 2b0e65a0b6c..6caea017798 100644 --- a/test_programs/execution_success/regression_4124/src/main.nr +++ b/test_programs/execution_success/regression_4124/src/main.nr @@ -1,6 +1,6 @@ use std::option::Option; -trait MyDeserialize { +trait MyDeserialize { fn deserialize(fields: [Field; N]) -> Self; } @@ -10,7 +10,7 @@ impl MyDeserialize<1> for Field { } } -pub fn storage_read() -> [Field; N] { +pub fn storage_read() -> [Field; N] { std::unsafe::zeroed() } diff --git a/test_programs/execution_success/regression_5202/fraction/LICENSE b/test_programs/execution_success/regression_5202/fraction/LICENSE deleted file mode 100644 index 929af4807ca..00000000000 --- a/test_programs/execution_success/regression_5202/fraction/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Resurgence Labs - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/test_programs/execution_success/regression_5202/fraction/README.md b/test_programs/execution_success/regression_5202/fraction/README.md deleted file mode 100644 index 17c81e5555a..00000000000 --- a/test_programs/execution_success/regression_5202/fraction/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# fraction -The Noir repository for accessing Fractions by Resurgence Labs - -# usage -To Use as a Dependency, add the following line to your project's Nargo.toml under [dependencies]: -fraction = { tag = "v1.2.2", git = "https://github.com/resurgencelabs/fraction" } - -Note: This is a highly dynamic repository. The code will change as more features are added, both at the repository level by Resurgence Labs, as well as at the language level itself by the Noir team. - - diff --git a/test_programs/execution_success/regression_5202/fraction/src/lib.nr b/test_programs/execution_success/regression_5202/fraction/src/lib.nr deleted file mode 100644 index 4b20eb6b76c..00000000000 --- a/test_programs/execution_success/regression_5202/fraction/src/lib.nr +++ /dev/null @@ -1,201 +0,0 @@ -use dep::std; - -global MAX: Fraction = Fraction { sign: true, num: 3050913689, den: 1 }; -global MIN: Fraction = Fraction { sign: false, num: 3050913689, den: 1 }; -global ZERO: Fraction = Fraction { sign: true, num: 0, den: 1 }; -global ONE: Fraction = Fraction { sign: true, num: 1, den: 1 }; -global NEGATIVE_ONE: Fraction = Fraction { sign: false, num: 1, den: 1 }; - -struct Fraction { - sign: bool, - num: u32, - den: u32, -} - -// Create a Fraction type variable without lenghtier code -pub fn toFraction(s: bool, n: u32, d: u32) -> Fraction { - assert(d != 0); - Fraction { sign: s, num: n, den: d } -} - -// Swaps the numerator and denominator -fn invertFraction(f: Fraction) -> Fraction { - assert(f.num != 0); - Fraction { sign: f.sign, num: f.den, den: f.num } -} - -// Changes the Sign of the Fraction -fn signChangeFraction(f: Fraction) -> Fraction { - Fraction { sign: !f.sign, num: f.num, den: f.den } -} - -// this method will only work till numerator and denominator values are under 100 -// this has been set for efficiency reasons, and will be modified once the Noir team -// can implement dynamic limit for loops -fn reduceFraction(f: Fraction) -> Fraction { - let mut a = f.num; - let mut b = f.den; - let mut j = 1; - let mut gcd = 1; - - let min = if a > b { b } else { a }; - - for i in 2..100 { - j = i as u32; - if (j <= min) { - if (a % j == 0) & (b % j == 0) { - gcd = j; - } - } - } - - Fraction { sign: f.sign, num: f.num / gcd, den: f.den / gcd } -} - -// Adds two fractions -pub fn addFraction(f1: Fraction, f2: Fraction) -> Fraction { - let mut an = U128::from_integer(f1.num); - let mut ad = U128::from_integer(f1.den); - let mut bn = U128::from_integer(f2.num); - let mut bd = U128::from_integer(f2.den); - let mut m = f1; - let mut n = f2; - - if f1.sign == f2.sign { - if ((ad * bd > U128::from_integer(2000000000)) - | ((an * bd + ad * bn) > U128::from_integer(2000000000)) - | ((an * bd) > U128::from_integer(2000000000)) - | ((ad * bn) > 
U128::from_integer(2000000000))) { - m = reduceFraction(m); - n = reduceFraction(n); - } - an = U128::from_integer(m.num); - ad = U128::from_integer(m.den); - bn = U128::from_integer(n.num); - bd = U128::from_integer(n.den); - if ((ad * bd > U128::from_integer(2000000000)) - | ((an * bd + ad * bn) > U128::from_integer(2000000000)) - | ((an * bd) > U128::from_integer(2000000000)) - | ((ad * bn) > U128::from_integer(2000000000))) { - let mut ddd = (an * bd + ad * bn) / (ad * bd); - let mut factor = U128::from_integer(1); - for _ in 1..5 { - if ddd * U128::from_integer(10) < U128::from_integer(2000000000) { - ddd *= U128::from_integer(10); - factor *= U128::from_integer(10); - } - } - let np: u32 = U128::to_integer(((an * bd + ad * bn) * factor) / (ad * bd)); - let fx: u32 = U128::to_integer(factor); - Fraction { sign: f1.sign, num: np, den: fx } - } else { - Fraction { sign: f1.sign, num: (m.num * n.den + n.num * m.den), den: m.den * n.den } - } - } else if ((an * bd) > (bn * ad)) { - if ((ad * bd > U128::from_integer(2000000000)) - | ((an * bd - ad * bn) > U128::from_integer(2000000000)) - | ((an * bd) > U128::from_integer(2000000000))) { - m = reduceFraction(m); - n = reduceFraction(n); - } - an = U128::from_integer(m.num); - ad = U128::from_integer(m.den); - bn = U128::from_integer(n.num); - bd = U128::from_integer(n.den); - - if ((ad * bd > U128::from_integer(2000000000)) - | ((an * bd - ad * bn) > U128::from_integer(2000000000)) - | ((an * bd) > U128::from_integer(2000000000))) { - let mut ddd = (an * bd - ad * bn) / (ad * bd); - let mut factor = U128::from_integer(1); - for _ in 1..5 { - if ddd * U128::from_integer(10) < U128::from_integer(2000000000) { - ddd *= U128::from_integer(10); - factor *= U128::from_integer(10); - } - } - let np: u32 = U128::to_integer(((an * bd - ad * bn) * factor) / (ad * bd)); - let fx: u32 = U128::to_integer(factor); - Fraction { sign: f1.sign, num: np, den: fx } - } else { - Fraction { sign: f1.sign, num: (m.num * n.den - n.num * m.den), den: m.den * n.den } - } - } else { - if ((ad * bd > U128::from_integer(2000000000)) - | ((bn * ad - bd * an) > U128::from_integer(2000000000)) - | ((bn * ad) > U128::from_integer(2000000000))) { - m = reduceFraction(m); - n = reduceFraction(n); - } - an = U128::from_integer(m.num); - ad = U128::from_integer(m.den); - bn = U128::from_integer(n.num); - bd = U128::from_integer(n.den); - if ((ad * bd > U128::from_integer(2000000000)) - | ((bn * ad - bd * an) > U128::from_integer(2000000000)) - | ((bn * ad) > U128::from_integer(2000000000))) { - let mut ddd = (bn * ad - bd * an) / (ad * bd); - let mut factor = U128::from_integer(1); - for _ in 1..5 { - if ddd * U128::from_integer(10) < U128::from_integer(2000000000) { - ddd *= U128::from_integer(10); - factor *= U128::from_integer(10); - } - } - let np: u32 = U128::to_integer(((bn * ad - bd * an) * factor) / (ad * bd)); - let fx: u32 = U128::to_integer(factor); - Fraction { sign: f2.sign, num: np, den: fx } - } else { - Fraction { sign: f2.sign, num: (n.num * m.den - m.num * n.den), den: m.den * n.den } - } - } -} - -// Returns the closest but smaller Integer to the Given Fraction, but typecast to Fraction for convenience -pub fn floor(f: Fraction) -> Fraction { - let q = f.num / f.den; - if q * f.den == f.num { - Fraction { sign: f.sign, num: f.num, den: f.den } - } else if f.sign { - Fraction { sign: f.sign, num: q, den: 1 } - } else { - Fraction { sign: f.sign, num: q + 1, den: 1 } - } -} - -#[test] -fn test_sum() { - let f1 = toFraction(true, 3, 5); - let f2 = 
toFraction(true, 2, 5); - let f = addFraction(f1, f2); - assert(f.num == f.den); -} - -#[test] -fn test_reduce() { - let f1 = toFraction(true, 2, 10); - let f2 = reduceFraction(f1); - assert(f2.num == 1); -} - -#[test] -fn test_floor() { - let f = toFraction(true, 7, 5); - let fl = floor(f); - assert(fl.num == 1); - assert(fl.den == 1); -} - -#[test] -fn test_floor2() { - let f = toFraction(false, 12, 5); - let fl = floor(f); - assert(fl.num == 3); - assert(fl.den == 1); -} - -#[test] -fn test_globals() { - let a = addFraction(ONE, NEGATIVE_ONE); - assert(a.num == ZERO.num); -} diff --git a/test_programs/execution_success/regression_5202/src/main.nr b/test_programs/execution_success/regression_5202/src/main.nr deleted file mode 100644 index 2ebfd7890ba..00000000000 --- a/test_programs/execution_success/regression_5202/src/main.nr +++ /dev/null @@ -1,23 +0,0 @@ -use fraction::{Fraction, MAX, floor, toFraction, addFraction}; - -fn main() { - let g1 = toFraction(true, 33333333, 5); - let g2 = toFraction(true, 500000, 33333333); - let a = addFraction(g1, g2); - - let f1 = floor(a); - let f2 = MAX; - assert(f1.sign); - assert(f2.sign); - - if f1.sign != f2.sign { - if (f1.sign) { () } else { () } - } else { - // Test fails here before the fix to #5202. - // An optimization which assumes an if condition to be true/false - // within the then/else branch respectively wasn't being set properly - // causing f1.sign to be assumed to be false in this else branch. - assert(f1.sign); - assert(f2.sign); - } -} diff --git a/test_programs/execution_success/slices/src/main.nr b/test_programs/execution_success/slices/src/main.nr index 8be79cdc3c4..2bd4dbd97b0 100644 --- a/test_programs/execution_success/slices/src/main.nr +++ b/test_programs/execution_success/slices/src/main.nr @@ -45,6 +45,14 @@ fn main(x: Field, y: pub Field) { assert(append[0] == 1); assert(append[4] == 5); + let mapped = &[1, 2].map(|x| x + 1); + assert_eq(mapped, &[2, 3]); + + assert_eq(&[1, 2, 3].fold(0, |acc, x| acc + x), 6); + assert_eq(&[1, 2, 3].reduce(|acc, x| acc + x), 6); + assert(&[2, 4, 6].all(|x| x > 0)); + assert(&[2, 4, 6].any(|x| x > 5)); + regression_2083(); // The parameters to this function must come from witness values (inputs to main) regression_merge_slices(x, y); diff --git a/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr b/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr index e36a263093a..62dd5a2c111 100644 --- a/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr +++ b/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr @@ -2,7 +2,7 @@ trait MyTrait { fn Add10(&mut self); } -impl MyTrait for crate2::MyStruct { +impl MyTrait for dep::crate2::MyStruct { fn Add10(&mut self) { self.Q += 10; } diff --git a/test_programs/execution_success/traits_in_crates_1/src/main.nr b/test_programs/execution_success/traits_in_crates_1/src/main.nr index 2afec29ee1f..7ba2f63c5c0 100644 --- a/test_programs/execution_success/traits_in_crates_1/src/main.nr +++ b/test_programs/execution_success/traits_in_crates_1/src/main.nr @@ -1,5 +1,5 @@ fn main(x: Field, y: pub Field) { - let mut V = crate2::MyStruct { Q: x }; + let mut V = dep::crate2::MyStruct { Q: x }; V.Add10(); assert(V.Q == y); } diff --git a/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr b/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr index fe6a94a4a95..38870489131 100644 --- a/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr +++ 
b/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr @@ -2,7 +2,7 @@ struct MyStruct { Q: Field, } -impl crate1::MyTrait for MyStruct { +impl dep::crate1::MyTrait for MyStruct { fn Add10(&mut self) { self.Q += 10; } diff --git a/test_programs/execution_success/traits_in_crates_2/src/main.nr b/test_programs/execution_success/traits_in_crates_2/src/main.nr index 2afec29ee1f..7ba2f63c5c0 100644 --- a/test_programs/execution_success/traits_in_crates_2/src/main.nr +++ b/test_programs/execution_success/traits_in_crates_2/src/main.nr @@ -1,5 +1,5 @@ fn main(x: Field, y: pub Field) { - let mut V = crate2::MyStruct { Q: x }; + let mut V = dep::crate2::MyStruct { Q: x }; V.Add10(); assert(V.Q == y); } diff --git a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr index 08a9234a752..62e03c153ed 100644 --- a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr +++ b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -2,14 +2,9 @@ fn test_different_string() { assert_eq(0, 1, "Different string"); } -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_with_extra_space() { - assert_eq(0, 1, "Not equal "); -} -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_runtime_mismatch() { - // We use a pedersen commitment here so that the assertion failure is only known at runtime. - assert_eq(std::hash::pedersen_commitment([27]).x, 0, "Not equal "); -} + +// The failure reason is a substring of the expected message, but it should be the other way around +#[test(should_fail_with = "Definitely Not equal!")] +fn test_wrong_expectation() { + assert_eq(0, 1, "Not equal"); +} \ No newline at end of file diff --git a/test_programs/noir_test_success/comptime_globals/src/main.nr b/test_programs/noir_test_success/comptime_globals/src/main.nr index efe9f0742b9..95c54b96609 100644 --- a/test_programs/noir_test_success/comptime_globals/src/main.nr +++ b/test_programs/noir_test_success/comptime_globals/src/main.nr @@ -5,7 +5,7 @@ comptime global FOO: Field = foo(); // Due to this function's mutability and branching, SSA currently fails // to fold this function into a constant before the assert_constant check // is evaluated before loop unrolling. 
-fn foo() -> Field { +comptime fn foo() -> Field { let mut three = 3; if three == 3 { 5 } else { 6 } } diff --git a/test_programs/noir_test_success/fuzzer_checks/Nargo.toml b/test_programs/noir_test_success/fuzzer_checks/Nargo.toml new file mode 100644 index 00000000000..cd09d0d344d --- /dev/null +++ b/test_programs/noir_test_success/fuzzer_checks/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "fuzzer_checks" +type = "bin" +authors = [""] +[dependencies] diff --git a/test_programs/noir_test_success/fuzzer_checks/src/main.nr b/test_programs/noir_test_success/fuzzer_checks/src/main.nr new file mode 100644 index 00000000000..2b928db092e --- /dev/null +++ b/test_programs/noir_test_success/fuzzer_checks/src/main.nr @@ -0,0 +1,6 @@ + +#[test(should_fail_with = "42 is not allowed")] +fn finds_magic_value(x: u32) { + let x = x as u64; + assert(2 * x != 42, "42 is not allowed"); +} diff --git a/test_programs/noir_test_success/regression_4561/src/main.nr b/test_programs/noir_test_success/regression_4561/src/main.nr index 70c447b49af..ad40941ff51 100644 --- a/test_programs/noir_test_success/regression_4561/src/main.nr +++ b/test_programs/noir_test_success/regression_4561/src/main.nr @@ -42,3 +42,37 @@ fn two_nested_return() { OracleMock::mock("two_nested_return").returns((0, [1, 2, 3, 4, 5, 6], 7, [1, 2, 3, 4, 5, 6])); assert_eq(two_nested_return_unconstrained(), (0, [[1, 2, 3], [4, 5, 6]], 7, [[1, 2, 3], [4, 5, 6]])); } + +#[oracle(foo_return)] +unconstrained fn foo_return() -> (Field, TReturn, TestTypeFoo) {} +unconstrained fn foo_return_unconstrained() -> (Field, TReturn, TestTypeFoo) { + foo_return() +} + +struct TestTypeFoo { + a: Field, + b: [[[Field; 3]; 4]; 2], + c: [TReturnElem; 2], + d: TReturnElem, +} + +#[test] +fn complexe_struct_return() { + OracleMock::mock("foo_return").returns( + ( + 0, [1, 2, 3, 4, 5, 6], 7, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], [1, 2, 3, 4, 5, 6] + ) + ); + let foo_x = foo_return_unconstrained(); + assert_eq((foo_x.0, foo_x.1), (0, [[1, 2, 3], [4, 5, 6]])); + assert_eq(foo_x.2.a, 7); + assert_eq( + foo_x.2.b, [ + [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]], [[13, 14, 15], [16, 17, 18], [19, 20, 21], [22, 23, 24]] + ] + ); + let a: TReturnElem = [1, 2, 3]; + let b: TReturnElem = [4, 5, 6]; + assert_eq(foo_x.2.c, [a, b]); + assert_eq(foo_x.2.d, a); +} diff --git a/test_programs/noir_test_success/should_fail_with_matches/src/main.nr b/test_programs/noir_test_success/should_fail_with_matches/src/main.nr index b713976643b..42696762ffe 100644 --- a/test_programs/noir_test_success/should_fail_with_matches/src/main.nr +++ b/test_programs/noir_test_success/should_fail_with_matches/src/main.nr @@ -3,6 +3,11 @@ fn test_should_fail_with_match() { assert_eq(0, 1, "Not equal"); } +#[test(should_fail_with = "Not equal")] +fn test_should_fail_with_match_partial_match() { + assert_eq(0, 1, "Definitely Not equal!"); +} + #[test(should_fail)] fn test_should_fail_without_match() { assert_eq(0, 1); @@ -48,6 +53,11 @@ unconstrained fn unconstrained_test_should_fail_with_match() { assert_eq(0, 1, "Not equal"); } +#[test(should_fail_with = "Not equal")] +unconstrained fn unconstrained_test_should_fail_with_match_partial_match() { + assert_eq(0, 1, "Definitely Not equal!"); +} + #[test(should_fail)] unconstrained fn unconstrained_test_should_fail_without_match() { assert_eq(0, 1); diff --git a/test_programs/test_libraries/diamond_deps_1/src/lib.nr 
b/test_programs/test_libraries/diamond_deps_1/src/lib.nr index d76ce5a05e9..60c001ec64e 100644 --- a/test_programs/test_libraries/diamond_deps_1/src/lib.nr +++ b/test_programs/test_libraries/diamond_deps_1/src/lib.nr @@ -1,4 +1,4 @@ -use dep2::call_dep2; +use dep::dep2::call_dep2; pub fn call_dep1_then_dep2(x: Field, y: Field) -> Field { call_dep2(x, y) diff --git a/test_programs/test_libraries/reexporting_lib/src/lib.nr b/test_programs/test_libraries/reexporting_lib/src/lib.nr index 1bced548304..f12dfe01ecd 100644 --- a/test_programs/test_libraries/reexporting_lib/src/lib.nr +++ b/test_programs/test_libraries/reexporting_lib/src/lib.nr @@ -1,3 +1,3 @@ -use exporting_lib::{MyStruct, FooStruct}; +use dep::exporting_lib::{MyStruct, FooStruct}; -use exporting_lib as lib; +use dep::exporting_lib as lib; diff --git a/tooling/acvm_cli/Cargo.toml b/tooling/acvm_cli/Cargo.toml index 1cfd1f3b270..a592f2d65f3 100644 --- a/tooling/acvm_cli/Cargo.toml +++ b/tooling/acvm_cli/Cargo.toml @@ -20,7 +20,7 @@ path = "src/main.rs" [dependencies] thiserror.workspace = true toml.workspace = true -color-eyre = "0.6.2" +color-eyre.workspace = true clap.workspace = true acvm.workspace = true nargo.workspace = true diff --git a/tooling/fuzzer/src/dictionary/mod.rs b/tooling/fuzzer/src/dictionary/mod.rs new file mode 100644 index 00000000000..bf2ab87be29 --- /dev/null +++ b/tooling/fuzzer/src/dictionary/mod.rs @@ -0,0 +1,124 @@ +//! This module defines how to build a dictionary of values which are likely to correspond +//! to significant inputs during fuzzing by inspecting the [Program] being fuzzed. +//! +//! This dictionary can be fed into the fuzzer's [strategy][proptest::strategy::Strategy] in order to bias it towards +//! generating these values to ensure they get proper coverage. +use std::collections::HashSet; + +use acvm::{ + acir::{ + circuit::{ + brillig::{BrilligBytecode, BrilligInputs}, + directives::Directive, + opcodes::{BlackBoxFuncCall, FunctionInput}, + Circuit, Opcode, Program, + }, + native_types::Expression, + }, + brillig_vm::brillig::Opcode as BrilligOpcode, + AcirField, +}; + +/// Constructs a [HashSet] of values pulled from a [Program] which are likely to correspond +/// to significant inputs during fuzzing. +pub(super) fn build_dictionary_from_program(program: &Program) -> HashSet { + let constrained_dictionaries = program.functions.iter().map(build_dictionary_from_circuit); + let unconstrained_dictionaries = + program.unconstrained_functions.iter().map(build_dictionary_from_unconstrained_function); + let dictionaries = constrained_dictionaries.chain(unconstrained_dictionaries); + + let mut constants: HashSet = HashSet::new(); + for dictionary in dictionaries { + constants.extend(dictionary); + } + constants +} + +fn build_dictionary_from_circuit(circuit: &Circuit) -> HashSet { + let mut constants: HashSet = HashSet::new(); + + fn insert_expr(dictionary: &mut HashSet, expr: &Expression) { + let quad_coefficients = expr.mul_terms.iter().map(|(k, _, _)| *k); + let linear_coefficients = expr.linear_combinations.iter().map(|(k, _)| *k); + let coefficients = linear_coefficients.chain(quad_coefficients); + + dictionary.extend(coefficients.clone()); + dictionary.insert(expr.q_c); + + // We divide the constant term by any coefficients in the expression to aid solving constraints such as `2 * x - 4 == 0`.
+ let scaled_constants = coefficients.map(|coefficient| expr.q_c / coefficient); + dictionary.extend(scaled_constants); + } + + fn insert_array_len(dictionary: &mut HashSet, array: &[T]) { + let array_length = array.len() as u128; + dictionary.insert(F::from(array_length)); + dictionary.insert(F::from(array_length - 1)); + } + + for opcode in &circuit.opcodes { + match opcode { + Opcode::AssertZero(expr) + | Opcode::Call { predicate: Some(expr), .. } + | Opcode::MemoryOp { predicate: Some(expr), .. } + | Opcode::Directive(Directive::ToLeRadix { a: expr, .. }) => { + insert_expr(&mut constants, expr) + } + + Opcode::MemoryInit { init, .. } => insert_array_len(&mut constants, init), + + Opcode::BrilligCall { inputs, predicate, .. } => { + for input in inputs { + match input { + BrilligInputs::Single(expr) => insert_expr(&mut constants, expr), + BrilligInputs::Array(exprs) => { + exprs.iter().for_each(|expr| insert_expr(&mut constants, expr)); + insert_array_len(&mut constants, exprs); + } + BrilligInputs::MemoryArray(_) => (), + } + } + if let Some(predicate) = predicate { + insert_expr(&mut constants, predicate) + } + } + + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { + input: FunctionInput { num_bits, .. }, + }) => { + let field = 1u128.wrapping_shl(*num_bits); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + _ => (), + } + } + + constants +} + +fn build_dictionary_from_unconstrained_function( + function: &BrilligBytecode, +) -> HashSet { + let mut constants: HashSet = HashSet::new(); + + for opcode in &function.bytecode { + match opcode { + BrilligOpcode::Cast { bit_size, .. } => { + let field = 1u128.wrapping_shl(*bit_size); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + BrilligOpcode::Const { bit_size, value, .. } => { + constants.insert(*value); + + let field = 1u128.wrapping_shl(*bit_size); + constants.insert(F::from(field)); + constants.insert(F::from(field - 1)); + } + _ => (), + } + } + + constants +} diff --git a/tooling/fuzzer/src/lib.rs b/tooling/fuzzer/src/lib.rs index 42dccc1dc83..28d7353f35a 100644 --- a/tooling/fuzzer/src/lib.rs +++ b/tooling/fuzzer/src/lib.rs @@ -4,9 +4,11 @@ //! Code is used under the MIT license. use acvm::{blackbox_solver::StubbedBlackBoxSolver, FieldElement}; +use dictionary::build_dictionary_from_program; use noirc_abi::InputMap; use proptest::test_runner::{TestCaseError, TestError, TestRunner}; +mod dictionary; mod strategies; mod types; @@ -37,7 +39,8 @@ impl FuzzedExecutor { /// Fuzzes the provided program. pub fn fuzz(&self) -> FuzzTestResult { - let strategy = strategies::arb_input_map(&self.program.abi); + let dictionary = build_dictionary_from_program(&self.program.bytecode); + let strategy = strategies::arb_input_map(&self.program.abi, dictionary); let run_result: Result<(), TestError> = self.runner.clone().run(&strategy, |input_map| { diff --git a/tooling/fuzzer/src/strategies/mod.rs b/tooling/fuzzer/src/strategies/mod.rs index f5b03953ba8..46187a28d5b 100644 --- a/tooling/fuzzer/src/strategies/mod.rs +++ b/tooling/fuzzer/src/strategies/mod.rs @@ -5,28 +5,22 @@ use proptest::prelude::*; use acvm::{AcirField, FieldElement}; use noirc_abi::{input_parser::InputValue, Abi, AbiType, InputMap, Sign}; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashSet}; use uint::UintStrategy; mod int; mod uint; -proptest::prop_compose! 
{ - pub(super) fn arb_field_from_integer(bit_size: u32)(value: u128)-> FieldElement { - let width = (bit_size % 128).clamp(1, 127); - let max_value = 2u128.pow(width) - 1; - let value = value % max_value; - FieldElement::from(value) - } -} - -pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { +pub(super) fn arb_value_from_abi_type( + abi_type: &AbiType, + dictionary: HashSet, +) -> SBoxedStrategy { match abi_type { AbiType::Field => vec(any::(), 32) .prop_map(|bytes| InputValue::Field(FieldElement::from_be_bytes_reduce(&bytes))) .sboxed(), AbiType::Integer { width, sign } if sign == &Sign::Unsigned => { - UintStrategy::new(*width as usize) + UintStrategy::new(*width as usize, dictionary) .prop_map(|uint| InputValue::Field(uint.into())) .sboxed() } @@ -55,7 +49,7 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { let length = *length as usize; - let elements = vec(arb_value_from_abi_type(typ), length..=length); + let elements = vec(arb_value_from_abi_type(typ, dictionary), length..=length); elements.prop_map(InputValue::Vec).sboxed() } @@ -63,7 +57,9 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { let fields: Vec> = fields .iter() - .map(|(name, typ)| (Just(name.clone()), arb_value_from_abi_type(typ)).sboxed()) + .map(|(name, typ)| { + (Just(name.clone()), arb_value_from_abi_type(typ, dictionary.clone())).sboxed() + }) .collect(); fields @@ -75,17 +71,23 @@ pub(super) fn arb_value_from_abi_type(abi_type: &AbiType) -> SBoxedStrategy { - let fields: Vec<_> = fields.iter().map(arb_value_from_abi_type).collect(); + let fields: Vec<_> = + fields.iter().map(|typ| arb_value_from_abi_type(typ, dictionary.clone())).collect(); fields.prop_map(InputValue::Vec).sboxed() } } } -pub(super) fn arb_input_map(abi: &Abi) -> BoxedStrategy { +pub(super) fn arb_input_map( + abi: &Abi, + dictionary: HashSet, +) -> BoxedStrategy { let values: Vec<_> = abi .parameters .iter() - .map(|param| (Just(param.name.clone()), arb_value_from_abi_type(&param.typ))) + .map(|param| { + (Just(param.name.clone()), arb_value_from_abi_type(&param.typ, dictionary.clone())) + }) + .collect(); values diff --git a/tooling/fuzzer/src/strategies/uint.rs b/tooling/fuzzer/src/strategies/uint.rs index 5021e832b97..94610dbc829 100644 --- a/tooling/fuzzer/src/strategies/uint.rs +++ b/tooling/fuzzer/src/strategies/uint.rs @@ -1,3 +1,6 @@ +use std::collections::HashSet; + +use acvm::{AcirField, FieldElement}; use proptest::{ strategy::{NewTree, Strategy}, test_runner::TestRunner, @@ -13,9 +16,12 @@ use rand::Rng; pub struct UintStrategy { /// Bit size of uint (e.g. 128) bits: usize, - + /// A set of fixtures to be generated + fixtures: Vec, /// The weight for edge cases (+/- 3 around 0 and max possible value) edge_weight: usize, + /// The weight for fixtures + fixtures_weight: usize, /// The weight for purely random values random_weight: usize, } @@ -24,8 +30,15 @@ impl UintStrategy { /// Create a new strategy. /// # Arguments /// * `bits` - Size of uint in bits - pub fn new(bits: usize) -> Self { - Self { bits, edge_weight: 10usize, random_weight: 50usize } + /// * `fixtures` - Set of `FieldElements` representing values which the fuzzer should weight testing towards.
+ pub fn new(bits: usize, fixtures: HashSet) -> Self { + Self { + bits, + fixtures: fixtures.into_iter().collect(), + edge_weight: 10usize, + fixtures_weight: 40usize, + random_weight: 50usize, + } + } fn generate_edge_tree(&self, runner: &mut TestRunner) -> NewTree { @@ -37,6 +50,22 @@ impl UintStrategy { Ok(proptest::num::u128::BinarySearch::new(start)) } + fn generate_fixtures_tree(&self, runner: &mut TestRunner) -> NewTree { + // Generate random cases if there are no fixtures + if self.fixtures.is_empty() { + return self.generate_random_tree(runner); + } + + // Generate value tree from fixture. + let fixture = &self.fixtures[runner.rng().gen_range(0..self.fixtures.len())]; + if fixture.num_bits() <= self.bits as u32 { + return Ok(proptest::num::u128::BinarySearch::new(fixture.to_u128())); + } + + // If the fixture doesn't fit in the uint's bit size, generate a random value. + self.generate_random_tree(runner) + } + fn generate_random_tree(&self, runner: &mut TestRunner) -> NewTree { let rng = runner.rng(); let start = rng.gen_range(0..=self.type_max()); @@ -57,11 +86,12 @@ impl Strategy for UintStrategy { type Tree = proptest::num::u128::BinarySearch; type Value = u128; fn new_tree(&self, runner: &mut TestRunner) -> NewTree { - let total_weight = self.random_weight + self.edge_weight; + let total_weight = self.random_weight + self.fixtures_weight + self.edge_weight; let bias = runner.rng().gen_range(0..total_weight); - // randomly select one of 2 strategies + // randomly select one of 3 strategies match bias { x if x < self.edge_weight => self.generate_edge_tree(runner), + x if x < self.edge_weight + self.fixtures_weight => self.generate_fixtures_tree(runner), _ => self.generate_random_tree(runner), } } diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 0fcac73b905..9d1185e3a79 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -24,6 +24,14 @@ impl BlackBoxFunctionSolver for WrapperSolver { self.0.pedersen_commitment(inputs, domain_separator) } + fn pedersen_hash( + &self, + inputs: &[acvm::FieldElement], + domain_separator: u32, + ) -> Result { + self.0.pedersen_hash(inputs, domain_separator) + } + fn multi_scalar_mul( &self, points: &[acvm::FieldElement], @@ -36,14 +44,6 @@ impl BlackBoxFunctionSolver for WrapperSolver { self.0.multi_scalar_mul(points, scalars_lo, scalars_hi) } - fn pedersen_hash( - &self, - inputs: &[acvm::FieldElement], - domain_separator: u32, - ) -> Result { - self.0.pedersen_hash(inputs, domain_separator) - } - fn ec_add( &self, input1_x: &acvm::FieldElement, diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index ace2e9f0d0c..18c6f2530b9 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -128,7 +128,7 @@ fn check_expected_failure_message( }; let expected_failure_message_matches = - matches!(&failed_assertion, Some(message) if message == expected_failure_message); + matches!(&failed_assertion, Some(message) if message.contains(expected_failure_message)); if expected_failure_message_matches { return TestStatus::Pass; } diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index b9d7d7e3e48..e0e54449a6f 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -43,11 +43,16 @@ prettytable-rs = "0.10" rayon = "1.8.0" thiserror.workspace = true tower.workspace = true -async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } +async-lsp = { workspace = true, features = [ + "client-monitor", + "stdio", + "tracing", + 
"tokio", +] } const_format.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" -color-eyre = "0.6.2" +color-eyre.workspace = true tokio = { version = "1.0", features = ["io-std", "rt"] } dap.workspace = true clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } diff --git a/tooling/nargo_cli/benches/criterion.rs b/tooling/nargo_cli/benches/criterion.rs index 9f67bcffd6e..effab7d7c27 100644 --- a/tooling/nargo_cli/benches/criterion.rs +++ b/tooling/nargo_cli/benches/criterion.rs @@ -28,16 +28,10 @@ macro_rules! criterion_command { }; } criterion_command!(execution, "execute"); -criterion_command!(prove, "prove"); criterion_group! { name = execution_benches; config = Criterion::default().sample_size(20).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); targets = criterion_selected_tests_execution } -criterion_group! { - name = prove_benches; - config = Criterion::default().sample_size(10).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = criterion_selected_tests_prove -} -criterion_main!(execution_benches, prove_benches); +criterion_main!(execution_benches); diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 43c277ba03e..a6873910524 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -61,395 +61,299 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ /// Certain features are only available in the elaborator. /// We skip these tests for non-elaborator code since they are not /// expected to work there. This can be removed once the old code is removed. -const IGNORED_NEW_FEATURE_TESTS: [&str; 3] = - ["macros", "wildcard_type", "type_definition_annotation"]; +const IGNORED_NEW_FEATURE_TESTS: [&str; 5] = [ + "macros", + "wildcard_type", + "type_definition_annotation", + "numeric_generics_explicit", + "derive_impl", +]; -fn generate_execution_success_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "execution_success"; +fn read_test_cases( + test_data_dir: &Path, + test_sub_dir: &str, +) -> impl Iterator { let test_data_dir = test_data_dir.join(test_sub_dir); - let test_case_dirs = fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - for test_dir in test_case_dirs { + test_case_dirs.into_iter().map(|dir| { let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); + dir.file_name().into_string().expect("Directory can't be converted to string"); if test_name.contains('-') { panic!( "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" ); - }; - let test_dir = &test_dir.path(); - - let brillig_ignored = - if IGNORED_BRILLIG_TESTS.contains(&test_name.as_str()) { "\n#[ignore]" } else { "" }; - let new_features_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( - test_file, - r#" -#[test]{new_features_ignored} -fn execution_success_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--use-legacy"); - - cmd.assert().success(); -}} + } + (test_name, dir.path()) + }) +} +fn generate_test_case( + test_file: &mut File, + test_type: &str, + test_name: &str, + test_dir: &std::path::Display, + test_content: &str, +) { + write!( + test_file, + r#" #[test] -fn execution_success_{test_name}() {{ +fn {test_type}_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force"); - - cmd.assert().success(); + let mut nargo = Command::cargo_bin("nargo").unwrap(); + nargo.arg("--program-dir").arg(test_program_dir); + {test_content} }} +"# + ) + .expect("Could not write templated test file."); +} -#[test]{brillig_ignored} -fn execution_success_{test_name}_brillig() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--force-brillig"); +fn generate_execution_success_tests(test_file: &mut File, test_data_dir: &Path) { + let test_type = "execution_success"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &test_name, + &test_dir, + r#" + nargo.arg("execute").arg("--force"); + + nargo.assert().success();"#, + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--use-legacy"); + + nargo.assert().success();"#, + ); + } + + if !IGNORED_BRILLIG_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("{test_name}_brillig"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--force-brillig"); + + nargo.assert().success();"#, + ); + } } } fn generate_execution_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "execution_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let test_type = "execution_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn execution_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force").arg("--use-legacy"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - -#[test] -fn execution_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); + nargo.arg("execute").arg("--force"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute").arg("--force"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("execute").arg("--force").arg("--use-legacy"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); } } fn generate_noir_test_success_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "noir_test_success"; - let test_data_dir = test_data_dir.join(test_sub_dir); + let test_type = "noir_test_success"; + let test_cases = read_test_cases(test_data_dir, "noir_test_success"); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn noir_test_success_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test").arg("--use-legacy"); - - cmd.assert().success(); -}} + nargo.arg("test"); + + nargo.assert().success();"#, + ); -#[test] -fn noir_test_success_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test"); - - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("test").arg("--use-legacy"); + + nargo.assert().success();"#, + ); } } fn generate_noir_test_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "noir_test_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + let test_type = "noir_test_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn noir_test_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test").arg("--use-legacy"); - - cmd.assert().failure(); -}} + nargo.arg("test"); + + nargo.assert().failure();"#, + ); -#[test] -fn noir_test_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("test"); - - cmd.assert().failure(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("test").arg("--use-legacy"); + + nargo.assert().failure();"#, + ); } } fn generate_compile_success_empty_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_success_empty"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - let new_feature_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( + let test_type = "compile_success_empty"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); + + let assert_zero_opcodes = r#" + let output = nargo.output().expect("Failed to execute command"); + + if !output.status.success() {{ + panic!("`nargo info` failed with: {}", String::from_utf8(output.stderr).unwrap_or_default()); + }} + + // `compile_success_empty` tests should be able to compile down to an empty circuit. + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ + panic!("JSON was not well-formatted {:?}\n\n{:?}", e, std::str::from_utf8(&output.stdout)) + }}); + let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; + assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); + "#; + + generate_test_case( test_file, - r#" -#[test]{new_feature_ignored} -fn compile_success_empty_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("info"); - cmd.arg("--json"); - cmd.arg("--force"); - cmd.arg("--use-legacy"); - - let output = cmd.output().expect("Failed to execute command"); - - if !output.status.success() {{ - panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); - }} - - // `compile_success_empty` tests should be able to compile down to an empty circuit. - let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ - panic!("JSON was not well-formatted {{:?}}\n\n{{:?}}", e, std::str::from_utf8(&output.stdout)) - }}); - let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); -}} - -#[test] -fn compile_success_empty_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("info"); - cmd.arg("--json"); - cmd.arg("--force"); - - let output = cmd.output().expect("Failed to execute command"); - - if !output.status.success() {{ - panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); - }} - - // `compile_success_empty` tests should be able to compile down to an empty circuit. 
- let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|e| {{ - panic!("JSON was not well-formatted {{:?}}\n\n{{:?}}", e, std::str::from_utf8(&output.stdout)) - }}); - let num_opcodes = &json["programs"][0]["functions"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + test_type, + &test_name, + &test_dir, + &format!( + r#" + nargo.arg("info").arg("--json").arg("--force"); + + {assert_zero_opcodes}"#, + ), + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + &format!( + r#" + nargo.arg("info").arg("--json").arg("--force").arg("--use-legacy"); + + {assert_zero_opcodes}"#, + ), + ); + } } } fn generate_compile_success_contract_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_success_contract"; - let test_data_dir = test_data_dir.join(test_sub_dir); + let test_type = "compile_success_contract"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); - - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - write!( + generate_test_case( test_file, + test_type, + &test_name, + &test_dir, r#" -#[test] -fn compile_success_contract_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force").arg("--use-legacy"); + nargo.arg("compile").arg("--force"); + + nargo.assert().success();"#, + ); - cmd.assert().success(); -}} -#[test] -fn compile_success_contract_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force"); - - cmd.assert().success(); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("compile").arg("--force").arg("--use-legacy"); + + nargo.assert().success();"#, + ); } } fn generate_compile_failure_tests(test_file: &mut File, test_data_dir: &Path) { - let test_sub_dir = "compile_failure"; - let test_data_dir = test_data_dir.join(test_sub_dir); - - let test_case_dirs = - fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let test_type = "compile_failure"; + let test_cases = read_test_cases(test_data_dir, test_type); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); - for test_dir in test_case_dirs { - let test_name = - test_dir.file_name().into_string().expect("Directory can't be converted to string"); - if test_name.contains('-') { - panic!( - "Invalid test directory: {test_name}. 
Cannot include `-`, please convert to `_`" - ); - }; - let test_dir = &test_dir.path(); - - let new_feature_ignored = if IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { - "\n#[ignore]" - } else { - "" - }; - - write!( + generate_test_case( test_file, - r#" -#[test]{new_feature_ignored} -fn compile_failure_legacy_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force").arg("--use-legacy"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} -#[test] -fn compile_failure_{test_name}() {{ - let test_program_dir = PathBuf::from("{test_dir}"); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile").arg("--force"); - - cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); -}} - "#, - test_dir = test_dir.display(), - ) - .expect("Could not write templated test file."); + test_type, + &test_name, + &test_dir, + r#"nargo.arg("compile").arg("--force"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); + + if !IGNORED_NEW_FEATURE_TESTS.contains(&test_name.as_str()) { + generate_test_case( + test_file, + test_type, + &format!("legacy_{test_name}"), + &test_dir, + r#" + nargo.arg("compile").arg("--force").arg("--use-legacy"); + + nargo.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not());"#, + ); + } } } diff --git a/tooling/nargo_cli/tests/stdlib-tests.rs b/tooling/nargo_cli/tests/stdlib-tests.rs index 5badd76605a..bf6614860e2 100644 --- a/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/tooling/nargo_cli/tests/stdlib-tests.rs @@ -1,7 +1,6 @@ use std::io::Write; use std::{collections::BTreeMap, path::PathBuf}; -use acvm::blackbox_solver::StubbedBlackBoxSolver; use fm::FileManager; use noirc_driver::{check_crate, compile_no_check, file_manager_with_stdlib, CompileOptions}; use noirc_frontend::hir::FunctionNameMatch; @@ -33,7 +32,7 @@ fn run_stdlib_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, true, false, false); + let result = check_crate(&mut context, dummy_crate_id, false, false, false); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); @@ -56,7 +55,7 @@ fn run_stdlib_tests() { let status = if test_function_has_no_arguments { run_test( - &StubbedBlackBoxSolver, + &bn254_blackbox_solver::Bn254BlackBoxSolver, &mut context, &test_function, false, diff --git a/tooling/nargo_fmt/src/items.rs b/tooling/nargo_fmt/src/items.rs index 7f998f45b59..80b641fd830 100644 --- a/tooling/nargo_fmt/src/items.rs +++ b/tooling/nargo_fmt/src/items.rs @@ -74,7 +74,8 @@ impl<'me, T> Items<'me, T> { let mut different_line = false; let leading = self.visitor.slice(start..end); - let leading_trimmed = leading.trim(); + // Trim any possible whitespace before and after a comma separator + let leading_trimmed = leading.trim().trim_start_matches(',').trim(); let starts_with_block_comment = leading_trimmed.starts_with("/*"); let ends_with_block_comment = leading_trimmed.ends_with("*/"); diff --git a/tooling/nargo_fmt/src/rewrite/expr.rs b/tooling/nargo_fmt/src/rewrite/expr.rs index 
ec8ac4abec7..015644c15cb 100644 --- a/tooling/nargo_fmt/src/rewrite/expr.rs +++ b/tooling/nargo_fmt/src/rewrite/expr.rs @@ -168,9 +168,7 @@ pub(crate) fn rewrite( format!("{path_string}{turbofish}") } ExpressionKind::Lambda(_) => visitor.slice(span).to_string(), - ExpressionKind::Quote(block, block_span) => { - format!("quote {}", rewrite_block(visitor, block, block_span)) - } + ExpressionKind::Quote(_) => visitor.slice(span).to_string(), ExpressionKind::Comptime(block, block_span) => { format!("comptime {}", rewrite_block(visitor, block, block_span)) } @@ -185,7 +183,6 @@ pub(crate) fn rewrite( format!("$({})", rewrite_sub_expr(visitor, shape, *expr)) } } - ExpressionKind::UnquoteMarker(_) => unreachable!("UnquoteMarker in runtime code"), } } diff --git a/tooling/nargo_fmt/src/rewrite/typ.rs b/tooling/nargo_fmt/src/rewrite/typ.rs index 3eb398346c3..3298ed8ae73 100644 --- a/tooling/nargo_fmt/src/rewrite/typ.rs +++ b/tooling/nargo_fmt/src/rewrite/typ.rs @@ -55,6 +55,10 @@ pub(crate) fn rewrite(visitor: &FmtVisitor, _shape: Shape, typ: UnresolvedType) format!("fn{env}({args}) -> {return_type}") } + UnresolvedTypeData::Resolved(_) => { + unreachable!("Unexpected macro expansion of a type in nargo fmt input") + } + UnresolvedTypeData::Unspecified => todo!(), UnresolvedTypeData::FieldElement | UnresolvedTypeData::Integer(_, _) diff --git a/tooling/nargo_fmt/src/utils.rs b/tooling/nargo_fmt/src/utils.rs index 2c5c3085e66..020f411ae2f 100644 --- a/tooling/nargo_fmt/src/utils.rs +++ b/tooling/nargo_fmt/src/utils.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::items::HasItem; use crate::rewrite; use crate::visitor::{FmtVisitor, Shape}; -use noirc_frontend::ast::{Expression, Ident, Param, Visibility}; +use noirc_frontend::ast::{Expression, Ident, Param, UnresolvedGeneric, Visibility}; use noirc_frontend::hir::resolution::errors::Span; use noirc_frontend::lexer::Lexer; use noirc_frontend::token::Token; @@ -80,6 +80,7 @@ pub(crate) fn find_comment_end(slice: &str, is_last: bool) -> usize { std::cmp::max(find_comment_end(slice) + block, separator_index + 1) } (_, Some(newline)) if newline > separator_index => newline + 1, + (None, None) => 0, _ => slice.len(), } } else if let Some(newline_index) = newline_index { @@ -170,6 +171,26 @@ impl HasItem for Ident { } } +impl HasItem for UnresolvedGeneric { + fn span(&self) -> Span { + self.span() + } + + fn format(self, visitor: &FmtVisitor, _shape: Shape) -> String { + match self { + UnresolvedGeneric::Variable(_) => visitor.slice(self.span()).into(), + UnresolvedGeneric::Numeric { ident, typ } => { + let mut result = "".to_owned(); + result.push_str(&ident.0.contents); + result.push_str(": "); + let typ = rewrite::typ(visitor, _shape, typ); + result.push_str(&typ); + result + } + } + } +} + pub(crate) fn first_line_width(exprs: &str) -> usize { exprs.lines().next().map_or(0, |line: &str| line.chars().count()) } diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index a5d042dc71e..5aaaf20ff47 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -44,7 +44,7 @@ impl super::FmtVisitor<'_> { if !func.def.generics.is_empty() { let full_span = name_span.end()..params_open; - let start = name_span.end(); + let start = self.span_before(full_span.clone(), Token::Less).start(); let end = self.span_after(full_span, Token::Greater).start(); let generics = func.def.generics; @@ -188,8 +188,8 @@ impl super::FmtVisitor<'_> { continue; } - let slice = - 
self.slice(self.last_position..impl_.object_type.span.unwrap().end());
+        let before_brace = self.span_before(span, Token::LeftBrace).start();
+        let slice = self.slice(self.last_position..before_brace).trim();
         let after_brace = self.span_after(span, Token::LeftBrace).start();
         self.last_position = after_brace;
diff --git a/tooling/nargo_fmt/tests/expected/contract.nr b/tooling/nargo_fmt/tests/expected/contract.nr
index e3a5877725a..14b1af4a848 100644
--- a/tooling/nargo_fmt/tests/expected/contract.nr
+++ b/tooling/nargo_fmt/tests/expected/contract.nr
@@ -3,11 +3,11 @@
 // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.
 // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
 contract Benchmarking {
-    use aztec::protocol_types::abis::function_selector::FunctionSelector;
+    use dep::aztec::protocol_types::abis::function_selector::FunctionSelector;

     use value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}};

-    use aztec::{
+    use dep::aztec::{
         context::Context, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader},
         log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet},
         types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN},
diff --git a/tooling/nargo_fmt/tests/expected/fn.nr b/tooling/nargo_fmt/tests/expected/fn.nr
index 3d231cd3f7f..4dde9a1b3ec 100644
--- a/tooling/nargo_fmt/tests/expected/fn.nr
+++ b/tooling/nargo_fmt/tests/expected/fn.nr
@@ -61,3 +61,11 @@ fn main(
 ) {}

 pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {}
+
+fn id<T, let I: Field>(x: [Field; I]) -> [Field; I] {}
+
+fn id_two<let I: Field>(x: [Field; I]) -> [Field; I] {}
+
+fn whitespace_before_generics<T>(foo: T) {}
+
+fn more_whitespace_before_generics<T>(foo: T) {}
diff --git a/tooling/nargo_fmt/tests/expected/impl.nr b/tooling/nargo_fmt/tests/expected/impl.nr
index ec734b57970..3c2fa42837a 100644
--- a/tooling/nargo_fmt/tests/expected/impl.nr
+++ b/tooling/nargo_fmt/tests/expected/impl.nr
@@ -19,3 +19,9 @@ impl MyType {
 impl MyType {
     fn method(self) {}
 }
+
+impl<T> MyStruct<T> where T: MyEq {
+    fn my_eq(self, other: Self) -> bool {
+        (self.a == other.a) & self.b.my_eq(other.b)
+    }
+}
diff --git a/tooling/nargo_fmt/tests/expected/let.nr b/tooling/nargo_fmt/tests/expected/let.nr
index 0edc0eaf922..7ff69e74306 100644
--- a/tooling/nargo_fmt/tests/expected/let.nr
+++ b/tooling/nargo_fmt/tests/expected/let.nr
@@ -51,10 +51,10 @@ fn let_() {

     let expr = MyExpr { /*A boolean literal (true, false).*/ kind: ExprKind::Bool(true) };

-    let mut V = crate2::MyStruct { Q: x };
-    let mut V = crate2::MyStruct {};
-    let mut V = crate2::MyStruct {/*test*/};
-    let mut V = crate2::MyStruct {
+    let mut V = dep::crate2::MyStruct { Q: x };
+    let mut V = dep::crate2::MyStruct {};
+    let mut V = dep::crate2::MyStruct {/*test*/};
+    let mut V = dep::crate2::MyStruct {
         // sad
     };
 }
diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr
index e3a5877725a..14b1af4a848 100644
--- a/tooling/nargo_fmt/tests/input/contract.nr
+++ b/tooling/nargo_fmt/tests/input/contract.nr
@@ -3,11 +3,11 @@
 // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.
 // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
 contract Benchmarking {
-    use aztec::protocol_types::abis::function_selector::FunctionSelector;
+    use dep::aztec::protocol_types::abis::function_selector::FunctionSelector;

     use value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}};

-    use aztec::{
+    use dep::aztec::{
         context::Context, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader},
         log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet},
         types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN},
diff --git a/tooling/nargo_fmt/tests/input/fn.nr b/tooling/nargo_fmt/tests/input/fn.nr
index 1c6d201fa39..16ed95a540d 100644
--- a/tooling/nargo_fmt/tests/input/fn.nr
+++ b/tooling/nargo_fmt/tests/input/fn.nr
@@ -44,3 +44,13 @@ fn main(
 ) {}

 pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {}
+
+fn id< T , let I : Field > ( x : [ Field ; I ] ) -> [Field; I ] { }
+
+fn id_two<let I: Field>(x: [Field ; I]) -> [ Field; I] {}
+
+fn whitespace_before_generics < T > (foo: T) {}
+
+fn more_whitespace_before_generics <
+T > (foo: T) {}
diff --git a/tooling/nargo_fmt/tests/input/impl.nr b/tooling/nargo_fmt/tests/input/impl.nr
index ea909dfad44..21ce6a2e175 100644
--- a/tooling/nargo_fmt/tests/input/impl.nr
+++ b/tooling/nargo_fmt/tests/input/impl.nr
@@ -19,3 +19,9 @@ fn method(self) {}
 impl MyType {
     fn method(self) {}
 }
+
+impl<T> MyStruct<T> where T: MyEq {
+    fn my_eq(self, other: Self) -> bool {
+        (self.a == other.a) & self.b.my_eq(other.b)
+    }
+}
diff --git a/tooling/nargo_fmt/tests/input/let.nr b/tooling/nargo_fmt/tests/input/let.nr
index 16ce0a9d7f1..37cdc6655c7 100644
--- a/tooling/nargo_fmt/tests/input/let.nr
+++ b/tooling/nargo_fmt/tests/input/let.nr
@@ -26,10 +26,10 @@ kind: ExprKind::Bool(true),

     let expr = MyExpr {/*A boolean literal (true, false).*/kind: ExprKind::Bool(true),};

-    let mut V = crate2::MyStruct { Q: x };
-    let mut V = crate2::MyStruct {};
-    let mut V = crate2::MyStruct {/*test*/};
-    let mut V = crate2::MyStruct {
+    let mut V = dep::crate2::MyStruct { Q: x };
+    let mut V = dep::crate2::MyStruct {};
+    let mut V = dep::crate2::MyStruct {/*test*/};
+    let mut V = dep::crate2::MyStruct {
         // sad
     };
 }
diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json
index 2bb142c9fa0..b0c9e85315a 100644
--- a/tooling/noir_js_backend_barretenberg/package.json
+++ b/tooling/noir_js_backend_barretenberg/package.json
@@ -41,7 +41,7 @@
     "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.41.0", + "@aztec/bb.js": "0.43.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/tooling/noir_js_backend_barretenberg/src/backend.ts b/tooling/noir_js_backend_barretenberg/src/backend.ts index d07681dd8c1..96c4d13aa61 100644 --- a/tooling/noir_js_backend_barretenberg/src/backend.ts +++ b/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -45,7 +45,11 @@ export class BarretenbergVerifierBackend implements VerifierBackend { const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); const api = await Barretenberg.new(this.options); - const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); + const honkRecursion = false; + const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes( + this.acirUncompressedBytecode, + honkRecursion, + ); const crs = await Crs.new(subgroupSize + 1); await api.commonInitSlabAllocator(subgroupSize); await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index b3d80099137..c3e1ade04fa 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -171,7 +171,7 @@ pub struct AbiReturnType { pub visibility: AbiVisibility, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] pub struct Abi { /// An ordered list of the arguments to the program's `main` function, specifying their types and visibility. diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/tooling/profiler/Cargo.toml b/tooling/profiler/Cargo.toml new file mode 100644 index 00000000000..d33e99f1a4c --- /dev/null +++ b/tooling/profiler/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "noir_profiler" +description = "Profiler for noir circuits" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[[bin]] +name = "noir-profiler" +path = "src/main.rs" + +[dependencies] +color-eyre.workspace = true +clap.workspace = true +noirc_artifacts.workspace = true +nargo.workspace = true +const_format.workspace = true +serde.workspace = true +serde_json.workspace = true +fm.workspace = true +codespan-reporting.workspace = true +inferno = "0.11.19" +im.workspace = true +acir.workspace = true +noirc_errors.workspace = true + +# Logs +tracing-subscriber.workspace = true +tracing-appender = "0.2.3" + +[dev-dependencies] +noirc_abi.workspace = true +noirc_driver.workspace = true +tempfile.workspace = true + +[features] +default = ["bn254"] +bn254 = ["acir/bn254"] diff --git a/tooling/profiler/src/cli/gates_flamegraph_cmd.rs b/tooling/profiler/src/cli/gates_flamegraph_cmd.rs new file mode 100644 index 00000000000..38e7fff5f42 --- /dev/null +++ b/tooling/profiler/src/cli/gates_flamegraph_cmd.rs 
@@ -0,0 +1,486 @@
+use std::collections::BTreeMap;
+use std::io::BufWriter;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use clap::Args;
+use codespan_reporting::files::Files;
+use color_eyre::eyre::{self, Context};
+use inferno::flamegraph::{from_lines, Options};
+use serde::{Deserialize, Serialize};
+
+use acir::circuit::OpcodeLocation;
+use nargo::errors::Location;
+use noirc_artifacts::debug::DebugArtifact;
+use noirc_artifacts::program::ProgramArtifact;
+use noirc_errors::reporter::line_and_column_from_span;
+
+#[derive(Debug, Clone, Args)]
+pub(crate) struct GatesFlamegraphCommand {
+    /// The path to the artifact JSON file
+    #[clap(long, short)]
+    artifact_path: String,
+
+    /// Path to the noir backend binary
+    #[clap(long, short)]
+    backend_path: String,
+
+    /// The output folder for the flamegraph svg files
+    #[clap(long, short)]
+    output: String,
+}
+
+trait GatesProvider {
+    fn get_gates(&self, artifact_path: &Path) -> eyre::Result<BackendGatesResponse>;
+}
+
+struct BackendGatesProvider {
+    backend_path: PathBuf,
+}
+
+impl GatesProvider for BackendGatesProvider {
+    fn get_gates(&self, artifact_path: &Path) -> eyre::Result<BackendGatesResponse> {
+        let backend_gates_response =
+            Command::new(&self.backend_path).arg("gates").arg("-b").arg(artifact_path).output()?;
+
+        // Parse the backend gates command stdout as json
+        let backend_gates_response: BackendGatesResponse =
+            serde_json::from_slice(&backend_gates_response.stdout)?;
+        Ok(backend_gates_response)
+    }
+}
+
+trait FlamegraphGenerator {
+    fn generate_flamegraph<'lines, I: IntoIterator<Item = &'lines str>>(
+        &self,
+        folded_lines: I,
+        artifact_name: &str,
+        function_name: &str,
+        output_path: &Path,
+    ) -> eyre::Result<()>;
+}
+
+struct InfernoFlamegraphGenerator {}
+
+impl FlamegraphGenerator for InfernoFlamegraphGenerator {
+    fn generate_flamegraph<'lines, I: IntoIterator<Item = &'lines str>>(
+        &self,
+        folded_lines: I,
+        artifact_name: &str,
+        function_name: &str,
+        output_path: &Path,
+    ) -> eyre::Result<()> {
+        let flamegraph_file = std::fs::File::create(output_path)?;
+        let flamegraph_writer = BufWriter::new(flamegraph_file);
+
+        let mut options = Options::default();
+        options.hash = true;
+        options.deterministic = true;
+        options.title = format!("{}-{}", artifact_name, function_name);
+        options.subtitle = Some("Sample = Gate".to_string());
+        options.frame_height = 24;
+        options.color_diffusion = true;
+
+        from_lines(&mut options, folded_lines, flamegraph_writer)?;
+
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+struct BackendGatesReport {
+    acir_opcodes: usize,
+    circuit_size: usize,
+    gates_per_opcode: Vec<usize>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+struct BackendGatesResponse {
+    functions: Vec<BackendGatesReport>,
+}
+
+struct FoldedStackItem {
+    total_gates: usize,
+    nested_items: BTreeMap<String, FoldedStackItem>,
+}
+
+pub(crate) fn run(args: GatesFlamegraphCommand) -> eyre::Result<()> {
+    run_with_provider(
+        &PathBuf::from(args.artifact_path),
+        &BackendGatesProvider { backend_path: PathBuf::from(args.backend_path) },
+        &InfernoFlamegraphGenerator {},
+        &PathBuf::from(args.output),
+    )
+}
+
+fn run_with_provider<Provider: GatesProvider, Generator: FlamegraphGenerator>(
+    artifact_path: &Path,
+    gates_provider: &Provider,
+    flamegraph_generator: &Generator,
+    output_path: &Path,
+) -> eyre::Result<()> {
+    let program =
+        read_program_from_file(artifact_path).context("Error reading program from file")?;
+
+    let backend_gates_response =
+        gates_provider.get_gates(artifact_path).context("Error querying backend for gates")?;
+
+    let function_names = program.names.clone();
+
+    let debug_artifact: DebugArtifact = program.into();
+
+    for (func_idx, (func_gates, func_name)) in
+        backend_gates_response.functions.into_iter().zip(function_names).enumerate()
+    {
+        println!(
+            "Opcode count: {}, Total gates by opcodes: {}, Circuit size: {}",
+            func_gates.acir_opcodes,
+            func_gates.gates_per_opcode.iter().sum::<usize>(),
+            func_gates.circuit_size
+        );
+
+        // Create a nested hashmap with the stack items, folding the gates for all the callsites that are equal
+        let mut folded_stack_items = BTreeMap::new();
+
+        func_gates.gates_per_opcode.into_iter().enumerate().for_each(|(opcode_index, gates)| {
+            let call_stack = &debug_artifact.debug_symbols[func_idx]
+                .locations
+                .get(&OpcodeLocation::Acir(opcode_index));
+            let location_names = if let Some(call_stack) = call_stack {
+                call_stack
+                    .iter()
+                    .map(|location| location_to_callsite_label(*location, &debug_artifact))
+                    .collect::<Vec<String>>()
+            } else {
+                vec!["unknown".to_string()]
+            };
+
+            add_locations_to_folded_stack_items(&mut folded_stack_items, location_names, gates);
+        });
+        let folded_lines = to_folded_sorted_lines(&folded_stack_items, Default::default());
+
+        flamegraph_generator.generate_flamegraph(
+            folded_lines.iter().map(|as_string| as_string.as_str()),
+            artifact_path.to_str().unwrap(),
+            &func_name,
+            &Path::new(&output_path).join(Path::new(&format!("{}.svg", &func_name))),
+        )?;
+    }
+
+    Ok(())
+}
+
+pub(crate) fn read_program_from_file<P: AsRef<Path>>(
+    circuit_path: P,
+) -> eyre::Result<ProgramArtifact> {
+    let file_path = circuit_path.as_ref().with_extension("json");
+
+    let input_string = std::fs::read(file_path)?;
+    let program = serde_json::from_slice(&input_string)?;
+
+    Ok(program)
+}
+
+fn location_to_callsite_label<'files>(
+    location: Location,
+    files: &'files impl Files<'files, FileId = fm::FileId>,
+) -> String {
+    let filename =
+        Path::new(&files.name(location.file).expect("should have a file path").to_string())
+            .file_name()
+            .map(|os_str| os_str.to_string_lossy().to_string())
+            .unwrap_or("invalid_path".to_string());
+    let source = files.source(location.file).expect("should have a file source");
+
+    let code_slice = source
+        .as_ref()
+        .chars()
+        .skip(location.span.start() as usize)
+        .take(location.span.end() as usize - location.span.start() as usize)
+        .collect::<String>();
+
+    // ";" is used for frame separation, and is not allowed by inferno
+    // Check code slice for ";" and replace it with 'GREEK QUESTION MARK' (U+037E)
+    let code_slice = code_slice.replace(';', "\u{037E}");
+
+    let (line, column) = line_and_column_from_span(source.as_ref(), &location.span);
+
+    format!("{}:{}:{}::{}", filename, line, column, code_slice)
+}
+
+fn add_locations_to_folded_stack_items(
+    stack_items: &mut BTreeMap<String, FoldedStackItem>,
+    locations: Vec<String>,
+    gates: usize,
+) {
+    let mut child_map = stack_items;
+    for (index, location) in locations.iter().enumerate() {
+        let current_item = child_map
+            .entry(location.clone())
+            .or_insert(FoldedStackItem { total_gates: 0, nested_items: BTreeMap::new() });
+
+        child_map = &mut current_item.nested_items;
+
+        if index == locations.len() - 1 {
+            current_item.total_gates += gates;
+        }
+    }
+}
+
+/// Creates a vector of lines in the format that inferno expects from a nested hashmap of stack items
+/// The lines have to be sorted in the following way, exploring the graph in a depth-first manner:
+/// main 100
+/// main::foo 0
+/// main::foo::bar 200
+/// main::baz 27
+/// main::baz::qux 800
+fn to_folded_sorted_lines(
+    folded_stack_items: &BTreeMap<String, FoldedStackItem>,
+    parent_stacks: im::Vector<String>,
+) -> Vec<String> {
+    folded_stack_items
+        .iter()
+        .flat_map(move |(location, folded_stack_item)| {
+            let frame_list: Vec<String> =
+                parent_stacks.iter().cloned().chain(std::iter::once(location.clone())).collect();
+            let line: String =
+                format!("{} {}", frame_list.join(";"), folded_stack_item.total_gates);
+
+            let mut new_parent_stacks = parent_stacks.clone();
+            new_parent_stacks.push_back(location.clone());
+
+            let child_lines: Vec<String> =
+                to_folded_sorted_lines(&folded_stack_item.nested_items, new_parent_stacks);
+
+            std::iter::once(line).chain(child_lines)
+        })
+        .collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use acir::circuit::{OpcodeLocation, Program};
+    use color_eyre::eyre::{self};
+    use fm::{FileId, FileManager};
+    use noirc_artifacts::program::ProgramArtifact;
+    use noirc_driver::DebugFile;
+    use noirc_errors::{
+        debug_info::{DebugInfo, ProgramDebugInfo},
+        Location, Span,
+    };
+    use std::{
+        cell::RefCell,
+        collections::{BTreeMap, HashMap},
+        path::{Path, PathBuf},
+    };
+    use tempfile::TempDir;
+
+    use super::{BackendGatesReport, BackendGatesResponse, GatesProvider};
+
+    struct TestGateProvider {
+        mock_responses: HashMap<PathBuf, BackendGatesResponse>,
+    }
+
+    impl GatesProvider for TestGateProvider {
+        fn get_gates(&self, artifact_path: &std::path::Path) -> eyre::Result<BackendGatesResponse> {
+            let response = self
+                .mock_responses
+                .get(artifact_path)
+                .expect("should have a mock response for the artifact path");
+
+            Ok(response.clone())
+        }
+    }
+
+    #[derive(Default)]
+    struct TestFlamegraphGenerator {
+        lines_received: RefCell<Vec<Vec<String>>>,
+    }
+
+    impl super::FlamegraphGenerator for TestFlamegraphGenerator {
+        fn generate_flamegraph<'lines, I: IntoIterator<Item = &'lines str>>(
+            &self,
+            folded_lines: I,
+            _artifact_name: &str,
+            _function_name: &str,
+            _output_path: &std::path::Path,
+        ) -> eyre::Result<()> {
+            let lines = folded_lines.into_iter().map(|line| line.to_string()).collect();
+            self.lines_received.borrow_mut().push(lines);
+            Ok(())
+        }
+    }
+
+    fn find_spans_for(source: &str, needle: &str) -> Vec<Span> {
+        let mut spans = Vec::new();
+        let mut start = 0;
+        while let Some(start_idx) = source[start..].find(needle) {
+            let start_idx = start + start_idx;
+            let end_idx = start_idx + needle.len();
+            spans.push(Span::inclusive(start_idx as u32, end_idx as u32 - 1));
+            start = end_idx;
+        }
+        spans
+    }
+
+    struct TestCase {
+        expected_folded_sorted_lines: Vec<Vec<String>>,
+        debug_symbols: ProgramDebugInfo,
+        file_map: BTreeMap<FileId, DebugFile>,
+        gates_report: BackendGatesResponse,
+    }
+
+    fn simple_test_case(temp_dir: &TempDir) -> TestCase {
+        let source_code = r##"
+        fn main() {
+            foo();
+            bar();
+            whatever();
+        }
+        fn foo() {
+            baz();
+        }
+        fn bar () {
+            whatever()
+        }
+        fn baz () {
+            whatever()
+        }
+        "##;
+
+        let source_file_name = Path::new("main.nr");
+        let mut fm = FileManager::new(temp_dir.path());
+        let file_id = fm.add_file_with_source(source_file_name, source_code.to_string()).unwrap();
+
+        let main_declaration_location =
+            Location::new(find_spans_for(source_code, "fn main()")[0], file_id);
+        let main_foo_call_location =
+            Location::new(find_spans_for(source_code, "foo()")[0], file_id);
+        let main_bar_call_location =
+            Location::new(find_spans_for(source_code, "bar()")[0], file_id);
+        let main_whatever_call_location =
+            Location::new(find_spans_for(source_code, "whatever()")[0], file_id);
+        let foo_baz_call_location = Location::new(find_spans_for(source_code, "baz()")[0], file_id);
+        let bar_whatever_call_location =
+            Location::new(find_spans_for(source_code, "whatever()")[1], file_id);
+        let baz_whatever_call_location =
+            Location::new(find_spans_for(source_code, "whatever()")[2], file_id);
+
+        let mut opcode_locations = BTreeMap::<OpcodeLocation, Vec<Location>>::new();
+        // main::foo::baz::whatever
+        opcode_locations.insert(
+            OpcodeLocation::Acir(0),
+            vec![
+                main_declaration_location,
+                main_foo_call_location,
+                foo_baz_call_location,
+                baz_whatever_call_location,
+            ],
+        );
+
+        // main::bar::whatever
+        opcode_locations.insert(
+            OpcodeLocation::Acir(1),
+            vec![main_declaration_location, main_bar_call_location, bar_whatever_call_location],
+        );
+        // main::whatever
+        opcode_locations.insert(
+            OpcodeLocation::Acir(2),
+            vec![main_declaration_location, main_whatever_call_location],
+        );
+
+        let file_map = BTreeMap::from_iter(vec![(
+            file_id,
+            DebugFile { source: source_code.to_string(), path: source_file_name.to_path_buf() },
+        )]);
+
+        let debug_symbols = ProgramDebugInfo {
+            debug_infos: vec![DebugInfo::new(
+                opcode_locations,
+                BTreeMap::default(),
+                BTreeMap::default(),
+                BTreeMap::default(),
+            )],
+        };
+
+        let backend_gates_response = BackendGatesResponse {
+            functions: vec![BackendGatesReport {
+                acir_opcodes: 3,
+                circuit_size: 100,
+                gates_per_opcode: vec![10, 20, 30],
+            }],
+        };
+
+        let expected_folded_sorted_lines = vec![
+            "main.nr:2:9::fn main() 0".to_string(),
+            "main.nr:2:9::fn main();main.nr:3:13::foo() 0".to_string(),
+            "main.nr:2:9::fn main();main.nr:3:13::foo();main.nr:8:13::baz() 0".to_string(),
+            "main.nr:2:9::fn main();main.nr:3:13::foo();main.nr:8:13::baz();main.nr:14:13::whatever() 10".to_string(),
+            "main.nr:2:9::fn main();main.nr:4:13::bar() 0".to_string(),
+            "main.nr:2:9::fn main();main.nr:4:13::bar();main.nr:11:13::whatever() 20".to_string(),
+            "main.nr:2:9::fn main();main.nr:5:13::whatever() 30".to_string(),
+        ];
+
+        TestCase {
+            expected_folded_sorted_lines: vec![expected_folded_sorted_lines],
+            debug_symbols,
+            file_map,
+            gates_report: backend_gates_response,
+        }
+    }
+
+    #[test]
+    fn test_flamegraph() {
+        let temp_dir = tempfile::tempdir().unwrap();
+
+        let test_cases = vec![simple_test_case(&temp_dir)];
+        let artifact_names: Vec<_> =
+            test_cases.iter().enumerate().map(|(idx, _)| format!("test{}.json", idx)).collect();
+
+        let test_cases_with_names: Vec<_> = test_cases.into_iter().zip(artifact_names).collect();
+
+        let mut mock_responses: HashMap<PathBuf, BackendGatesResponse> = HashMap::new();
+        // Collect mock responses
+        for (test_case, artifact_name) in test_cases_with_names.iter() {
+            mock_responses.insert(
+                temp_dir.path().join(artifact_name.clone()),
+                test_case.gates_report.clone(),
+            );
+        }
+
+        let provider = TestGateProvider { mock_responses };
+
+        for (test_case, artifact_name) in test_cases_with_names.iter() {
+            let artifact_path = temp_dir.path().join(artifact_name.clone());
+
+            let artifact = ProgramArtifact {
+                noir_version: "0.0.0".to_string(),
+                hash: 27,
+                abi: noirc_abi::Abi::default(),
+                bytecode: Program::default(),
+                debug_symbols: test_case.debug_symbols.clone(),
+                file_map: test_case.file_map.clone(),
+                names: vec!["main".to_string()],
+            };
+
+            // Write the artifact to a file
+            let artifact_file = std::fs::File::create(&artifact_path).unwrap();
+            serde_json::to_writer(artifact_file, &artifact).unwrap();
+
+            let flamegraph_generator = TestFlamegraphGenerator::default();
+
+            super::run_with_provider(
+                &artifact_path,
+                &provider,
+                &flamegraph_generator,
+                temp_dir.path(),
+            )
+            .expect("should run without errors");
+
+            // Check that the flamegraph generator was called with the correct folded sorted lines
+            let calls_received = flamegraph_generator.lines_received.borrow().clone();
+
+            assert_eq!(calls_received, test_case.expected_folded_sorted_lines);
+        }
+    }
+}
diff --git a/tooling/profiler/src/cli/mod.rs b/tooling/profiler/src/cli/mod.rs
new file mode 100644
index
00000000000..d54a3f6167c --- /dev/null +++ b/tooling/profiler/src/cli/mod.rs @@ -0,0 +1,33 @@ +use clap::{Parser, Subcommand}; +use color_eyre::eyre; +use const_format::formatcp; + +mod gates_flamegraph_cmd; + +const PROFILER_VERSION: &str = env!("CARGO_PKG_VERSION"); + +static VERSION_STRING: &str = formatcp!("version = {}\n", PROFILER_VERSION,); + +#[derive(Parser, Debug)] +#[command(name="Noir profiler", author, version=VERSION_STRING, about, long_about = None)] +struct ProfilerCli { + #[command(subcommand)] + command: GatesFlamegraphCommand, +} + +#[non_exhaustive] +#[derive(Subcommand, Clone, Debug)] +enum GatesFlamegraphCommand { + GatesFlamegraph(gates_flamegraph_cmd::GatesFlamegraphCommand), +} + +pub(crate) fn start_cli() -> eyre::Result<()> { + let ProfilerCli { command } = ProfilerCli::parse(); + + match command { + GatesFlamegraphCommand::GatesFlamegraph(args) => gates_flamegraph_cmd::run(args), + } + .map_err(|err| eyre::eyre!("{}", err))?; + + Ok(()) +} diff --git a/tooling/profiler/src/main.rs b/tooling/profiler/src/main.rs new file mode 100644 index 00000000000..8e08644de23 --- /dev/null +++ b/tooling/profiler/src/main.rs @@ -0,0 +1,35 @@ +#![forbid(unsafe_code)] +#![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] +#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] + +mod cli; + +use std::env; + +use tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; + +fn main() { + // Setup tracing + if let Ok(log_dir) = env::var("PROFILER_LOG_DIR") { + let debug_file = rolling::daily(log_dir, "profiler-log"); + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_writer(debug_file) + .with_ansi(false) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) + .init(); + } + + if let Err(report) = cli::start_cli() { + eprintln!("{report}"); + std::process::exit(1); + } +} diff --git a/yarn.lock b/yarn.lock index a947ec56aba..73dfbf6e82e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.41.0": - version: 0.41.0 - resolution: "@aztec/bb.js@npm:0.41.0" +"@aztec/bb.js@npm:0.43.0": + version: 0.43.0 + resolution: "@aztec/bb.js@npm:0.43.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -231,7 +231,7 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: e5e0095eaff3de45726366726337b131bb6ff7cf2cb53be705572c7d6715dae4c948bf86a03cfad68bc98c0c2d83e64cbe3723cc72260c8dbfa262af8cb81f9b + checksum: 63d2617529e00a05e1ac9364639dc10761e50cb6a16e010ac6354011440de037112a82d7cdd29a65b139af528c7d865b047e157b25d15ac36ff701863d550a5b languageName: node linkType: hard @@ -4396,7 +4396,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.41.0 + "@aztec/bb.js": 0.43.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3
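
For readers unfamiliar with the folded stack format that gates_flamegraph_cmd.rs hands to inferno: each line is a call stack joined by ";" followed by a sample count, where the profiler uses backend gate counts as the samples. The snippet below is an illustrative sketch only and is not part of this diff; it assumes the same inferno 0.11.19 API imported above (inferno::flamegraph::{from_lines, Options}), and the output file name and hard-coded lines are made up for the example.

// Illustrative sketch (not part of this diff): render pre-folded
// "frame;frame;frame value" lines into an SVG flamegraph, mirroring how
// InfernoFlamegraphGenerator drives inferno in gates_flamegraph_cmd.rs.
use std::io::BufWriter;

use inferno::flamegraph::{from_lines, Options};

fn main() {
    // Each line is a ';'-separated call stack plus a count; in the profiler the
    // count is the number of backend gates attributed to that call site.
    let folded_lines = vec![
        "main.nr:2:9::fn main();main.nr:3:13::foo() 10",
        "main.nr:2:9::fn main();main.nr:4:13::bar() 20",
        "main.nr:2:9::fn main();main.nr:5:13::whatever() 30",
    ];

    // Hypothetical output path, used only for this example.
    let svg_file = std::fs::File::create("example.svg").expect("should create output file");
    let writer = BufWriter::new(svg_file);

    let mut options = Options::default();
    options.title = "example".to_string();
    options.subtitle = Some("Sample = Gate".to_string());

    // `from_lines` consumes an iterator of &str in folded format and writes the SVG.
    from_lines(&mut options, folded_lines.into_iter(), writer)
        .expect("should generate flamegraph");
}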