diff --git a/crates/nargo_cli/tests/test_data/regression/Nargo.toml b/crates/nargo_cli/tests/test_data/regression/Nargo.toml
new file mode 100644
index 0000000000..e0b467ce5d
--- /dev/null
+++ b/crates/nargo_cli/tests/test_data/regression/Nargo.toml
@@ -0,0 +1,5 @@
+[package]
+authors = [""]
+compiler_version = "0.1"
+
+[dependencies]
\ No newline at end of file
diff --git a/crates/nargo_cli/tests/test_data/regression/Prover.toml b/crates/nargo_cli/tests/test_data/regression/Prover.toml
new file mode 100644
index 0000000000..2875190982
--- /dev/null
+++ b/crates/nargo_cli/tests/test_data/regression/Prover.toml
@@ -0,0 +1,2 @@
+x = [0x3f, 0x1c, 0xb8, 0x99, 0xab]
+z = 3
diff --git a/crates/nargo_cli/tests/test_data/regression/src/main.nr b/crates/nargo_cli/tests/test_data/regression/src/main.nr
new file mode 100644
index 0000000000..7ecc57a480
--- /dev/null
+++ b/crates/nargo_cli/tests/test_data/regression/src/main.nr
@@ -0,0 +1,56 @@
+global NIBBLE_LENGTH: comptime Field = 16;
+
+fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field)
+{
+    constrain 2*input.len() as u64 <= NIBBLE_LENGTH as u64;
+    constrain length as u64 <= input.len() as u64;
+
+    let mut nibble = [0 as u4; NIBBLE_LENGTH];
+
+    let first_nibble = (input[0] >> 4) as u4;
+    let parity = first_nibble as u1;
+
+    if parity == 1
+    {
+        nibble[0] = (input[0] & 0x0f) as u4;
+        for i in 1..input.len()
+        {
+            if i as u64 < length as u64
+            {
+                let x = input[i];
+                nibble[2*i - 1] = (x >> 4) as u4;
+                nibble[2*i] = (x & 0x0f) as u4;
+            }
+        }
+    }
+    else
+    {
+        for i in 0..2
+        {
+            if (i as u64) < length as u64 - 1
+            {
+                let x = input[i + 1];
+                nibble[2*i] = (x >> 4) as u4;
+                nibble[2*i + 1] = (x & 0x0f) as u4;
+            }
+        }
+    }
+
+    let out = (nibble, 2*length + (parity as Field) - 2);
+
+    out
+}
+
+fn main(x: [u8; 5], z: Field)
+{
+    //Issue 1144
+    let (nib, len) = compact_decode(x,z);
+    constrain len == 5;
+    constrain [nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8];
+}
+
+#[test]
+fn test_1144()
+{
+    main([0x3f, 0x1c, 0xb8, 0x99, 0xab], 3);
+}
\ No newline at end of file
diff --git a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs
index 70e3fd41e5..fc9f9ae5af 100644
--- a/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs
+++ b/crates/noirc_evaluator/src/ssa/acir_gen/internal_var_cache.rs
@@ -125,8 +125,11 @@ impl InternalVarCache {
         let w = evaluator.create_intermediate_variable(Expression::from(value));
         for &id in ids {
             let mut cached_var = self.get_or_compute_internal_var_unwrap(id, evaluator, ctx);
-            assert!(cached_var.cached_witness().is_none());
-            cached_var.set_witness(w);
+            if let Some(cached_witness) = cached_var.cached_witness() {
+                assert_eq!(*cached_witness, w);
+            } else {
+                cached_var.set_witness(w);
+            }
             self.update(cached_var);
         }
         w
diff --git a/crates/noirc_evaluator/src/ssa/block.rs b/crates/noirc_evaluator/src/ssa/block.rs
index d0f8b8f085..26340a4a72 100644
--- a/crates/noirc_evaluator/src/ssa/block.rs
+++ b/crates/noirc_evaluator/src/ssa/block.rs
@@ -528,7 +528,18 @@ pub(super) fn merge_path(
             removed_blocks.push_back(next);
 
             if short_circuit.is_dummy() {
-                instructions.extend(&block.instructions);
+                if instructions.is_empty() {
+                    instructions.extend(&block.instructions);
+                } else {
+                    let nonop = block.instructions.iter().filter(|&i| {
+                        if let Some(ins) = ctx.try_get_instruction(*i) {
+                            ins.operation.opcode() != Opcode::Nop
+                        } else {
+                            true
+                        }
+                    });
+                    instructions.extend(nonop);
+                }
             }
 
             if short_circuit.is_dummy() && block.is_short_circuit(ctx, assumption) {
diff --git a/crates/noirc_evaluator/src/ssa/context.rs b/crates/noirc_evaluator/src/ssa/context.rs
index c7d4dba979..2efdd8ff30 100644
--- a/crates/noirc_evaluator/src/ssa/context.rs
+++ b/crates/noirc_evaluator/src/ssa/context.rs
@@ -19,6 +19,8 @@ use noirc_frontend::monomorphization::ast::{Definition, Expression, FuncId, Lite
 use num_bigint::BigUint;
 use std::collections::{HashMap, HashSet};
 
+use super::node::Opcode;
+
 // This is a 'master' class for generating the SSA IR from the AST
 // It contains all the data; the node objects representing the source code in the nodes arena
 // and The CFG in the blocks arena
@@ -726,7 +728,6 @@ impl SsaContext {
         inline::inline_tree(self, self.first_block, &decision)?;
 
         block::merge_path(self, self.first_block, BlockId::dummy(), None)?;
-
         //The CFG is now fully flattened, so we keep only the first block.
         let mut to_remove = Vec::new();
         for b in &self.blocks {
@@ -741,7 +742,6 @@ impl SsaContext {
         self[first_block].dominated.clear();
 
         optimizations::cse(self, first_block, true)?;
-
         //Truncation
         integer::overflow_strategy(self)?;
         self.log(enable_logging, "\noverflow:", "");
@@ -1072,6 +1072,12 @@ impl SsaContext {
         if a == NodeId::dummy() || b == NodeId::dummy() {
             return NodeId::dummy();
         }
+        if let Some(ins) = self.try_get_instruction(a) {
+            if ins.operation.opcode() == Opcode::Nop {
+                assert_eq!(self.try_get_instruction(b).unwrap().operation.opcode(), Opcode::Nop);
+                return NodeId::dummy();
+            }
+        }
 
         let exit_block = self.current_block;
         let block1 = self[exit_block].predecessor[0];
diff --git a/crates/noirc_evaluator/src/ssa/optimizations.rs b/crates/noirc_evaluator/src/ssa/optimizations.rs
index 2e9370961f..8dcc7cfecf 100644
--- a/crates/noirc_evaluator/src/ssa/optimizations.rs
+++ b/crates/noirc_evaluator/src/ssa/optimizations.rs
@@ -499,6 +499,11 @@ fn cse_block_with_anchor(
                     new_list.push(*ins_id);
                 }
             }
+            Operation::Nop => {
+                if new_list.is_empty() {
+                    new_list.push(*ins_id);
+                }
+            }
             _ => {
                 //TODO: checks we do not need to propagate res arguments
                 new_list.push(*ins_id);