Skip to content

Commit

Permalink
Merge branch 'master' into aztec-packages
Browse files Browse the repository at this point in the history
  • Loading branch information
sirasistant committed Jan 17, 2024
2 parents a4b6635 + 5cdb1d0 commit 0c185c2
Show file tree
Hide file tree
Showing 82 changed files with 1,762 additions and 1,003 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/acvm_js-build.sh
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/bin/bash
set -eu

.github/scripts/install_wasm-bindgen.sh
.github/scripts/wasm-bindgen-install.sh
yarn workspace @noir-lang/acvm_js build
3 changes: 1 addition & 2 deletions .github/scripts/noir-wasm-build.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
#!/bin/bash
set -eu

.github/scripts/noirc-abi-build.sh

.github/scripts/wasm-pack-install.sh
yarn workspace @noir-lang/noir_wasm build
2 changes: 1 addition & 1 deletion .github/scripts/noirc-abi-build.sh
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/bin/bash
set -eu

.github/scripts/install_wasm-bindgen.sh
.github/scripts/wasm-bindgen-install.sh
yarn workspace @noir-lang/noirc_abi build
File renamed without changes.
5 changes: 5 additions & 0 deletions .github/scripts/wasm-pack-install.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash
# Installs a pinned version of wasm-pack for the wasm build scripts.
# -e: exit on first error; -u: treat unset variables as errors.
set -eu

# Bootstrap cargo-binstall, which installs prebuilt crate binaries instead of
# compiling from source (much faster in CI). --proto/--tlsv1.2 pin curl to HTTPS.
curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
# Install wasm-pack at a pinned version for reproducible builds; -y skips the
# interactive confirmation prompt.
cargo-binstall wasm-pack --version 0.12.1 -y
20 changes: 19 additions & 1 deletion .github/workflows/docker-test-flow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -124,13 +124,19 @@ jobs:
build-noir-wasm:
name: Build noir wasm
runs-on: ubuntu-latest
needs: [build-base-js]
needs: [build-base-js, build-noirc-abi]
container:
image: ghcr.io/noir-lang/noir:${{ github.sha }}-js
credentials:
username: ${{ github.actor }}
password: ${{ secrets.github_token }}
steps:
- name: Download noirc abi
uses: actions/download-artifact@v4
with:
name: noirc_abi_wasm
path: |
/usr/src/noir/tooling/noirc_abi_wasm
- name: Build
working-directory: /usr/src/noir
run: |
Expand Down Expand Up @@ -160,6 +166,12 @@ jobs:
- name: Prep downloaded artifact
run: |
chmod +x /usr/src/noir/target/release/nargo
- name: Download noirc abi
uses: actions/download-artifact@v4
with:
name: noirc_abi_wasm
path: |
/usr/src/noir/tooling/noirc_abi_wasm
- name: Download noir_wasm artifact
uses: actions/download-artifact@v4
with:
Expand Down Expand Up @@ -188,6 +200,12 @@ jobs:
- name: Prep downloaded artifact
run: |
chmod +x /usr/src/noir/target/release/nargo
- name: Download noirc abi
uses: actions/download-artifact@v4
with:
name: noirc_abi_wasm
path: |
/usr/src/noir/tooling/noirc_abi_wasm
- name: Download noir_wasm artifact
uses: actions/download-artifact@v4
with:
Expand Down
3 changes: 1 addition & 2 deletions acvm-repo/acvm/tests/solver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ use acvm_blackbox_solver::StubbedBlackBoxSolver;
// Reenable these test cases once we move the brillig implementation of inversion down into the acvm stdlib.

#[test]
#[ignore]
fn inversion_brillig_oracle_equivalence() {
// Opcodes below describe the following:
// fn main(x : Field, y : pub Field) {
Expand Down Expand Up @@ -126,7 +125,6 @@ fn inversion_brillig_oracle_equivalence() {
}

#[test]
#[ignore]
fn double_inversion_brillig_oracle() {
// Opcodes below describe the following:
// fn main(x : Field, y : pub Field) {
Expand Down Expand Up @@ -453,6 +451,7 @@ fn brillig_oracle_predicate() {
// ACVM should be able to be finalized in `Solved` state.
acvm.finalize();
}

#[test]
fn unsatisfied_opcode_resolved() {
let a = Witness(0);
Expand Down
10 changes: 5 additions & 5 deletions compiler/fm/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -88,15 +88,15 @@ impl FileManager {
assert!(old_value.is_none(), "ice: the same path was inserted into the file manager twice");
}

pub fn fetch_file(&self, file_id: FileId) -> &str {
pub fn fetch_file(&self, file_id: FileId) -> Option<&str> {
// Unwrap as we ensure that all file_id's map to a corresponding file in the file map
self.file_map.get_file(file_id).unwrap().source()
self.file_map.get_file(file_id).map(|file| file.source())
}

pub fn path(&self, file_id: FileId) -> &Path {
pub fn path(&self, file_id: FileId) -> Option<&Path> {
// Unwrap as we ensure that all file_ids are created by the file manager
// So all file_ids will points to a corresponding path
self.id_to_path.get(&file_id).unwrap().as_path()
self.id_to_path.get(&file_id).map(|path| path.as_path())
}

// TODO: This should accept a &Path instead of a PathBuf
Expand Down Expand Up @@ -204,7 +204,7 @@ mod tests {

let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap();

assert!(fm.path(file_id).ends_with("foo.nr"));
assert!(fm.path(file_id).unwrap().ends_with("foo.nr"));
}

/// Tests that two identical files that have different paths are treated as the same file
Expand Down
8 changes: 3 additions & 5 deletions compiler/noirc_driver/src/debug.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,12 @@ pub(crate) fn filter_relevant_files(
let mut file_map = BTreeMap::new();

for file_id in files_with_debug_symbols {
let file_source = file_manager.fetch_file(file_id);
let file_path = file_manager.path(file_id).expect("file should exist");
let file_source = file_manager.fetch_file(file_id).expect("file should exist");

file_map.insert(
file_id,
DebugFile {
source: file_source.to_string(),
path: file_manager.path(file_id).to_path_buf(),
},
DebugFile { source: file_source.to_string(), path: file_path.to_path_buf() },
);
}
file_map
Expand Down
10 changes: 7 additions & 3 deletions compiler/noirc_driver/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,10 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str =

#[derive(Args, Clone, Debug, Default, Serialize, Deserialize)]
pub struct CompileOptions {
/// Force a full recompilation.
#[arg(long = "force")]
pub force_compile: bool,

/// Emit debug information for the intermediate SSA IR
#[arg(long, hide = true)]
pub show_ssa: bool,
Expand Down Expand Up @@ -206,7 +210,6 @@ pub fn compile_main(
crate_id: CrateId,
options: &CompileOptions,
cached_program: Option<CompiledProgram>,
force_compile: bool,
) -> CompilationResult<CompiledProgram> {
let (_, mut warnings) =
check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?;
Expand All @@ -220,8 +223,9 @@ pub fn compile_main(
vec![err]
})?;

let compiled_program = compile_no_check(context, options, main, cached_program, force_compile)
.map_err(FileDiagnostic::from)?;
let compiled_program =
compile_no_check(context, options, main, cached_program, options.force_compile)
.map_err(FileDiagnostic::from)?;
let compilation_warnings = vecmap(compiled_program.warnings.clone(), FileDiagnostic::from);
if options.deny_warnings && !compilation_warnings.is_empty() {
return Err(compilation_warnings);
Expand Down
5 changes: 4 additions & 1 deletion compiler/noirc_evaluator/src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@ pub enum RuntimeError {
UnknownLoopBound { call_stack: CallStack },
#[error("Argument is not constant")]
AssertConstantFailed { call_stack: CallStack },
#[error("Nested slices are not supported")]
NestedSlice { call_stack: CallStack },
}

// We avoid showing the actual lhs and rhs since most of the time they are just 0
Expand Down Expand Up @@ -129,7 +131,8 @@ impl RuntimeError {
| RuntimeError::UnknownLoopBound { call_stack }
| RuntimeError::AssertConstantFailed { call_stack }
| RuntimeError::IntegerOutOfBounds { call_stack, .. }
| RuntimeError::UnsupportedIntegerSize { call_stack, .. } => call_stack,
| RuntimeError::UnsupportedIntegerSize { call_stack, .. }
| RuntimeError::NestedSlice { call_stack, .. } => call_stack,
}
}
}
Expand Down
37 changes: 19 additions & 18 deletions compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,13 +47,8 @@ pub(super) fn simplify_call(
let field = constant_args[0];
let limb_count = constant_args[1].to_u128() as u32;

let result_slice = constant_to_radix(endian, field, 2, limb_count, dfg);

let length = dfg
.try_get_array_length(result_slice)
.expect("ICE: a constant array should have an associated length");
let len_value =
dfg.make_constant(FieldElement::from(length as u128), Type::field());
let (len_value, result_slice) =
constant_to_radix(endian, field, 2, limb_count, dfg);

// `Intrinsic::ToBits` returns slices which are represented
// by tuples with the structure (length, slice contents)
Expand All @@ -68,13 +63,8 @@ pub(super) fn simplify_call(
let radix = constant_args[1].to_u128() as u32;
let limb_count = constant_args[2].to_u128() as u32;

let result_slice = constant_to_radix(endian, field, radix, limb_count, dfg);

let length = dfg
.try_get_array_length(result_slice)
.expect("ICE: a constant array should have an associated length");
let len_value =
dfg.make_constant(FieldElement::from(length as u128), Type::field());
let (len_value, result_slice) =
constant_to_radix(endian, field, radix, limb_count, dfg);

// `Intrinsic::ToRadix` returns slices which are represented
// by tuples with the structure (length, slice contents)
Expand Down Expand Up @@ -468,14 +458,26 @@ fn make_constant_array(dfg: &mut DataFlowGraph, results: Vec<FieldElement>, typ:
dfg.make_array(result_constants.into(), typ)
}

/// Builds a constant slice from `results`, returning the pair
/// `(length constant, slice contents)` used to represent slices in the SSA
/// (slices are modeled as a `(len, contents)` tuple).
///
/// Each element of `results` becomes a constant of type `typ`; the contents
/// are stored as a `Type::Slice` wrapping `typ`.
fn make_constant_slice(
    dfg: &mut DataFlowGraph,
    results: Vec<FieldElement>,
    typ: Type,
) -> (ValueId, ValueId) {
    // Materialize each limb as a constant value in the data flow graph.
    let constants = vecmap(results, |value| dfg.make_constant(value, typ.clone()));

    // The slice length is itself a field constant; create it before the
    // contents array to keep the original value-id ordering.
    let len = FieldElement::from(constants.len() as u128);
    let len_id = dfg.make_constant(len, Type::field());

    let slice_type = Type::Slice(Rc::new(vec![typ]));
    let contents_id = dfg.make_array(constants.into(), slice_type);

    (len_id, contents_id)
}

/// Returns a slice (represented by a tuple (len, slice)) of constants corresponding to the limbs of the radix decomposition.
fn constant_to_radix(
endian: Endian,
field: FieldElement,
radix: u32,
limb_count: u32,
dfg: &mut DataFlowGraph,
) -> ValueId {
) -> (ValueId, ValueId) {
let bit_size = u32::BITS - (radix - 1).leading_zeros();
let radix_big = BigUint::from(radix);
assert_eq!(BigUint::from(2u128).pow(bit_size), radix_big, "ICE: Radix must be a power of 2");
Expand All @@ -490,8 +492,7 @@ fn constant_to_radix(
if endian == Endian::Big {
limbs.reverse();
}

make_constant_array(dfg, limbs, Type::unsigned(bit_size))
make_constant_slice(dfg, limbs, Type::unsigned(bit_size))
}

fn to_u8_vec(dfg: &DataFlowGraph, values: im::Vector<Id<Value>>) -> Vec<u8> {
Expand Down
20 changes: 17 additions & 3 deletions compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -187,12 +187,14 @@ impl<'a> FunctionContext<'a> {

let typ = Self::convert_type(&array.typ).flatten();
Ok(match array.typ {
ast::Type::Array(_, _) => self.codegen_array(elements, typ[0].clone()),
ast::Type::Array(_, _) => {
self.codegen_array_checked(elements, typ[0].clone())?
}
ast::Type::Slice(_) => {
let slice_length =
self.builder.field_constant(array.contents.len() as u128);

let slice_contents = self.codegen_array(elements, typ[1].clone());
let slice_contents =
self.codegen_array_checked(elements, typ[1].clone())?;
Tree::Branch(vec![slice_length.into(), slice_contents])
}
_ => unreachable!(
Expand Down Expand Up @@ -231,6 +233,18 @@ impl<'a> FunctionContext<'a> {
self.codegen_array(elements, typ)
}

/// Codegen an array literal, first rejecting nested slices (slices whose
/// element type itself contains a slice), which are not supported.
///
/// Returns `RuntimeError::NestedSlice` carrying the current call stack when
/// `typ` is a nested slice; otherwise delegates to `codegen_array`.
fn codegen_array_checked(
    &mut self,
    elements: Vec<Values>,
    typ: Type,
) -> Result<Values, RuntimeError> {
    if typ.is_nested_slice() {
        Err(RuntimeError::NestedSlice { call_stack: self.builder.get_call_stack() })
    } else {
        Ok(self.codegen_array(elements, typ))
    }
}

/// Codegen an array by allocating enough space for each element and inserting separate
/// store instructions until each element is stored. The store instructions will be separated
/// by add instructions to calculate the new offset address to store to next.
Expand Down
Loading

0 comments on commit 0c185c2

Please sign in to comment.