diff --git a/noir/.github/actions/setup/action.yml b/noir/.github/actions/setup/action.yml
index 8e24b6738a9..b265a63d29a 100644
--- a/noir/.github/actions/setup/action.yml
+++ b/noir/.github/actions/setup/action.yml
@@ -4,7 +4,7 @@ description: Installs the workspace's yarn dependencies and caches them
 runs:
   using: composite
   steps:
-    - uses: actions/setup-node@v3
+    - uses: actions/setup-node@v4
       id: node
       with:
         node-version: 18.17.1
diff --git a/noir/.github/workflows/docs-dead-links.yml b/noir/.github/workflows/docs-dead-links.yml
index ffb18fa0eb2..40e948fe2c1 100644
--- a/noir/.github/workflows/docs-dead-links.yml
+++ b/noir/.github/workflows/docs-dead-links.yml
@@ -29,7 +29,7 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           WORKFLOW_NAME: ${{ github.workflow }}
-          WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${{ github.job }}
+          WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
         with:
           update_existing: true
           filename: .github/DEAD_LINKS_IN_DOCS.md
diff --git a/noir/.github/workflows/docs-pr.yml b/noir/.github/workflows/docs-pr.yml
index a16487a49ef..f4a1be826a8 100644
--- a/noir/.github/workflows/docs-pr.yml
+++ b/noir/.github/workflows/docs-pr.yml
@@ -11,7 +11,7 @@ jobs:
     steps:
       - name: Check if label is present
         id: check-labels
-        uses: actions/github-script@v3
+        uses: actions/github-script@v7.0.1
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -21,10 +21,11 @@ jobs:
             }

             // Fetch the list of files changed in the PR
-            const { data: files } = await github.pulls.listFiles({
+            const { data: files } = await github.rest.pulls.listFiles({
               owner: context.repo.owner,
               repo: context.repo.repo,
-              pull_number: context.issue.number
+              pull_number: context.issue.number,
+              per_page: 100
             });

             // Check if any file is within the 'docs' folder
@@ -33,13 +34,13 @@ jobs:

       - name: Add label if not present
         if: steps.check-labels.outputs.result == 'true'
-        uses: actions/github-script@v3
+        uses: actions/github-script@v7.0.1
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
             const labels = context.payload.pull_request.labels.map(label => label.name);
             if (!labels.includes('documentation')) {
-              github.issues.addLabels({
+              github.rest.issues.addLabels({
                 owner: context.repo.owner,
                 repo: context.repo.repo,
                 issue_number: context.issue.number,
@@ -47,20 +48,14 @@ jobs:
               })
             }

-  build_and_deploy_preview:
+  build_preview:
     runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
-    needs: add_label
-    if: needs.add_label.outputs.has_label == 'true'
     steps:
       - name: Checkout code
         uses: actions/checkout@v4

-      - name: Setup Node.js
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
+      - name: Install Yarn dependencies
+        uses: ./.github/actions/setup

       - name: Install wasm-bindgen-cli
         uses: taiki-e/install-action@v2
@@ -71,13 +66,34 @@ jobs:
         run: |
           npm i wasm-opt -g

-      - name: Install Yarn dependencies
-        uses: ./.github/actions/setup
-
       - name: Build docs
         run:
-          yarn workspaces foreach -Rt run build
+          yarn workspaces foreach -Rpt --from docs run build
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: docs
+          path: ./docs/build/
+          retention-days: 3
+
+  deploy_preview:
+    needs: [build_preview, add_label]
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+    if: needs.add_label.outputs.has_label == 'true'
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Download built docs
+        uses: actions/download-artifact@v3
+        with:
+          name: docs
+          path: ./docs/build
+

       - name: Deploy to Netlify
         uses: nwtgck/actions-netlify@v2.1
         with:
diff --git a/noir/.github/workflows/publish-docs.yml b/noir/.github/workflows/publish-docs.yml
index 07b39d7627c..231b57550c9 100644
--- a/noir/.github/workflows/publish-docs.yml
+++ b/noir/.github/workflows/publish-docs.yml
@@ -15,10 +15,8 @@ jobs:
       - name: Checkout release branch
        uses: actions/checkout@v4

-      - name: Setup Node.js
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
+      - name: Install Yarn dependencies
+        uses: ./.github/actions/setup

       - name: Install wasm-bindgen-cli
         uses: taiki-e/install-action@v2
@@ -29,9 +27,6 @@ jobs:
         run: |
           npm i wasm-opt -g

-      - name: Install Yarn dependencies
-        uses: ./.github/actions/setup
-
       - name: Build docs for deploying
         working-directory: docs
         run:
diff --git a/noir/.github/workflows/release.yml b/noir/.github/workflows/release.yml
index f9f6fe2fc54..22a733b38c5 100644
--- a/noir/.github/workflows/release.yml
+++ b/noir/.github/workflows/release.yml
@@ -44,6 +44,15 @@ jobs:
         run: |
           cargo update --workspace

+      - uses: actions/setup-node@v3
+        with:
+          node-version: 18.17.1
+          cache: 'yarn'
+          cache-dependency-path: 'yarn.lock'
+
+      - name: Update yarn.lock
+        run: yarn
+
       - name: Configure git
         run: |
           git config user.name kevaundray
@@ -68,11 +77,6 @@ jobs:
           ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }}
           token: ${{ secrets.NOIR_RELEASES_TOKEN }}

-      - name: Setup Node.js
-        uses: actions/setup-node@v2
-        with:
-          node-version: '18'
-
       - name: Install Yarn dependencies
         uses: ./.github/actions/setup

diff --git a/noir/.gitrepo b/noir/.gitrepo
index 5fcea33359c..b37742621ff 100644
--- a/noir/.gitrepo
+++ b/noir/.gitrepo
@@ -6,7 +6,7 @@
 [subrepo]
 	remote = https://github.com/noir-lang/noir
 	branch = aztec-packages
-	commit = 13f93d523342daf478e08e8ccc0f00962c7fbe05
-	parent = 41ae75cdee6285729551965972e8cb039ff3045a
+	commit = 602f23f4fb698cf6e37071936a2a46593a998d08
+	parent = 7c076653169771223a378f6c01bd9d3e3aafb682
 	method = merge
 	cmdver = 0.4.6
diff --git a/noir/.release-please-manifest.json b/noir/.release-please-manifest.json
index 01f6fb140b1..f440a7a2c51 100644
--- a/noir/.release-please-manifest.json
+++ b/noir/.release-please-manifest.json
@@ -1,4 +1,4 @@
 {
-  ".": "0.22.0",
-  "acvm-repo": "0.38.0"
+  ".": "0.23.0",
+  "acvm-repo": "0.39.0"
 }
\ No newline at end of file
diff --git a/noir/CHANGELOG.md b/noir/CHANGELOG.md
index 3fc044076a0..af7eb5b2f19 100644
--- a/noir/CHANGELOG.md
+++ b/noir/CHANGELOG.md
@@ -1,5 +1,97 @@
 # Changelog

+## [0.23.0](https://github.com/noir-lang/noir/compare/v0.22.0...v0.23.0) (2024-01-22)
+
+
+### ⚠ BREAKING CHANGES
+
+* Ban nested slices ([#4018](https://github.com/noir-lang/noir/issues/4018))
+* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955))
+* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840))
+* remove circuit methods from noir_wasm ([#3869](https://github.com/noir-lang/noir/issues/3869))
+
+### Features
+
+* Add `assert_max_bit_size` method to `Field` ([#4016](https://github.com/noir-lang/noir/issues/4016)) ([bc9a44f](https://github.com/noir-lang/noir/commit/bc9a44f285e0569825a307b06ee8acd93461c87e))
+* Add `noir-compiler` checks to `aztec_macros` ([#4031](https://github.com/noir-lang/noir/issues/4031)) ([420a5c7](https://github.com/noir-lang/noir/commit/420a5c74a14dcfeede04337a42282093a7b5e63e))
+* Add a `--force` flag to force a full recompile ([#4054](https://github.com/noir-lang/noir/issues/4054)) ([27a8e68](https://github.com/noir-lang/noir/commit/27a8e6864643d81d96e84990e2e26cd16596a695))
+* Add dependency resolver for `noir_wasm` and implement `FileManager` for consistency with native interface ([#3891](https://github.com/noir-lang/noir/issues/3891)) ([c29c7d7](https://github.com/noir-lang/noir/commit/c29c7d7c9615b9f45c696b1bdc1c497d55469dfa))
+* Add foreign call support to `noir_codegen` functions ([#3933](https://github.com/noir-lang/noir/issues/3933)) ([e5e52a8](https://github.com/noir-lang/noir/commit/e5e52a81b31d7735b680e97a9bef89a010a99763))
+* Add MVP `nargo export` command ([#3870](https://github.com/noir-lang/noir/issues/3870)) ([fbb51ed](https://github.com/noir-lang/noir/commit/fbb51ed33e9e4d9105d8946cdfc4ea387c85258e))
+* Add support for codegenning multiple functions which use the same structs in their interface ([#3868](https://github.com/noir-lang/noir/issues/3868)) ([1dcfcc5](https://github.com/noir-lang/noir/commit/1dcfcc5265f618685a783504b1d4be213e4cda2d))
+* Added efficient field comparisons for bn254 ([#4042](https://github.com/noir-lang/noir/issues/4042)) ([1f9cad0](https://github.com/noir-lang/noir/commit/1f9cad00c57ea257f57419d2446a46938beb19f9))
+* Assert maximum bit size when creating a U128 from an integer ([#4024](https://github.com/noir-lang/noir/issues/4024)) ([8f9c7e4](https://github.com/noir-lang/noir/commit/8f9c7e4de9f2ae5b39714d8e0d26b2befcd11c4a))
+* Avoid unnecessary range checks by inspecting instructions for casts ([#4039](https://github.com/noir-lang/noir/issues/4039)) ([378c18e](https://github.com/noir-lang/noir/commit/378c18eb42d75852b97f849d05c9e3f650601339))
+* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c))
+* Bubble up `Instruction::Constrain`s to be applied as early as possible. ([#4065](https://github.com/noir-lang/noir/issues/4065)) ([66f5cdd](https://github.com/noir-lang/noir/commit/66f5cddc133ba0311028eba96c0ff6ec2ecaee59))
+* Cached LSP parsing ([#4083](https://github.com/noir-lang/noir/issues/4083)) ([b4f724e](https://github.com/noir-lang/noir/commit/b4f724e848b291a733e417c394ac3fc7649c08c5))
+* Comparison for signed integers ([#3873](https://github.com/noir-lang/noir/issues/3873)) ([bcbd49b](https://github.com/noir-lang/noir/commit/bcbd49b8b44749e149f83c1240094fa2f0a19087))
+* Decompose `Instruction::Cast` to have an explicit truncation instruction ([#3946](https://github.com/noir-lang/noir/issues/3946)) ([35f18ef](https://github.com/noir-lang/noir/commit/35f18ef4d7c8041e3cf622a5643748d0793c2aa6))
+* Decompose `Instruction::Constrain` into multiple more basic constraints ([#3892](https://github.com/noir-lang/noir/issues/3892)) ([51cf9d3](https://github.com/noir-lang/noir/commit/51cf9d37c8b9fbb14bb54b178d93129a7563e131))
+* Docker testing flow ([#3895](https://github.com/noir-lang/noir/issues/3895)) ([179c90d](https://github.com/noir-lang/noir/commit/179c90dc3263c85de105c57925d9c5894427e8e1))
+* Extract parsing to its own pass and do it in parallel ([#4063](https://github.com/noir-lang/noir/issues/4063)) ([569cbbc](https://github.com/noir-lang/noir/commit/569cbbc231a242c32821cba56f3649f3228a1cc7))
+* Implement `Eq` trait on curve points ([#3944](https://github.com/noir-lang/noir/issues/3944)) ([abf751a](https://github.com/noir-lang/noir/commit/abf751ab7f57f87520be16b2bc6168efdf95a430))
+* Implement DAP protocol in Nargo ([#3627](https://github.com/noir-lang/noir/issues/3627)) ([13834d4](https://github.com/noir-lang/noir/commit/13834d43bd876909cb50494a41b42297f7e6375b))
+* Implement generic traits ([#4000](https://github.com/noir-lang/noir/issues/4000)) ([916fd15](https://github.com/noir-lang/noir/commit/916fd158aa361ac80d32767f575ad896c3462b15))
+* Implement Operator Overloading ([#3931](https://github.com/noir-lang/noir/issues/3931)) ([4b16090](https://github.com/noir-lang/noir/commit/4b16090beecd0fcdd41c9e7b8f615c4625c26a5b))
+* **lsp:** Cache definitions for goto requests ([#3930](https://github.com/noir-lang/noir/issues/3930)) ([4a2140f](https://github.com/noir-lang/noir/commit/4a2140f1f36bbe3afbc006f8db74820308ae27d5))
+* **lsp:** Goto global ([#4043](https://github.com/noir-lang/noir/issues/4043)) ([15237b3](https://github.com/noir-lang/noir/commit/15237b34dbce5ea54973a178449e67cca8ac4f9d))
+* **lsp:** Goto struct member inside Impl method ([#3918](https://github.com/noir-lang/noir/issues/3918)) ([99c2c5a](https://github.com/noir-lang/noir/commit/99c2c5a2c2c0da6bad783b60d9e3de8d9a1f4ee4))
+* **lsp:** Goto trait from trait impl ([#3956](https://github.com/noir-lang/noir/issues/3956)) ([eb566e2](https://github.com/noir-lang/noir/commit/eb566e2125e847a3e3efbd2bc15a88a1c454a7df))
+* **lsp:** Goto trait method declaration ([#3991](https://github.com/noir-lang/noir/issues/3991)) ([eb79166](https://github.com/noir-lang/noir/commit/eb79166f7d2b7aa45c9c6c0aa37db1c0a5dfa00f))
+* **lsp:** Goto type alias ([#4061](https://github.com/noir-lang/noir/issues/4061)) ([dc83385](https://github.com/noir-lang/noir/commit/dc83385e9fe5766cd8218265be38c54243cae76e))
+* **lsp:** Goto type definition ([#4029](https://github.com/noir-lang/noir/issues/4029)) ([8bb4ddf](https://github.com/noir-lang/noir/commit/8bb4ddfdd81d491ff713a056a7eae522f329d173))
+* **lsp:** Re-add code lens feature with improved performance ([#3829](https://github.com/noir-lang/noir/issues/3829)) ([8f5cd6c](https://github.com/noir-lang/noir/commit/8f5cd6c0b641b3970bf626e8910b2a4c7cc8c310))
+* Optimize array ops for arrays of structs ([#4027](https://github.com/noir-lang/noir/issues/4027)) ([c9ec0d8](https://github.com/noir-lang/noir/commit/c9ec0d811ddc8653201ed765b51585a7c1b946fb))
+* Optimize logic gate ACIR-gen ([#3897](https://github.com/noir-lang/noir/issues/3897)) ([926460a](https://github.com/noir-lang/noir/commit/926460a0c70e21e2f4720148cf424e44ab9b0678))
+* Prefer `AcirContext`-native methods for performing logic operations ([#3898](https://github.com/noir-lang/noir/issues/3898)) ([0ec39b8](https://github.com/noir-lang/noir/commit/0ec39b8396084ed1e7f20609c8ad8a5844a86674))
+* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) ([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671))
+* Remove truncation from brillig casts ([#3997](https://github.com/noir-lang/noir/issues/3997)) ([857ff97](https://github.com/noir-lang/noir/commit/857ff97b196174a0999f0fe7e387bfca5c3b7cd3))
+* Remove truncations which can be seen to be noops using type information ([#3953](https://github.com/noir-lang/noir/issues/3953)) ([cc3c2c2](https://github.com/noir-lang/noir/commit/cc3c2c22644f0b5d8369bad2362ea6e9112a0713))
+* Remove unnecessary predicate from `Lt` instruction ([#3922](https://github.com/noir-lang/noir/issues/3922)) ([a63433f](https://github.com/noir-lang/noir/commit/a63433fb8747722ec3cf2c6eb85d34e5b04bc15c))
+* Simplify chains of casts to be all in terms of the original `ValueId` ([#3984](https://github.com/noir-lang/noir/issues/3984)) ([2384d3e](https://github.com/noir-lang/noir/commit/2384d3e97af24a8718fbf57f6b276a5ce1de06fe))
+* Simplify multiplications by `0` or `1` in ACIR gen ([#3924](https://github.com/noir-lang/noir/issues/3924)) ([e58844d](https://github.com/noir-lang/noir/commit/e58844daf9f040626a3a7595f8c4f831e48a4037))
+* Support for u128 ([#3913](https://github.com/noir-lang/noir/issues/3913)) ([b4911dc](https://github.com/noir-lang/noir/commit/b4911dcf676f0925ac631ba6f60fc9c4945b2fee))
+* Support printing more types ([#4071](https://github.com/noir-lang/noir/issues/4071)) ([f5c4632](https://github.com/noir-lang/noir/commit/f5c4632e174beba508e1e31d0e2ae3f6d028ae2c))
+* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c))
+* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc))
+* Use singleton `WasmBlackBoxFunctionSolver` in `noir_js` ([#3966](https://github.com/noir-lang/noir/issues/3966)) ([10b28de](https://github.com/noir-lang/noir/commit/10b28def4d74822b7af2c19a1cc693788272b00b))
+
+
+### Bug Fixes
+
+* Acir gen doesn't panic on unsupported BB function ([#3866](https://github.com/noir-lang/noir/issues/3866)) ([34fd978](https://github.com/noir-lang/noir/commit/34fd978d206789a9e9f5167bfd690a34386834d0))
+* Allow abi encoding arrays of structs from JS ([#3867](https://github.com/noir-lang/noir/issues/3867)) ([9b713f8](https://github.com/noir-lang/noir/commit/9b713f8cf599df262a12ec1098136c50b2b46766))
+* Allow abi encoding tuples from JS ([#3894](https://github.com/noir-lang/noir/issues/3894)) ([f7fa181](https://github.com/noir-lang/noir/commit/f7fa1811ad2591020c914976f26e2f11a91cd177))
+* Allow ast when macro errors ([#4005](https://github.com/noir-lang/noir/issues/4005)) ([efccec3](https://github.com/noir-lang/noir/commit/efccec3c24eb093fba99b1c29f01a78aae5776d0))
+* Allow lsp to run inside of a docker container ([#3876](https://github.com/noir-lang/noir/issues/3876)) ([2529977](https://github.com/noir-lang/noir/commit/2529977acd684219f57ef086415557cc07af043b))
+* Bit-shifts for signed integers ([#3890](https://github.com/noir-lang/noir/issues/3890)) ([6ddd98a](https://github.com/noir-lang/noir/commit/6ddd98ab7d3fefde491cf12b785f76bf0585609e))
+* Checks for cyclic dependencies ([#3699](https://github.com/noir-lang/noir/issues/3699)) ([642011a](https://github.com/noir-lang/noir/commit/642011ab6ebbe8f012eda1da1abbf8660500723d))
+* **debugger:** Crash when stepping through locations spanning multiple lines ([#3920](https://github.com/noir-lang/noir/issues/3920)) ([223e860](https://github.com/noir-lang/noir/commit/223e860975c2698bd5043340b937de74552ec15b))
+* Don't fail if no tests and the user didn't provide a pattern ([#3864](https://github.com/noir-lang/noir/issues/3864)) ([decbd0f](https://github.com/noir-lang/noir/commit/decbd0f0c019844cd2b235e7804d2f6ba7b23897))
+* Fix advisory issue in cargo-deny ([#4077](https://github.com/noir-lang/noir/issues/4077)) ([19baea0](https://github.com/noir-lang/noir/commit/19baea0d18e2d26bd04b649f79dd8e681488d1dc))
+* Fixing dark mode background on the CTA button ([#3882](https://github.com/noir-lang/noir/issues/3882)) ([57eae42](https://github.com/noir-lang/noir/commit/57eae42080d6a928e8010c6bc77489964a5777ef))
+* Fixup exports from `noir_wasm` ([#4022](https://github.com/noir-lang/noir/issues/4022)) ([358cdd2](https://github.com/noir-lang/noir/commit/358cdd2725444091b3322c47754e3cbd9b1d3614))
+* Handle multiple imports in the same file ([#3903](https://github.com/noir-lang/noir/issues/3903)) ([219423e](https://github.com/noir-lang/noir/commit/219423eb87fa12bd8cca2a6fd2ce4c06e308783c))
+* Hoist constraints on inputs to top of program ([#4076](https://github.com/noir-lang/noir/issues/4076)) ([447aa34](https://github.com/noir-lang/noir/commit/447aa343555cbd5a7cd735876e08f43271ecdd40))
+* Implement missing codegen for `BlackBoxFunc::EcdsaSecp256r1` in brillig ([#3943](https://github.com/noir-lang/noir/issues/3943)) ([2c5eceb](https://github.com/noir-lang/noir/commit/2c5eceb04ab6bc38e954492642121c7fe3da866f))
+* Improve `nargo test` output ([#3973](https://github.com/noir-lang/noir/issues/3973)) ([3ab5ff4](https://github.com/noir-lang/noir/commit/3ab5ff431145a1f747b698caed15caebaa145f04))
+* Make `constant_to_radix` emit a slice instead of an array ([#4049](https://github.com/noir-lang/noir/issues/4049)) ([5cdb1d0](https://github.com/noir-lang/noir/commit/5cdb1d0dabe2e38a1610f718747cc2fb4263339d))
+* Operator overloading & static trait method references resolving to generic impls ([#3967](https://github.com/noir-lang/noir/issues/3967)) ([f1de8fa](https://github.com/noir-lang/noir/commit/f1de8fa3247bcee624bcd7a0f89fe7c7cd8430f1))
+* Preserve brillig entrypoint functions without arguments ([#3951](https://github.com/noir-lang/noir/issues/3951)) ([1111465](https://github.com/noir-lang/noir/commit/1111465551557ed9e97e4b43d6eccc4b5896a39f))
+* Prevent `Instruction::Constrain`s for non-primitive types ([#3916](https://github.com/noir-lang/noir/issues/3916)) ([467948f](https://github.com/noir-lang/noir/commit/467948f9ee9ae65b4e2badaa1d15835fced3e835))
+* Remove panic for adding an invalid crate name in wasm compiler ([#3977](https://github.com/noir-lang/noir/issues/3977)) ([7a1baa5](https://github.com/noir-lang/noir/commit/7a1baa56faa2deb385ef1b6c9da9073dafd5a376))
+* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925))
+* Search all levels of struct nesting before codegenning primitive types ([#3970](https://github.com/noir-lang/noir/issues/3970)) ([13ae014](https://github.com/noir-lang/noir/commit/13ae014ddcbd9eddb401c563b95053f7a1a89f1c))
+* Update generics docs to mention we have traits now ([#3980](https://github.com/noir-lang/noir/issues/3980)) ([c2acdf1](https://github.com/noir-lang/noir/commit/c2acdf1793a67abc9a074457e057a44da3b82c39))
+
+
+### Miscellaneous Chores
+
+* Ban nested slices ([#4018](https://github.com/noir-lang/noir/issues/4018)) ([f8a1fb7](https://github.com/noir-lang/noir/commit/f8a1fb7eed1ae4a9779eb16b142a64094aa603c6))
+* Remove circuit methods from noir_wasm ([#3869](https://github.com/noir-lang/noir/issues/3869)) ([12d884e](https://github.com/noir-lang/noir/commit/12d884e2b74efab7257626d8878ea1a7455ecf85))
+* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) ([836f171](https://github.com/noir-lang/noir/commit/836f17145c2901060706294461c2d282dd121b3e))
+
 ## [0.22.0](https://github.com/noir-lang/noir/compare/v0.21.0...v0.22.0) (2023-12-18)
diff --git a/noir/Cargo.lock b/noir/Cargo.lock
index 79f1934059f..93f1d25fc76 100644
--- a/noir/Cargo.lock
+++ b/noir/Cargo.lock
@@ -4,7 +4,7 @@ version = 3

 [[package]]
 name = "acir"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir_field",
  "base64 0.21.2",
@@ -23,7 +23,7 @@ dependencies = [

 [[package]]
 name = "acir_field"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "ark-bls12-381",
  "ark-bn254",
@@ -37,7 +37,7 @@ dependencies = [

 [[package]]
 name = "acvm"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir",
  "acvm_blackbox_solver",
@@ -53,7 +53,7 @@ dependencies = [

 [[package]]
 name = "acvm_blackbox_solver"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir",
  "blake2",
@@ -68,7 +68,7 @@ dependencies = [

 [[package]]
 name = "acvm_js"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acvm",
  "bn254_blackbox_solver",
@@ -115,14 +115,15 @@ dependencies = [

 [[package]]
 name = "ahash"
-version = "0.8.3"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
+checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
 dependencies = [
  "cfg-if 1.0.0",
  "getrandom 0.2.10",
  "once_cell",
  "version_check",
+ "zerocopy",
 ]

 [[package]]
@@ -211,7 +212,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"

 [[package]]
 name = "arena"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "generational-arena",
 ]
@@ -415,7 +416,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

 [[package]]
 name = "aztec_macros"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "iter-extended",
  "noirc_frontend",
@@ -579,7 +580,7 @@ dependencies = [

 [[package]]
 name = "bn254_blackbox_solver"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir",
  "acvm_blackbox_solver",
@@ -601,7 +602,7 @@ dependencies = [

 [[package]]
 name = "brillig"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir_field",
  "serde",
@@ -609,7 +610,7 @@ dependencies = [

 [[package]]
 name = "brillig_vm"
-version = "0.38.0"
+version = "0.39.0"
 dependencies = [
  "acir",
  "acvm_blackbox_solver",
@@ -845,7 +846,7 @@ dependencies = [
  "heck 0.4.1",
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -1295,7 +1296,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "strsim",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -1306,7 +1307,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
 dependencies = [
  "darling_core",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -1556,7 +1557,7 @@ dependencies = [
  "darling",
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -1682,7 +1683,7 @@ dependencies = [

 [[package]]
 name = "fm"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "codespan-reporting",
  "iter-extended",
@@ -1774,7 +1775,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -1944,9 +1945,9 @@ dependencies = [

 [[package]]
 name = "h2"
-version = "0.3.20"
+version = "0.3.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049"
+checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9"
 dependencies = [
  "bytes",
  "fnv",
@@ -1954,7 +1955,7 @@ dependencies = [
  "futures-sink",
  "futures-util",
  "http",
- "indexmap 1.9.3",
+ "indexmap 2.0.0",
  "slab",
  "tokio",
  "tokio-util 0.7.8",
@@ -1991,7 +1992,7 @@ version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
 dependencies = [
- "ahash 0.8.3",
+ "ahash 0.8.6",
 ]

 [[package]]
@@ -2254,7 +2255,7 @@ version = "0.11.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2fb7c1b80a1dfa604bb4a649a5c5aeef3d913f7c520cb42b40e534e8a61bcdfc"
 dependencies = [
- "ahash 0.8.3",
+ "ahash 0.8.6",
  "indexmap 1.9.3",
  "is-terminal",
  "itoa",
@@ -2294,7 +2295,7 @@ dependencies = [

 [[package]]
 name = "iter-extended"
-version = "0.22.0"
+version = "0.23.0"

 [[package]]
 name = "itertools"
@@ -2647,7 +2648,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389"

 [[package]]
 name = "nargo"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "codespan-reporting",
@@ -2675,7 +2676,7 @@ dependencies = [

 [[package]]
 name = "nargo_cli"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "assert_cmd",
@@ -2726,7 +2727,7 @@ dependencies = [

 [[package]]
 name = "nargo_fmt"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "bytecount",
  "noirc_frontend",
@@ -2738,7 +2739,7 @@ dependencies = [

 [[package]]
 name = "nargo_toml"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "dirs",
  "fm",
@@ -2811,7 +2812,7 @@ dependencies = [

 [[package]]
 name = "noir_debugger"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "assert_cmd",
@@ -2834,12 +2835,13 @@ dependencies = [

 [[package]]
 name = "noir_lsp"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "async-lsp",
  "codespan-lsp",
  "fm",
+ "fxhash",
  "lsp-types 0.94.1",
  "nargo",
  "nargo_fmt",
@@ -2847,6 +2849,7 @@ dependencies = [
  "noirc_driver",
  "noirc_errors",
  "noirc_frontend",
+ "rayon",
  "serde",
  "serde_json",
  "serde_with",
@@ -2858,7 +2861,7 @@ dependencies = [

 [[package]]
 name = "noir_wasm"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "build-data",
@@ -2881,7 +2884,7 @@ dependencies = [

 [[package]]
 name = "noirc_abi"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "iter-extended",
@@ -2898,7 +2901,7 @@ dependencies = [

 [[package]]
 name = "noirc_abi_wasm"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "build-data",
@@ -2915,7 +2918,7 @@ dependencies = [

 [[package]]
 name = "noirc_driver"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "aztec_macros",
@@ -2935,7 +2938,7 @@ dependencies = [

 [[package]]
 name = "noirc_errors"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "base64 0.21.2",
@@ -2952,7 +2955,7 @@ dependencies = [

 [[package]]
 name = "noirc_evaluator"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "fxhash",
@@ -2968,7 +2971,7 @@ dependencies = [

 [[package]]
 name = "noirc_frontend"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "arena",
@@ -2992,7 +2995,7 @@ dependencies = [

 [[package]]
 name = "noirc_printable_type"
-version = "0.22.0"
+version = "0.23.0"
 dependencies = [
  "acvm",
  "iter-extended",
@@ -3844,7 +3847,7 @@ dependencies = [
  "quote",
  "rust-embed-utils",
  "shellexpand",
- "syn 2.0.26",
+ "syn 2.0.32",
  "walkdir",
 ]

@@ -4154,7 +4157,7 @@ checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4176,7 +4179,7 @@ checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4226,7 +4229,7 @@ dependencies = [
  "darling",
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4251,7 +4254,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4527,9 +4530,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.26"
+version = "2.0.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970"
+checksum = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4600,9 +4603,9 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"

 [[package]]
 name = "test-binary"
-version = "3.0.1"
+version = "3.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb28771e7854f02e5705f2a1b09451d932a273f5a4ec1c9fa4c65882b8b7b6ca"
+checksum = "6c7cb854285c40b61c0fade358bf63a2bb1226688a1ea11432ea65349209e6e3"
 dependencies = [
  "camino",
  "cargo_metadata",
@@ -4649,7 +4652,7 @@ checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4740,7 +4743,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -4891,7 +4894,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]

 [[package]]
@@ -5163,7 +5166,7 @@ dependencies = [
  "once_cell",
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
  "wasm-bindgen-shared",
 ]

@@ -5197,7 +5200,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]

@@ -5678,6 +5681,26 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "zerocopy"
+version = "0.7.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.32",
+]
+
 [[package]]
 name = "zeroize"
 version = "1.6.0"
@@ -5695,5 +5718,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.26",
+ "syn 2.0.32",
 ]
diff --git a/noir/Cargo.toml b/noir/Cargo.toml
index f0fc7249efc..8a827cacfcd 100644
--- a/noir/Cargo.toml
+++ b/noir/Cargo.toml
@@ -38,7 +38,7 @@ resolver = "2"

 [workspace.package]
 # x-release-please-start-version
-version = "0.22.0"
+version = "0.23.0"
 # x-release-please-end
 authors = ["The Noir Team <team@noir-lang.org>"]
 edition = "2021"
@@ -49,14 +49,14 @@ repository = "https://github.com/noir-lang/noir/"

 [workspace.dependencies]
 # ACVM workspace dependencies
-acir_field = { version = "0.38.0", path = "acvm-repo/acir_field", default-features = false }
-acir = { version = "0.38.0", path = "acvm-repo/acir", default-features = false }
-acvm = { version = "0.38.0", path = "acvm-repo/acvm" }
+acir_field = { version = "0.39.0", path = "acvm-repo/acir_field", default-features = false }
+acir = { version = "0.39.0", path = "acvm-repo/acir", default-features = false }
+acvm = { version = "0.39.0", path = "acvm-repo/acvm" }
 stdlib = { version = "0.37.1", package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false }
-brillig = { version = "0.38.0", path = "acvm-repo/brillig", default-features = false }
-brillig_vm = { version = "0.38.0", path = "acvm-repo/brillig_vm", default-features = false }
-acvm_blackbox_solver = { version = "0.38.0", path = "acvm-repo/blackbox_solver", default-features = false }
-bn254_blackbox_solver = { version = "0.38.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false }
+brillig = { version = "0.39.0", path = "acvm-repo/brillig", default-features = false }
+brillig_vm = { version = "0.39.0", path = "acvm-repo/brillig_vm", default-features = false }
+acvm_blackbox_solver = { version = "0.39.0", path = "acvm-repo/blackbox_solver", default-features = false }
+bn254_blackbox_solver = { version = "0.39.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false }

 # Noir compiler workspace dependencies
 arena = { path = "compiler/utils/arena" }
diff --git a/noir/acvm-repo/CHANGELOG.md b/noir/acvm-repo/CHANGELOG.md
index d413bd390c4..7f68244a7eb 100644
--- a/noir/acvm-repo/CHANGELOG.md
+++ b/noir/acvm-repo/CHANGELOG.md
@@ -5,6 +5,38 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.39.0](https://github.com/noir-lang/noir/compare/v0.38.0...v0.39.0) (2024-01-22)
+
+
+### ⚠ BREAKING CHANGES
+
+* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955))
+* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840))
+* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841))
+* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805))
+
+### Features
+
+* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2))
+* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c))
+* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) ([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671))
+* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48))
+* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c))
+* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc))
+
+
+### Bug Fixes
+
+* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2))
+* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925))
+
+
+### Miscellaneous Chores
+
+* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805))
+* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579))
+* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) ([836f171](https://github.com/noir-lang/noir/commit/836f17145c2901060706294461c2d282dd121b3e))
+
 ## [0.38.0](https://github.com/noir-lang/noir/compare/v0.37.1...v0.38.0) (2023-12-18)
diff --git a/noir/acvm-repo/acir/Cargo.toml b/noir/acvm-repo/acir/Cargo.toml
index b44c64dd838..49b10c57cc8 100644
--- a/noir/acvm-repo/acir/Cargo.toml
+++ b/noir/acvm-repo/acir/Cargo.toml
@@ -2,7 +2,7 @@
 name = "acir"
 description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR"
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/acir_field/Cargo.toml b/noir/acvm-repo/acir_field/Cargo.toml
index cedfc66e734..dde121f4029 100644
--- a/noir/acvm-repo/acir_field/Cargo.toml
+++ b/noir/acvm-repo/acir_field/Cargo.toml
@@ -2,7 +2,7 @@
 name = "acir_field"
 description = "The field implementation being used by ACIR."
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/acvm/Cargo.toml b/noir/acvm-repo/acvm/Cargo.toml
index be2391a3216..a40148a01ef 100644
--- a/noir/acvm-repo/acvm/Cargo.toml
+++ b/noir/acvm-repo/acvm/Cargo.toml
@@ -2,7 +2,7 @@
 name = "acvm"
 description = "The virtual machine that processes ACIR given a backend/proof system."
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/acvm_js/Cargo.toml b/noir/acvm-repo/acvm_js/Cargo.toml
index e8d46b9717e..226e273c306 100644
--- a/noir/acvm-repo/acvm_js/Cargo.toml
+++ b/noir/acvm-repo/acvm_js/Cargo.toml
@@ -2,7 +2,7 @@
 name = "acvm_js"
 description = "Typescript wrapper around the ACVM allowing execution of ACIR code"
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/acvm_js/package.json b/noir/acvm-repo/acvm_js/package.json
index 2d878e961da..4ec9b1a2da3 100644
--- a/noir/acvm-repo/acvm_js/package.json
+++ b/noir/acvm-repo/acvm_js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@noir-lang/acvm_js",
-  "version": "0.38.0",
+  "version": "0.39.0",
   "publishConfig": {
     "access": "public"
   },
diff --git a/noir/acvm-repo/blackbox_solver/Cargo.toml b/noir/acvm-repo/blackbox_solver/Cargo.toml
index 749ef8f289a..7359cf307e4 100644
--- a/noir/acvm-repo/blackbox_solver/Cargo.toml
+++ b/noir/acvm-repo/blackbox_solver/Cargo.toml
@@ -2,7 +2,7 @@
 name = "acvm_blackbox_solver"
 description = "A solver for the blackbox functions found in ACIR and Brillig"
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml
index b98bb370f74..a73aded231f 100644
--- a/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml
+++ b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml
@@ -2,7 +2,7 @@
 name = "bn254_blackbox_solver"
 description = "Solvers for black box functions which are specific for the bn254 curve"
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/brillig/Cargo.toml b/noir/acvm-repo/brillig/Cargo.toml
index ee8651faeec..b9cedfe8d60 100644
--- a/noir/acvm-repo/brillig/Cargo.toml
+++ b/noir/acvm-repo/brillig/Cargo.toml
@@ -2,7 +2,7 @@
 name = "brillig"
 description = "Brillig is the bytecode ACIR uses for non-determinism."
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/acvm-repo/brillig_vm/Cargo.toml b/noir/acvm-repo/brillig_vm/Cargo.toml
index 91bef2572bb..5a8a34be881 100644
--- a/noir/acvm-repo/brillig_vm/Cargo.toml
+++ b/noir/acvm-repo/brillig_vm/Cargo.toml
@@ -2,7 +2,7 @@
 name = "brillig_vm"
 description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM"
 # x-release-please-start-version
-version = "0.38.0"
+version = "0.39.0"
 # x-release-please-end
 authors.workspace = true
 edition.workspace = true
diff --git a/noir/aztec_macros/src/lib.rs b/noir/aztec_macros/src/lib.rs
index c985bbc367a..7c62f8f8169 100644
--- a/noir/aztec_macros/src/lib.rs
+++ b/noir/aztec_macros/src/lib.rs
@@ -40,6 +40,7 @@ pub enum AztecMacroError {
     AztecComputeNoteHashAndNullifierNotFound { span: Span },
     AztecContractHasTooManyFunctions { span: Span },
     AztecContractConstructorMissing { span: Span },
+    UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData },
 }

 impl From<AztecMacroError> for MacroError {
@@ -65,6 +66,11 @@ impl From<AztecMacroError> for MacroError {
             secondary_message: None,
             span: Some(span),
         },
+        AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError {
+            primary_message: format!("Provided parameter type `{typ:?}` is not supported in Aztec contract interface"),
+            secondary_message: None,
+            span: Some(span),
+        },
         }
     }
 }
@@ -341,11 +347,14 @@ fn transform_module(

     for func in module.functions.iter_mut() {
         for secondary_attribute in func.def.attributes.secondary.clone() {
+            let crate_graph = &context.crate_graph[crate_id];
             if is_custom_attribute(&secondary_attribute, "aztec(private)") {
-                transform_function("Private", func, storage_defined);
+                transform_function("Private", func, storage_defined)
+                    .map_err(|err| (err.into(), crate_graph.root_file_id))?;
                 has_transformed_module = true;
             } else if is_custom_attribute(&secondary_attribute, "aztec(public)") {
-                transform_function("Public", func, storage_defined);
+                transform_function("Public", func, storage_defined)
+                    .map_err(|err| (err.into(), crate_graph.root_file_id))?;
                 has_transformed_module = true;
             }
         }
@@ -384,7 +393,11 @@ fn transform_module(
 /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs
 /// - Hashes all of the function input variables
 /// - This instantiates a helper function
-fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) {
+fn transform_function(
+    ty: &str,
+    func: &mut NoirFunction,
+    storage_defined: bool,
+) -> Result<(), AztecMacroError> {
     let context_name = format!("{}Context", ty);
     let inputs_name = format!("{}ContextInputs", ty);
     let return_type_name = format!("{}CircuitPublicInputs", ty);
@@ -396,7 +409,7 @@ fn transform_function(
     }

     // Insert the context creation as the first action
-    let create_context = create_context(&context_name, &func.def.parameters);
+    let create_context = create_context(&context_name, &func.def.parameters)?;
     func.def.body.0.splice(0..0, (create_context).iter().cloned());

     // Add the inputs to the params
@@ -423,6 +436,8 @@ fn transform_function(
         "Public" => func.def.is_open = true,
         _ => (),
     }
+
+    Ok(())
 }

 /// Transform Unconstrained
@@ -621,7 +636,7 @@ fn create_inputs(ty: &str) -> Param {
 ///     let mut context = PrivateContext::new(inputs, hasher.hash());
 /// }
 /// ```
-fn create_context(ty: &str, params: &[Param]) -> Vec<Statement> {
+fn create_context(ty: &str, params: &[Param]) -> Result<Vec<Statement>, AztecMacroError> {
     let mut injected_expressions: Vec<Statement> = vec![];

     // `let mut hasher = Hasher::new();`
@@ -637,7 +652,7 @@ fn create_context(ty: &str, params: &[Param]) -> Vec<Statement> {
     injected_expressions.push(let_hasher);

     // Iterate over each of the function parameters, adding to them to the hasher
-    params.iter().for_each(|Param { pattern, typ, span: _, visibility: _ }| {
+    for Param { pattern, typ, span, .. } in params {
         match pattern {
             Pattern::Identifier(identifier) => {
                 // Match the type to determine the padding to do
@@ -666,16 +681,18 @@ fn create_context(ty: &str, params: &[Param]) -> Vec<Statement> {
                         },
                     )
                 }
-                _ => panic!(
-                    "[Aztec Noir] Provided parameter type: {:?} is not supported",
-                    unresolved_type
-                ),
+                _ => {
+                    return Err(AztecMacroError::UnsupportedFunctionArgumentType {
+                        typ: unresolved_type.clone(),
+                        span: *span,
+                    })
+                }
                 };
                 injected_expressions.push(expression);
             }
             _ => todo!(), // Maybe unreachable?
         }
-    });
+    }

     // Create the inputs to the context
     let inputs_expression = variable("inputs");
@@ -697,7 +714,7 @@ fn create_context(ty: &str, params: &[Param]) -> Vec<Statement> {
     injected_expressions.push(let_context);

     // Return all expressions that will be injected by the hasher
-    injected_expressions
+    Ok(injected_expressions)
 }

 /// Abstract Return Type
diff --git a/noir/bootstrap_cache.sh b/noir/bootstrap_cache.sh
index ac919f3ca65..672702416bd 100755
--- a/noir/bootstrap_cache.sh
+++ b/noir/bootstrap_cache.sh
@@ -8,3 +8,4 @@ echo -e "\033[1mRetrieving noir packages from remote cache...\033[0m"
 extract_repo noir-packages /usr/src/noir/packages ./
 echo -e "\033[1mRetrieving nargo from remote cache...\033[0m"
 extract_repo noir /usr/src/noir/target/release ./target/
+
diff --git a/noir/compiler/fm/src/file_map.rs b/noir/compiler/fm/src/file_map.rs
index c4d7002a082..50412d352ec 100644
--- a/noir/compiler/fm/src/file_map.rs
+++ b/noir/compiler/fm/src/file_map.rs
@@ -75,6 +75,10 @@ impl FileMap {
     pub fn get_file_id(&self, file_name: &PathString) -> Option<FileId> {
         self.name_to_id.get(file_name).cloned()
     }
+
+    pub fn all_file_ids(&self) -> impl Iterator<Item = &FileId> {
+        self.name_to_id.values()
+    }
 }

 impl Default for FileMap {
     fn default() -> Self {
diff --git a/noir/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/compiler/noirc_driver/tests/stdlib_warnings.rs
index e9153ec2f99..6f437621123 100644
--- a/noir/compiler/noirc_driver/tests/stdlib_warnings.rs
+++ b/noir/compiler/noirc_driver/tests/stdlib_warnings.rs
@@ -1,7 +1,7 @@
 use std::path::Path;

 use noirc_driver::{file_manager_with_stdlib, prepare_crate, ErrorsAndWarnings};
-use noirc_frontend::hir::Context;
+use noirc_frontend::hir::{def_map::parse_file, Context};

 #[test]
 fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> {
@@ -15,8 +15,13 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings>
     file_manager.add_file_with_source(file_name, source.to_owned()).expect(
         "Adding source buffer to file manager should never fail when file manager is empty",
     );
+    let parsed_files = file_manager
+        .as_file_map()
+        .all_file_ids()
+        .map(|&file_id| (file_id, parse_file(&file_manager, file_id)))
+        .collect();

-    let mut context = Context::new(file_manager);
+    let mut context = Context::new(file_manager, parsed_files);
     let root_crate_id = prepare_crate(&mut context, file_name);

     let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false)?;
diff --git a/noir/compiler/noirc_evaluator/src/ssa.rs b/noir/compiler/noirc_evaluator/src/ssa.rs
index f11c077d49a..e2da5652faf 100644
--- a/noir/compiler/noirc_evaluator/src/ssa.rs
+++ b/noir/compiler/noirc_evaluator/src/ssa.rs
@@ -45,7 +45,7 @@ pub(crate) fn optimize_into_acir(
     let ssa_gen_span = span!(Level::TRACE, "ssa_generation");
     let ssa_gen_span_guard = ssa_gen_span.enter();

-    let ssa_builder = SsaBuilder::new(program, print_ssa_passes)?
+    let ssa = SsaBuilder::new(program, print_ssa_passes)?
         .run_pass(Ssa::defunctionalize, "After Defunctionalization:")
         .run_pass(Ssa::inline_functions, "After Inlining:")
         // Run mem2reg with the CFG separated into blocks
@@ -62,16 +62,12 @@ pub(crate) fn optimize_into_acir(
         // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores
         .run_pass(Ssa::mem2reg, "After Mem2Reg:")
         .run_pass(Ssa::fold_constants, "After Constant Folding:")
-        .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:");
+        .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:")
+        .run_pass(Ssa::bubble_up_constrains, "After Constraint Bubbling:")
+        .finish();

-    let brillig = ssa_builder.to_brillig(print_brillig_trace);
+    let brillig = ssa.to_brillig(print_brillig_trace);

-    // Split off any passes the are not necessary for Brillig generation but are necessary for ACIR generation.
-    // We only need to fill out nested slices as we need to have a known length when dealing with memory operations
-    // in ACIR gen while this is not necessary in the Brillig IR.
-    let ssa = ssa_builder
-        .run_pass(Ssa::fill_internal_slices, "After Fill Internal Slice Dummy Data:")
-        .finish();
     drop(ssa_gen_span_guard);

     let last_array_uses = ssa.find_last_array_uses();
diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
index c0e3ed1ff66..d832b8d0fbb 100644
--- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
+++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -88,10 +88,6 @@ struct Context {
     /// a new BlockId
     max_block_id: u32,

-    /// Maps SSA array values to their slice size and any nested slices internal to the parent slice.
-    /// This enables us to maintain the slice structure of a slice when performing an array get.
-    slice_sizes: HashMap<ValueId, Vec<usize>>,
-
     data_bus: DataBus,
 }

@@ -202,7 +198,6 @@ impl Context {
             internal_memory_blocks: HashMap::default(),
             internal_mem_block_lengths: HashMap::default(),
             max_block_id: 0,
-            slice_sizes: HashMap::default(),
             data_bus: DataBus::default(),
         }
     }
@@ -415,62 +410,10 @@ impl Context {
                 self.define_result_var(dfg, instruction_id, result_acir_var);
             }
             Instruction::Constrain(lhs, rhs, assert_message) => {
-                let lhs = self.convert_value(*lhs, dfg);
-                let rhs = self.convert_value(*rhs, dfg);
-
-                fn get_var_equality_assertions(
-                    lhs: AcirValue,
-                    rhs: AcirValue,
-                    read_from_index: &mut impl FnMut(BlockId, usize) -> Result<AcirVar, InternalError>,
-                ) -> Result<Vec<(AcirVar, AcirVar)>, InternalError> {
-                    match (lhs, rhs) {
-                        (AcirValue::Var(lhs, _), AcirValue::Var(rhs, _)) => Ok(vec![(lhs, rhs)]),
-                        (AcirValue::Array(lhs_values), AcirValue::Array(rhs_values)) => {
-                            let var_equality_assertions = lhs_values
-                                .into_iter()
-                                .zip(rhs_values)
-                                .map(|(lhs, rhs)| {
-                                    get_var_equality_assertions(lhs, rhs, read_from_index)
-                                })
-                                .collect::<Result<Vec<_>, _>>()?
-                                .into_iter()
-                                .flatten()
-                                .collect();
-                            Ok(var_equality_assertions)
-                        }
-                        (
-                            AcirValue::DynamicArray(AcirDynamicArray {
-                                block_id: lhs_block_id,
-                                len,
-                                ..
-                            }),
-                            AcirValue::DynamicArray(AcirDynamicArray {
-                                block_id: rhs_block_id,
-                                ..
-                            }),
-                        ) => try_vecmap(0..len, |i| {
-                            let lhs_var = read_from_index(lhs_block_id, i)?;
-                            let rhs_var = read_from_index(rhs_block_id, i)?;
-                            Ok((lhs_var, rhs_var))
-                        }),
-                        _ => {
-                            unreachable!("ICE: lhs and rhs should be of the same type")
-                        }
-                    }
-                }
-
-                let mut read_dynamic_array_index =
-                    |block_id: BlockId, array_index: usize| -> Result<AcirVar, InternalError> {
-                        let index_var = self.acir_context.add_constant(array_index);
+                let lhs = self.convert_numeric_value(*lhs, dfg)?;
+                let rhs = self.convert_numeric_value(*rhs, dfg)?;

-                        self.acir_context.read_from_memory(block_id, &index_var)
-                    };
-
-                for (lhs, rhs) in
-                    get_var_equality_assertions(lhs, rhs, &mut read_dynamic_array_index)?
-                {
-                    self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?;
-                }
+                self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?;
             }
             Instruction::Cast(value_id, _) => {
                 let acir_var = self.convert_numeric_value(*value_id, dfg)?;
@@ -683,21 +626,15 @@ impl Context {
     ) -> Result<bool, RuntimeError> {
         let index_const = dfg.get_numeric_constant(index);
         let value_type = dfg.type_of_value(array);
-        let (Type::Array(element_types, _) | Type::Slice(element_types)) = &value_type else {
+        // Compiler sanity checks
+        assert!(
+            !value_type.is_nested_slice(),
+            "ICE: Nested slice type has reached ACIR generation"
+        );
+        let (Type::Array(_, _) | Type::Slice(_)) = &value_type else {
             unreachable!("ICE: expected array or slice type");
         };
-        // TODO(#3188): Need to be able to handle constant index for slices to seriously reduce
-        // constraint sizes of nested slices
-        // This can only be done if we accurately flatten nested slices as otherwise we will reach
-        // index out of bounds errors. If the slice is already flat then we can treat them similarly to arrays.
-        if matches!(value_type, Type::Slice(_))
-            && element_types.iter().any(|element| element.contains_slice_element())
-        {
-            return Ok(false);
-        }
-
         match self.convert_value(array, dfg) {
             AcirValue::Var(acir_var, _) => {
                 return Err(RuntimeError::InternalError(InternalError::Unexpected {
@@ -788,24 +725,8 @@ impl Context {
                 let mut dummy_predicate_index = predicate_index;
                 // We must setup the dummy value to match the type of the value we wish to store
-                let slice_sizes = if store_type.contains_slice_element() {
-                    self.compute_slice_sizes(store, None, dfg);
-                    self.slice_sizes.get(&store).cloned().ok_or_else(|| {
-                        InternalError::Unexpected {
-                            expected: "Store value should have slice sizes computed".to_owned(),
-                            found: "Missing key in slice sizes map".to_owned(),
-                            call_stack: self.acir_context.get_call_stack(),
-                        }
-                    })?
-                } else {
-                    vec![]
-                };
-                let dummy = self.array_get_value(
-                    &store_type,
-                    block_id,
-                    &mut dummy_predicate_index,
-                    &slice_sizes,
-                )?;
+                let dummy =
+                    self.array_get_value(&store_type, block_id, &mut dummy_predicate_index)?;

                 Some(self.convert_array_set_store_value(&store_value, &dummy)?)
             }
@@ -922,26 +843,12 @@ impl Context {
             }
         }

-        let value = if !res_typ.contains_slice_element() {
-            self.array_get_value(&res_typ, block_id, &mut var_index, &[])?
-        } else {
-            let slice_sizes = self
-                .slice_sizes
-                .get(&array_id)
-                .expect("ICE: Array with slices should have associated slice sizes");
-
-            // The first max size is going to be the length of the parent slice
-            // As we are fetching from the parent slice we just want its internal
-            // slice sizes.
-            let slice_sizes = slice_sizes[1..].to_vec();
-
-            let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?;
-
-            // Insert the resulting slice sizes
-            self.slice_sizes.insert(results[0], slice_sizes);
-
-            value
-        };
+        // Compiler sanity check
+        assert!(
+            !res_typ.contains_slice_element(),
+            "ICE: Nested slice result found during ACIR generation"
+        );
+        let value = self.array_get_value(&res_typ, block_id, &mut var_index)?;

         self.define_result(dfg, instruction, value.clone());

@@ -953,7 +860,6 @@ impl Context {
         ssa_type: &Type,
         block_id: BlockId,
         var_index: &mut AcirVar,
-        slice_sizes: &[usize],
     ) -> Result<AcirValue, RuntimeError> {
         let one = self.acir_context.add_constant(FieldElement::one());
         match ssa_type.clone() {
@@ -971,33 +877,12 @@ impl Context {
                 let mut values = Vector::new();
                 for _ in 0..len {
                     for typ in element_types.as_ref() {
-                        values.push_back(self.array_get_value(
-                            typ,
-                            block_id,
-                            var_index,
-                            slice_sizes,
-                        )?);
+                        values.push_back(self.array_get_value(typ, block_id, var_index)?);
                     }
                 }
                 Ok(AcirValue::Array(values))
             }
-            Type::Slice(element_types) => {
-                // It is not enough to execute this loop and simply pass the size from the parent definition.
-                // We need the internal sizes of each type in case of a nested slice.
-                let mut values = Vector::new();
-
-                let (current_size, new_sizes) =
-                    slice_sizes.split_first().expect("should be able to split");
-
-                for _ in 0..*current_size {
-                    for typ in element_types.as_ref() {
-                        values
-                            .push_back(self.array_get_value(typ, block_id, var_index, new_sizes)?);
-                    }
-                }
-                Ok(AcirValue::Array(values))
-            }
-            _ => unreachable!("ICE - expected an array or slice"),
+            _ => unreachable!("ICE: Expected an array or numeric but got {ssa_type:?}"),
         }
     }

@@ -1059,23 +944,6 @@ impl Context {

         self.array_set_value(&store_value, result_block_id, &mut var_index)?;

-        // Set new resulting array to have the same slice sizes as the instruction input
-        if let Type::Slice(element_types) = &array_typ {
-            let has_internal_slices =
-                element_types.as_ref().iter().any(|typ| typ.contains_slice_element());
-            if has_internal_slices {
-                let slice_sizes = self
-                    .slice_sizes
-                    .get(&array_id)
-                    .expect(
-                        "ICE: Expected array with internal slices to have associated slice sizes",
-                    )
-                    .clone();
-                let results = dfg.instruction_results(instruction);
-                self.slice_sizes.insert(results[0], slice_sizes);
-            }
-        }
-
         let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) {
             Some(self.init_element_type_sizes_array(&array_typ, array_id, None, dfg)?)
         } else {
@@ -1184,8 +1052,6 @@ impl Context {
             Type::Array(_, _) | Type::Slice(_) => {
                 match &dfg[array_id] {
                     Value::Array { array, .. } => {
-                        self.compute_slice_sizes(array_id, None, dfg);
-
                         for (i, value) in array.iter().enumerate() {
                             flat_elem_type_sizes.push(
                                 self.flattened_slice_size(*value, dfg) + flat_elem_type_sizes[i],
@@ -1285,41 +1151,6 @@ impl Context {
         Ok(element_type_sizes)
     }

-    fn compute_slice_sizes(
-        &mut self,
-        current_array_id: ValueId,
-        parent_array: Option<ValueId>,
-        dfg: &DataFlowGraph,
-    ) {
-        let (array, typ) = match &dfg[current_array_id] {
-            Value::Array { array, typ } => (array, typ.clone()),
-            _ => return,
-        };
-
-        if !matches!(typ, Type::Slice(_)) {
-            return;
-        }
-
-        let element_size = typ.element_size();
-        let true_len = array.len() / element_size;
-        if let Some(parent_array) = parent_array {
-            let sizes_list =
-                self.slice_sizes.get_mut(&parent_array).expect("ICE: expected size list");
-            sizes_list.push(true_len);
-            for value in array {
-                self.compute_slice_sizes(*value, Some(parent_array), dfg);
-            }
-        } else {
-            // This means the current_array_id is the parent array
-            // The slice sizes should follow the parent array's type structure
-            // thus we start our sizes list with the parent array size.
-            self.slice_sizes.insert(current_array_id, vec![true_len]);
-            for value in array {
-                self.compute_slice_sizes(*value, Some(current_array_id), dfg);
-            }
-        }
-    }
-
     fn copy_dynamic_array(
         &mut self,
         source: BlockId,
@@ -1772,23 +1603,21 @@ impl Context {
                 let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
                 let (slice_contents, slice_typ, _) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
-                let slice = self.convert_value(slice_contents, dfg);
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

+                let slice = self.convert_value(slice_contents, dfg);
                 let mut new_elem_size = Self::flattened_value_size(&slice);

                 let mut new_slice = Vector::new();
                 self.slice_intrinsic_input(&mut new_slice, slice)?;

                 let elements_to_push = &arguments[2..];
-                // We only fill internal slices for nested slices (a slice inside of a slice).
-                // So we must directly push back elements for slices which are not a nested slice.
-                if !slice_typ.is_nested_slice() {
-                    for elem in elements_to_push {
-                        let element = self.convert_value(*elem, dfg);
-
-                        new_elem_size += Self::flattened_value_size(&element);
-                        new_slice.push_back(element);
-                    }
+                // We must directly push back elements for non-nested slices
+                for elem in elements_to_push {
+                    let element = self.convert_value(*elem, dfg);
+
+                    new_elem_size += Self::flattened_value_size(&element);
+                    new_slice.push_back(element);
                 }

                 // Increase the slice length by one to enable accessing more elements in the slice.
@@ -1800,20 +1629,6 @@ impl Context {
                 self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?;
                 // The previous slice length represents the index we want to write into.
                 let mut var_index = slice_length;
-                // Dynamic arrays are represented as flat memory. We must flatten the user facing index
-                // to a flattened index that matches the complex slice structure.
-                if slice_typ.is_nested_slice() {
-                    let element_size = slice_typ.element_size();
-
-                    // Multiply the element size against the var index before fetching the flattened index
-                    // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA,
-                    // which is how `get_flattened_index` expects its index input.
-                    let element_size_var = self.acir_context.add_constant(element_size);
-                    var_index = self.acir_context.mul_var(slice_length, element_size_var)?;
-                    var_index =
-                        self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?;
-                }
-
                 // Write the elements we wish to push back directly.
                 // The slice's underlying array value should already be filled with dummy data
                 // to enable this write to be within bounds.
@@ -1847,8 +1662,9 @@ impl Context {
                 let (slice_contents, slice_typ, _) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
-                let slice: AcirValue = self.convert_value(slice_contents, dfg);
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

+                let slice: AcirValue = self.convert_value(slice_contents, dfg);
                 let mut new_slice_size = Self::flattened_value_size(&slice);

                 // Increase the slice length by one to enable accessing more elements in the slice.
@@ -1860,33 +1676,14 @@ impl Context {
                 let elements_to_push = &arguments[2..];
                 let mut elem_size = 0;
-                // We only fill internal slices for nested slices (a slice inside of a slice).
-                // So we must directly push front elements for slices which are not a nested slice.
-                if !slice_typ.is_nested_slice() {
-                    for elem in elements_to_push.iter().rev() {
-                        let element = self.convert_value(*elem, dfg);
-
-                        elem_size += Self::flattened_value_size(&element);
-                        new_slice.push_front(element);
-                    }
-                    new_slice_size += elem_size;
-                } else {
-                    // We have already filled the appropriate dummy values for nested slice during SSA gen.
-                    // We need to account for that we do not go out of bounds by removing dummy data as we
-                    // push elements to the front of our slice.
-                    // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack.
-                    for elem in elements_to_push.iter().rev() {
-                        let element = self.convert_value(*elem, dfg);
-
-                        let elem_size = Self::flattened_value_size(&element);
-                        // Have to pop based off of the flattened value size as we read the
-                        // slice intrinsic as a flat list of AcirValue::Var
-                        for _ in 0..elem_size {
-                            new_slice.pop_back();
-                        }
-                        new_slice.push_front(element);
-                    }
+                // We must directly push front elements for non-nested slices
+                for elem in elements_to_push.iter().rev() {
+                    let element = self.convert_value(*elem, dfg);
+
+                    elem_size += Self::flattened_value_size(&element);
+                    new_slice.push_front(element);
                 }
+                new_slice_size += elem_size;

                 let new_slice_val = AcirValue::Array(new_slice.clone());
@@ -1928,55 +1725,16 @@ impl Context {
                 let (slice_contents, slice_typ, block_id) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
-                let slice = self.convert_value(slice_contents, dfg);
-
-                let element_size = slice_typ.element_size();
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

                 let mut popped_elements = Vec::new();
-                // Fetch the values we are popping off of the slice.
-                // In the case of non-nested slice the logic is simple as we do not
-                // need to account for the internal slice sizes or flattening the index.
-                //
-                // The pop back operation results are of the format [slice length, slice contents, popped elements].
-                // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element.
-                if !slice_typ.is_nested_slice() {
-                    for res in &result_ids[2..] {
-                        let elem = self.array_get_value(
-                            &dfg.type_of_value(*res),
-                            block_id,
-                            &mut var_index,
-                            &[],
-                        )?;
-                        popped_elements.push(elem);
-                    }
-                } else {
-                    // Fetch the slice sizes of the nested slice.
-                    let slice_sizes = self.slice_sizes.get(&slice_contents);
-                    let mut slice_sizes =
-                        slice_sizes.expect("ICE: should have slice sizes").clone();
-                    // We want to remove the parent size as we are fetching the child
-                    slice_sizes.remove(0);
-
-                    // Multiply the element size against the var index before fetching the flattened index
-                    // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA,
-                    // which is how `get_flattened_index` expects its index input.
-                    let element_size_var = self.acir_context.add_constant(element_size);
-                    // We want to use an index one less than the slice length
-                    var_index = self.acir_context.mul_var(var_index, element_size_var)?;
-                    var_index =
-                        self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?;
-
-                    for res in &result_ids[2..] {
-                        let elem = self.array_get_value(
-                            &dfg.type_of_value(*res),
-                            block_id,
-                            &mut var_index,
-                            &slice_sizes,
-                        )?;
-                        popped_elements.push(elem);
-                    }
+                for res in &result_ids[2..] {
+                    let elem =
+                        self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?;
+                    popped_elements.push(elem);
                 }

+                let slice = self.convert_value(slice_contents, dfg);
                 let mut new_slice = Vector::new();
                 self.slice_intrinsic_input(&mut new_slice, slice)?;

@@ -1994,11 +1752,13 @@ impl Context {
                 let (slice_contents, slice_typ, block_id) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
-                let slice = self.convert_value(slice_contents, dfg);
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

                 let one = self.acir_context.add_constant(FieldElement::one());
                 let new_slice_length = self.acir_context.sub_var(slice_length, one)?;

+                let slice = self.convert_value(slice_contents, dfg);
+
                 let mut new_slice = Vector::new();
                 self.slice_intrinsic_input(&mut new_slice, slice)?;

@@ -2010,40 +1770,14 @@ impl Context {
                 // Fetch the values we are popping off of the slice.
                 // In the case of non-nested slice the logic is simple as we do not
                 // need to account for the internal slice sizes or flattening the index.
-                //
-                // The pop front operation results are of the format [popped elements, slice length, slice contents].
-                // Thus, we look at the result ids up to the element size to determine the type of each popped element.
-                if !slice_typ.is_nested_slice() {
-                    for res in &result_ids[..element_size] {
-                        let element = self.array_get_value(
-                            &dfg.type_of_value(*res),
-                            block_id,
-                            &mut var_index,
-                            &[],
-                        )?;
-                        let elem_size = Self::flattened_value_size(&element);
-                        popped_elements_size += elem_size;
-                        popped_elements.push(element);
-                    }
-                } else {
-                    let slice_sizes = self.slice_sizes.get(&slice_contents);
-                    let mut slice_sizes =
-                        slice_sizes.expect("ICE: should have slice sizes").clone();
-                    // We want to remove the parent size as we are fetching the child
-                    slice_sizes.remove(0);
-
-                    for res in &result_ids[..element_size] {
-                        let element = self.array_get_value(
-                            &dfg.type_of_value(*res),
-                            block_id,
-                            &mut var_index,
-                            &slice_sizes,
-                        )?;
-                        let elem_size = Self::flattened_value_size(&element);
-                        popped_elements_size += elem_size;
-                        popped_elements.push(element);
-                    }
+                for res in &result_ids[..element_size] {
+                    let element =
+                        self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?;
+                    let elem_size = Self::flattened_value_size(&element);
+                    popped_elements_size += elem_size;
+                    popped_elements.push(element);
                 }
+
                 // It is expected that the `popped_elements_size` is the flattened size of the elements,
                 // as the input slice should be a dynamic array which is represented by flat memory.
                 new_slice = new_slice.slice(popped_elements_size..);
@@ -2059,6 +1793,7 @@ impl Context {
                 let (slice_contents, slice_typ, block_id) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");
                 let slice = self.convert_value(slice_contents, dfg);
                 let insert_index = self.convert_value(arguments[2], dfg).into_var()?;
@@ -2164,7 +1899,6 @@ impl Context {
                     }
                 }
-                // let new_slice_val = AcirValue::Array(new_slice);
                 let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) {
                     Some(self.init_element_type_sizes_array(
                         &slice_typ,
@@ -2189,6 +1923,7 @@ impl Context {
                 let (slice_contents, slice_typ, block_id) =
                     self.check_array_is_initialized(arguments[1], dfg)?;
+                assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");
                 let slice = self.convert_value(slice_contents, dfg);
                 let remove_index = self.convert_value(arguments[2], dfg).into_var()?;
@@ -2217,8 +1952,6 @@
                     self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?;
                 // Fetch the values we are removing from the slice.
-                // In the case of non-nested slice the logic is simple as we do not
-                // need to account for the internal slice sizes or flattening the index.
                 // As we fetch the values we can determine the size of the removed values
                 // which we will later use for writing the correct resulting slice.
                 let mut popped_elements = Vec::new();
@@ -2226,36 +1959,12 @@
                 // Set a temp index just for fetching from the original slice as `array_get_value` mutates
                 // the index internally.
let mut temp_index = flat_user_index; - if !slice_typ.is_nested_slice() { - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &[], - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } - } else { - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &slice_sizes, - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } + for res in &result_ids[2..(2 + element_size)] { + let element = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut temp_index)?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); } // Go through the entire slice argument and determine what value should be written to the new slice. diff --git a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 852848afb81..44be423be10 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -367,10 +367,12 @@ impl FunctionBuilder { let r_squared = self.insert_binary(r, BinaryOp::Mul, r); let a = self.insert_binary(r_squared, BinaryOp::Mul, lhs); let idx = self.field_constant(FieldElement::from((bit_size - i) as i128)); - let b = self.insert_array_get(rhs_bits, idx, Type::field()); + let b = self.insert_array_get(rhs_bits, idx, Type::bool()); + let not_b = self.insert_not(b); + let b = self.insert_cast(b, Type::field()); + let not_b = self.insert_cast(not_b, Type::field()); let r1 = self.insert_binary(a, BinaryOp::Mul, b); - let c = self.insert_binary(one, BinaryOp::Sub, b); - let r2 = self.insert_binary(c, BinaryOp::Mul, r_squared); + let r2 = self.insert_binary(r_squared, BinaryOp::Mul, not_b); r = self.insert_binary(r1, BinaryOp::Add, r2); } r diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs index ae53c7705c2..f412def1e76 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -139,7 +139,7 @@ impl Type { } pub(crate) fn is_nested_slice(&self) -> bool { - if let Type::Slice(element_types) = self { + if let Type::Slice(element_types) | Type::Array(element_types, _) = self { element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) } else { false diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs new file mode 100644 index 00000000000..8a903cbd87b --- /dev/null +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs @@ -0,0 +1,153 @@ +use std::collections::HashMap; + +use crate::ssa::{ + ir::instruction::{Instruction, InstructionId}, + ssa_gen::Ssa, +}; + +impl Ssa { + /// A simple SSA pass to go through each instruction and move every `Instruction::Constrain` to immediately + /// after when all of its inputs are available. 
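Editorial aside: the doc comment above summarizes the new pass. A minimal stand-alone sketch of the same bubbling strategy on a toy statement list (the `Stmt` type and `bubble_up` helper here are hypothetical illustrations, not the compiler's IR):

```rust
// Sketch of the reordering idea: move each "constrain" up so it sits
// immediately after the last statement defining one of its inputs,
// while preserving the relative order of the constrains themselves.
#[derive(Debug, PartialEq)]
enum Stmt {
    Def(u32),            // defines value `id`
    Constrain(u32, u32), // asserts over two value ids
}

fn bubble_up(stmts: Vec<Stmt>) -> Vec<Stmt> {
    let mut out: Vec<Stmt> = Vec::with_capacity(stmts.len());
    for stmt in stmts {
        let (lhs, rhs) = match stmt {
            Stmt::Constrain(lhs, rhs) => (lhs, rhs),
            other => {
                out.push(other);
                continue;
            }
        };
        // Last statement already emitted that defines one of the inputs.
        let last_def =
            out.iter().rposition(|s| matches!(s, Stmt::Def(id) if *id == lhs || *id == rhs));
        // Insert just after it, skipping constrains already anchored there
        // so earlier constrains keep their relative order.
        let mut idx = last_def.map_or(0, |i| i + 1);
        while idx < out.len() && matches!(out[idx], Stmt::Constrain(..)) {
            idx += 1;
        }
        out.insert(idx, Stmt::Constrain(lhs, rhs));
    }
    out
}

fn main() {
    let reordered = bubble_up(vec![
        Stmt::Def(0),
        Stmt::Def(1),
        Stmt::Constrain(0, 0), // depends only on Def(0): bubbles above Def(1)
        Stmt::Constrain(1, 1),
    ]);
    assert_eq!(
        reordered,
        vec![Stmt::Def(0), Stmt::Constrain(0, 0), Stmt::Def(1), Stmt::Constrain(1, 1)]
    );
}
```

The real pass tracks how many constrains were already placed under each instruction via a counter map; the `while` loop plays that role in this sketch.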
+    #[tracing::instrument(level = "trace", skip(self))]
+    pub(crate) fn bubble_up_constrains(mut self) -> Ssa {
+        for function in self.functions.values_mut() {
+            for block in function.reachable_blocks() {
+                let instructions = function.dfg[block].take_instructions();
+                let mut filtered_instructions = Vec::with_capacity(instructions.len());
+
+                // Multiple constrains can bubble up to sit under a single instruction. We want to maintain the ordering of these constraints,
+                // so we need to keep track of how many constraints are attached to a given instruction.
+                // Some assertions don't operate on instruction results, so we use Option so we also track the None case
+                let mut inserted_at_instruction: HashMap<Option<InstructionId>, usize> =
+                    HashMap::with_capacity(instructions.len());
+
+                let dfg = &function.dfg;
+                for instruction in instructions {
+                    let (lhs, rhs) = match dfg[instruction] {
+                        Instruction::Constrain(lhs, rhs, ..) => (lhs, rhs),
+                        _ => {
+                            filtered_instructions.push(instruction);
+                            continue;
+                        }
+                    };
+
+                    let last_instruction_that_creates_inputs = filtered_instructions
+                        .iter()
+                        .rev()
+                        .position(|&instruction_id| {
+                            let results = dfg.instruction_results(instruction_id).to_vec();
+                            results.contains(&lhs) || results.contains(&rhs)
+                        })
+                        // We iterate through the previous instructions in reverse order so the index is from the
+                        // back of the vector
+                        .map(|reversed_index| filtered_instructions.len() - reversed_index - 1);
+
+                    let insertion_index = last_instruction_that_creates_inputs
+                        .map(|index| {
+                            // We want to insert just after the last instruction that creates the inputs
+                            index + 1
+                        })
+                        // If it doesn't depend on the previous instructions, then we insert at the start
+                        .unwrap_or_default();
+
+                    let already_inserted_for_this_instruction = inserted_at_instruction
+                        .entry(
+                            last_instruction_that_creates_inputs
+                                .map(|index| filtered_instructions[index]),
+                        )
+                        .or_default();
+
+                    filtered_instructions.insert(
+                        insertion_index + *already_inserted_for_this_instruction,
+                        instruction,
+                    );
+
+                    *already_inserted_for_this_instruction += 1;
+                }
+
+                *function.dfg[block].instructions_mut() = filtered_instructions;
+            }
+        }
+        self
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use crate::ssa::{
+        function_builder::FunctionBuilder,
+        ir::{
+            function::RuntimeType,
+            instruction::{Binary, BinaryOp, Instruction},
+            map::Id,
+            types::Type,
+        },
+    };
+
+    #[test]
+    fn check_bubble_up_constrains() {
+        // fn main f0 {
+        //   b0(v0: Field):
+        //     v1 = add v0, Field 1
+        //     v2 = add v1, Field 1
+        //     constrain v0 == Field 1 'With message'
+        //     constrain v2 == Field 3
+        //     constrain v0 == Field 1
+        //     constrain v1 == Field 2
+        //     constrain v1 == Field 2 'With message'
+        // }
+        //
+        let main_id = Id::test_new(0);
+
+        // Compiling main
+        let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir);
+        let v0 = builder.add_parameter(Type::field());
+
+        let one = builder.field_constant(1u128);
+        let two = builder.field_constant(2u128);
+        let three = builder.field_constant(3u128);
+
+        let v1 = builder.insert_binary(v0, BinaryOp::Add, one);
+        let v2 = builder.insert_binary(v1, BinaryOp::Add, one);
+        builder.insert_constrain(v0, one, Some("With message".to_string()));
+        builder.insert_constrain(v2, three, None);
+        builder.insert_constrain(v0, one, None);
+        builder.insert_constrain(v1, two, None);
+        builder.insert_constrain(v1, two, Some("With message".to_string()));
+        builder.terminate_with_return(vec![]);
+
+        let ssa = builder.finish();
+
+        // Expected output:
+        //
+        // fn main f0 {
+        //   b0(v0: Field):
+        //
constrain v0 == Field 1 'With message' + // constrain v0 == Field 1 + // v1 = add v0, Field 1 + // constrain v1 == Field 2 + // constrain v1 == Field 2 'With message' + // v2 = add v1, Field 1 + // constrain v2 == Field 3 + // } + // + let ssa = ssa.bubble_up_constrains(); + let main = ssa.main(); + let block = &main.dfg[main.entry_block()]; + assert_eq!(block.instructions().len(), 7); + + let expected_instructions = vec![ + Instruction::Constrain(v0, one, Some("With message".to_string())), + Instruction::Constrain(v0, one, None), + Instruction::Binary(Binary { lhs: v0, rhs: one, operator: BinaryOp::Add }), + Instruction::Constrain(v1, two, None), + Instruction::Constrain(v1, two, Some("With message".to_string())), + Instruction::Binary(Binary { lhs: v1, rhs: one, operator: BinaryOp::Add }), + Instruction::Constrain(v2, three, None), + ]; + + for (index, instruction) in block.instructions().iter().enumerate() { + assert_eq!(&main.dfg[*instruction], &expected_instructions[index]); + } + } +} diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs deleted file mode 100644 index 5ee8e42fe3a..00000000000 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ /dev/null @@ -1,765 +0,0 @@ -//! This module defines the internal slices data fill pass. -//! The purpose of this pass is to fill out nested slice values represented by SSA array values. -//! "Filling out" a nested slice specifically refers to making a nested slice's internal slice types -//! match up in their size. This pass is necessary for dynamic array operations to work in ACIR gen -//! as we need to have a known size for any memory operations. As slice types do not carry a size we -//! need to make sure all nested internal slices have the same size in order to accurately -//! read from or write to a nested slice. This pass ultimately attaches dummy data to any smaller internal slice types. -//! -//! A simple example: -//! If we have a slice of the type [[Field]] which is of length 2. The internal slices themselves -//! could be of different sizes, such as 3 and 4. An array operation on this nested slice would look -//! something like below: -//! array_get [Field 3, [Field 1, Field 1, Field 1], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! Will get translated into a new instruction like such: -//! array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! -//! -//! TODO(#3188): Currently the pass only works on a single flattened block. This should be updated in followup work. -//! The steps of the pass are as follows: -//! - Process each instruction of the block to collect relevant slice size information. We want to find the maximum size that a nested slice -//! potentially could be. Slices can potentially be set to larger array values or used in intrinsics that increase or shorten their size. -//! - Track all array constants and compute an initial map of their nested slice sizes. The slice sizes map is simply a map of an SSA array value -//! to its array size and then any child slice values that may exist. -//! - We also track a map to resolve a starting array constant to its final possible array value. This map is updated on the appropriate instructions -//! such as ArraySet or any slice intrinsics. -//! - On an ArrayGet operation add the resulting value as a possible child of the original slice. 
In SSA we will reuse the same memory block
-//! for the nested slice and must account for an internal slice being fetched and set to a larger value, otherwise we may have an out of bounds error.
-//! Also set the resulting fetched value to have the same internal slice size map as the children of the original array used in the operation.
-//! - On an ArraySet operation we set the resulting value to have the same slice sizes map as the original array used in the operation. Like the result of
-//! an ArrayGet we need to also add the `value` for an ArraySet as a possible child slice of the original array.
-//! - For slice intrinsics we set the resulting value to have the same slice sizes map as the original array the same way as we do in an ArraySet.
-//! However, with a slice intrinsic we also increase the size for the respective slice intrinsics.
-//! We do not decrement the size on intrinsics that could remove values from a slice. This is because we could potentially go back to the smaller slice size,
-//! not fill in the appropriate dummies and then get an out of bounds error later when executing the ACIR. We always want to compute
-//! what a slice maximum size could be.
-//! - Now we need to add each instruction back except with the updated original array values.
-//! - Resolve the original slice value to what its final value would be using the previously computed map.
-//! - Find the max size at each layer of the recursive nested slice type.
-//! For instance in the example above we have a slice of depth 2 with the max sizes of [2, 4].
-//! - Follow the slice type to check whether the SSA value is under the specified max size. If a slice value
-//! is under the max size we then attach dummy data.
-//! - Construct a final nested slice with the now attached dummy data and replace the original array in the previously
-//! saved ArrayGet and ArraySet instructions.
-
-use crate::ssa::{
-    ir::{
-        basic_block::BasicBlockId,
-        dfg::CallStack,
-        function::{Function, RuntimeType},
-        function_inserter::FunctionInserter,
-        instruction::{Instruction, InstructionId, Intrinsic},
-        post_order::PostOrder,
-        types::Type,
-        value::{Value, ValueId},
-    },
-    ssa_gen::Ssa,
-};
-
-use acvm::FieldElement;
-use fxhash::FxHashMap as HashMap;
-
-impl Ssa {
-    #[tracing::instrument(level = "trace", skip(self))]
-    pub(crate) fn fill_internal_slices(mut self) -> Ssa {
-        for function in self.functions.values_mut() {
-            // This pass is only necessary for generating ACIR and thus we should not
-            // process Brillig functions.
-            // The pass is also currently only setup to handle a function with a single flattened block.
-            // For complex Brillig functions we can expect this pass to panic.
-            if function.runtime() == RuntimeType::Acir {
-                let databus = function.dfg.data_bus.clone();
-                let mut context = Context::new(function);
-                context.process_blocks();
-                // update the databus with the new array instructions
-                function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t));
-            }
-        }
-        self
-    }
-}
-
-struct Context<'f> {
-    post_order: PostOrder,
-    inserter: FunctionInserter<'f>,
-
-    /// Maps SSA array values representing a slice's contents to its updated array value
-    /// after an array set or a slice intrinsic operation.
-    /// Maps original value -> result
-    mapped_slice_values: HashMap<ValueId, ValueId>,
-
-    /// Maps an updated array value following an array operation to its previous value.
-    /// When used in conjunction with `mapped_slice_values` we form a two way map of all array
-    /// values being used in array operations.
-    /// Maps result -> original value
-    slice_parents: HashMap<ValueId, ValueId>,
-}
-
-impl<'f> Context<'f> {
-    fn new(function: &'f mut Function) -> Self {
-        let post_order = PostOrder::with_function(function);
-        let inserter = FunctionInserter::new(function);
-
-        Context {
-            post_order,
-            inserter,
-            mapped_slice_values: HashMap::default(),
-            slice_parents: HashMap::default(),
-        }
-    }
-
-    fn process_blocks(&mut self) {
-        let mut block_order = PostOrder::with_function(self.inserter.function).into_vec();
-        block_order.reverse();
-        for block in block_order {
-            self.process_block(block);
-        }
-    }
-
-    fn process_block(&mut self, block: BasicBlockId) {
-        // Fetch SSA values potentially with internal slices
-        let instructions = self.inserter.function.dfg[block].take_instructions();
-
-        // Values containing nested slices to be replaced
-        let mut slice_values = Vec::new();
-        // Maps SSA array ID representing slice contents to its length and a list of its potential internal slices
-        // This map is constructed once for an array constant and is then updated
-        // according to the rules in `collect_slice_information`.
-        let mut slice_sizes: HashMap<ValueId, (usize, Vec<ValueId>)> = HashMap::default();
-
-        // Update the slice sizes map to help find the potential max size of each nested slice.
-        for instruction in instructions.iter() {
-            self.collect_slice_information(*instruction, &mut slice_values, &mut slice_sizes);
-        }
-
-        // Add back every instruction with the updated nested slices.
-        for instruction in instructions {
-            self.push_updated_instruction(instruction, &slice_values, &slice_sizes, block);
-        }
-
-        self.inserter.map_terminator_in_place(block);
-    }
-
-    /// Determine how the slice sizes map needs to be updated according to the provided instruction.
-    fn collect_slice_information(
-        &mut self,
-        instruction: InstructionId,
-        slice_values: &mut Vec<ValueId>,
-        slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>,
-    ) {
-        let results = self.inserter.function.dfg.instruction_results(instruction);
-        match &self.inserter.function.dfg[instruction] {
-            Instruction::ArrayGet { array, .. } => {
-                let array_typ = self.inserter.function.dfg.type_of_value(*array);
-                let array_value = &self.inserter.function.dfg[*array];
-                // If we have an SSA value containing nested slices we should mark it
-                // as a slice that potentially requires to be filled with dummy data.
-                if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element()
-                {
-                    slice_values.push(*array);
-                    // Initial insertion into the slice sizes map
-                    // Any other insertions should only occur if the value is already
-                    // a part of the map.
-                    self.compute_slice_sizes(*array, slice_sizes);
-                }
-
-                let res_typ = self.inserter.function.dfg.type_of_value(results[0]);
-                if res_typ.contains_slice_element() {
-                    if let Some(inner_sizes) = slice_sizes.get_mut(array) {
-                        // Include the result in the parent array potential children
-                        // If the result has internal slices and is called in an array set
-                        // we could potentially have a new larger slice which we need to account for
-                        inner_sizes.1.push(results[0]);
-                        self.slice_parents.insert(results[0], *array);
-
-                        let inner_sizes_iter = inner_sizes.1.clone();
-                        for slice_value in inner_sizes_iter {
-                            let inner_slice = slice_sizes.get(&slice_value).unwrap_or_else(|| {
-                                panic!("ICE: should have inner slice set for {slice_value}")
-                            });
-                            slice_sizes.insert(results[0], inner_slice.clone());
-                            if slice_value != results[0] {
-                                self.mapped_slice_values.insert(slice_value, results[0]);
-                            }
-                        }
-                    }
-                }
-            }
-            Instruction::ArraySet { array, value, ..
} => { - let array_typ = self.inserter.function.dfg.type_of_value(*array); - let array_value = &self.inserter.function.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. - if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() - { - slice_values.push(*array); - // Initial insertion into the slice sizes map - // Any other insertions should only occur if the value is already - // a part of the map. - self.compute_slice_sizes(*array, slice_sizes); - } - - let value_typ = self.inserter.function.dfg.type_of_value(*value); - if value_typ.contains_slice_element() { - self.compute_slice_sizes(*value, slice_sizes); - - let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); - inner_sizes.1.push(*value); - } - - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - let inner_sizes = inner_sizes.clone(); - - slice_sizes.insert(results[0], inner_sizes); - - self.mapped_slice_values.insert(*array, results[0]); - self.slice_parents.insert(results[0], *array); - } - } - Instruction::Call { func, arguments } => { - let func = &self.inserter.function.dfg[*func]; - if let Value::Intrinsic(intrinsic) = func { - let (argument_index, result_index) = match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SlicePopBack - | Intrinsic::SliceInsert - | Intrinsic::SliceRemove => (1, 1), - // `pop_front` returns the popped element, and then the respective slice. - // This means in the case of a slice with structs, the result index of the popped slice - // will change depending on the number of elements in the struct. - // For example, a slice with four elements will look as such in SSA: - // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) - // where v7 is the slice length and v8 is the popped slice itself. - Intrinsic::SlicePopFront => (1, results.len() - 1), - _ => return, - }; - let slice_contents = arguments[argument_index]; - match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SliceInsert => { - for arg in &arguments[(argument_index + 1)..] { - let element_typ = self.inserter.function.dfg.type_of_value(*arg); - if element_typ.contains_slice_element() { - slice_values.push(*arg); - self.compute_slice_sizes(*arg, slice_sizes); - } - } - if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { - inner_sizes.0 += 1; - - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); - - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - self.slice_parents.insert(results[result_index], slice_contents); - } - } - Intrinsic::SlicePopBack - | Intrinsic::SliceRemove - | Intrinsic::SlicePopFront => { - // We do not decrement the size on intrinsics that could remove values from a slice. - // This is because we could potentially go back to the smaller slice and not fill in dummies. 
-                            // This pass should be tracking the potential max that a slice ***could be***
-                            if let Some(inner_sizes) = slice_sizes.get(&slice_contents) {
-                                let inner_sizes = inner_sizes.clone();
-                                slice_sizes.insert(results[result_index], inner_sizes);
-
-                                self.mapped_slice_values
-                                    .insert(slice_contents, results[result_index]);
-                                self.slice_parents.insert(results[result_index], slice_contents);
-                            }
-                        }
-                        _ => {}
-                    }
-                }
-            }
-            _ => {}
-        }
-    }
-
-    fn push_updated_instruction(
-        &mut self,
-        instruction: InstructionId,
-        slice_values: &[ValueId],
-        slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>,
-        block: BasicBlockId,
-    ) {
-        match &self.inserter.function.dfg[instruction] {
-            Instruction::ArrayGet { array, .. } | Instruction::ArraySet { array, .. } => {
-                if slice_values.contains(array) {
-                    let (new_array_op_instr, call_stack) =
-                        self.get_updated_array_op_instr(*array, slice_sizes, instruction);
-                    self.inserter.push_instruction_value(
-                        new_array_op_instr,
-                        instruction,
-                        block,
-                        call_stack,
-                    );
-                } else {
-                    self.inserter.push_instruction(instruction, block);
-                }
-            }
-            Instruction::Call { func: _, arguments } => {
-                let mut args_to_replace = Vec::new();
-                for (i, arg) in arguments.iter().enumerate() {
-                    let element_typ = self.inserter.function.dfg.type_of_value(*arg);
-                    if slice_values.contains(arg) && element_typ.contains_slice_element() {
-                        args_to_replace.push((i, *arg));
-                    }
-                }
-                if args_to_replace.is_empty() {
-                    self.inserter.push_instruction(instruction, block);
-                } else {
-                    // Using the original slice is ok to do as during collection of slice information
-                    // we guarantee that only the arguments to slice intrinsic calls can be replaced.
-                    let slice_contents = arguments[1];
-
-                    let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]);
-                    let elem_depth = Self::compute_nested_slice_depth(&element_typ);
-
-                    let mut max_sizes = Vec::new();
-                    max_sizes.resize(elem_depth, 0);
-                    // We want the max for the parent of the argument
-                    let parent = self.resolve_slice_parent(slice_contents);
-                    self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0);
-
-                    for (index, arg) in args_to_replace {
-                        let element_typ = self.inserter.function.dfg.type_of_value(arg);
-                        max_sizes.remove(0);
-                        let new_array =
-                            self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes);
-
-                        let instruction_id = instruction;
-                        let (instruction, call_stack) =
-                            self.inserter.map_instruction(instruction_id);
-                        let new_call_instr = match instruction {
-                            Instruction::Call { func, mut arguments } => {
-                                arguments[index] = new_array;
-                                Instruction::Call { func, arguments }
-                            }
-                            _ => panic!("Expected call instruction"),
-                        };
-                        self.inserter.push_instruction_value(
-                            new_call_instr,
-                            instruction_id,
-                            block,
-                            call_stack,
-                        );
-                    }
-                }
-            }
-            _ => {
-                self.inserter.push_instruction(instruction, block);
-            }
-        }
-    }
-
-    /// Construct an updated ArrayGet or ArraySet instruction where the array value
-    /// has been replaced by a newly filled in array according to the max internal
-    /// slice sizes.
-    fn get_updated_array_op_instr(
-        &mut self,
-        array_id: ValueId,
-        slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>,
-        instruction: InstructionId,
-    ) -> (Instruction, CallStack) {
-        let mapped_slice_value = self.resolve_slice_value(array_id);
-
-        let (current_size, _) = slice_sizes
-            .get(&mapped_slice_value)
-            .unwrap_or_else(|| panic!("should have slice sizes: {mapped_slice_value}"));
-
-        let mut max_sizes = Vec::new();
-
-        let typ = self.inserter.function.dfg.type_of_value(array_id);
-        let depth = Self::compute_nested_slice_depth(&typ);
-        max_sizes.resize(depth, 0);
-
-        max_sizes[0] = *current_size;
-        self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1);
-
-        let new_array = self.attach_slice_dummies(&typ, Some(array_id), true, &max_sizes);
-
-        let instruction_id = instruction;
-        let (instruction, call_stack) = self.inserter.map_instruction(instruction_id);
-        let new_array_op_instr = match instruction {
-            Instruction::ArrayGet { index, .. } => {
-                Instruction::ArrayGet { array: new_array, index }
-            }
-            Instruction::ArraySet { index, value, .. } => {
-                Instruction::ArraySet { array: new_array, index, value }
-            }
-            _ => panic!("Expected array set"),
-        };
-
-        (new_array_op_instr, call_stack)
-    }
-
-    fn attach_slice_dummies(
-        &mut self,
-        typ: &Type,
-        value: Option<ValueId>,
-        is_parent_slice: bool,
-        max_sizes: &[usize],
-    ) -> ValueId {
-        match typ {
-            Type::Numeric(_) => {
-                if let Some(value) = value {
-                    self.inserter.resolve(value)
-                } else {
-                    let zero = FieldElement::zero();
-                    self.inserter.function.dfg.make_constant(zero, Type::field())
-                }
-            }
-            Type::Array(element_types, len) => {
-                if let Some(value) = value {
-                    self.inserter.resolve(value)
-                } else {
-                    let mut array = im::Vector::new();
-                    for _ in 0..*len {
-                        for typ in element_types.iter() {
-                            array.push_back(self.attach_slice_dummies(typ, None, false, max_sizes));
-                        }
-                    }
-                    self.inserter.function.dfg.make_array(array, typ.clone())
-                }
-            }
-            Type::Slice(element_types) => {
-                let (current_size, max_sizes) =
-                    max_sizes.split_first().expect("ICE: Missing internal slice max size");
-                let mut max_size = *current_size;
-                if let Some(value) = value {
-                    let mut slice = im::Vector::new();
-
-                    let value = self.inserter.function.dfg[value].clone();
-                    let array = match value {
-                        Value::Array { array, .. } => array,
-                        _ => {
-                            panic!("Expected an array value");
-                        }
-                    };
-
-                    if is_parent_slice {
-                        max_size = array.len() / element_types.len();
-                    }
-                    for i in 0..max_size {
-                        for (element_index, element_type) in element_types.iter().enumerate() {
-                            let index_usize = i * element_types.len() + element_index;
-                            let valid_index = index_usize < array.len();
-                            let maybe_value =
-                                if valid_index { Some(array[index_usize]) } else { None };
-                            slice.push_back(self.attach_slice_dummies(
-                                element_type,
-                                maybe_value,
-                                false,
-                                max_sizes,
-                            ));
-                        }
-                    }
-
-                    self.inserter.function.dfg.make_array(slice, typ.clone())
-                } else {
-                    let mut slice = im::Vector::new();
-                    for _ in 0..max_size {
-                        for typ in element_types.iter() {
-                            slice.push_back(self.attach_slice_dummies(typ, None, false, max_sizes));
-                        }
-                    }
-                    self.inserter.function.dfg.make_array(slice, typ.clone())
-                }
-            }
-            Type::Reference(_) => {
-                unreachable!("ICE: Generating dummy data for references is unsupported")
-            }
-            Type::Function => {
-                unreachable!("ICE: Generating dummy data for functions is unsupported")
-            }
-        }
-    }
-
-    // This method computes a map representing a nested slice.
-    // The method also automatically computes the given max slice size
-    // at each depth of the recursive type.
-    // For example, if we had a nested slice
-    fn compute_slice_sizes(
-        &self,
-        array_id: ValueId,
-        slice_sizes: &mut HashMap<ValueId, (usize, Vec<ValueId>)>,
-    ) {
-        if let Value::Array { array, typ } = &self.inserter.function.dfg[array_id].clone() {
-            if let Type::Slice(_) = typ {
-                let element_size = typ.element_size();
-                let len = array.len() / element_size;
-                let mut slice_value = (len, vec![]);
-                for value in array {
-                    let typ = self.inserter.function.dfg.type_of_value(*value);
-                    if let Type::Slice(_) = typ {
-                        slice_value.1.push(*value);
-                        self.compute_slice_sizes(*value, slice_sizes);
-                    }
-                }
-                // Mark the correct max size based upon an array value's internal structure
-                let mut max_size = 0;
-                for inner_value in slice_value.1.iter() {
-                    let inner_slice =
-                        slice_sizes.get(inner_value).expect("ICE: should have inner slice set");
-                    if inner_slice.0 > max_size {
-                        max_size = inner_slice.0;
-                    }
-                }
-                for inner_value in slice_value.1.iter() {
-                    let inner_slice =
-                        slice_sizes.get_mut(inner_value).expect("ICE: should have inner slice set");
-                    if inner_slice.0 < max_size {
-                        inner_slice.0 = max_size;
-                    }
-                }
-                slice_sizes.insert(array_id, slice_value);
-            }
-        }
-    }
-
-    /// Determine the maximum possible size of an internal slice at each
-    /// layer of a nested slice.
-    ///
-    /// If the slice map is incorrectly formed the function will exceed
-    /// the type's nested slice depth and panic.
-    fn compute_slice_max_sizes(
-        &self,
-        array_id: ValueId,
-        slice_sizes: &HashMap<ValueId, (usize, Vec<ValueId>)>,
-        max_sizes: &mut Vec<usize>,
-        depth: usize,
-    ) {
-        let array_id = self.resolve_slice_value(array_id);
-        let (current_size, inner_slices) = slice_sizes
-            .get(&array_id)
-            .unwrap_or_else(|| panic!("should have slice sizes: {array_id}"));
-
-        if inner_slices.is_empty() {
-            return;
-        }
-
-        let mut max = *current_size;
-        for inner_slice in inner_slices.iter() {
-            let inner_slice = &self.resolve_slice_value(*inner_slice);
-
-            let (inner_size, _) = slice_sizes[inner_slice];
-            if inner_size > max {
-                max = inner_size;
-            }
-            self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1);
-        }
-
-        if max > max_sizes[depth] {
-            max_sizes[depth] = max;
-        }
-    }
-
-    /// Compute the depth of nested slices in a given Type.
-    /// The depth follows the recursive type structure of a slice.
-    fn compute_nested_slice_depth(typ: &Type) -> usize {
-        let mut depth = 0;
-        if let Type::Slice(element_types) = typ {
-            depth += 1;
-            for typ in element_types.as_ref() {
-                depth += Self::compute_nested_slice_depth(typ);
-            }
-        }
-        depth
-    }
-
-    /// Resolves a ValueId representing a slice's contents to its updated value.
-    /// If there is no resolved value for the supplied value, the value which
-    /// was passed to the method is returned.
-    fn resolve_slice_value(&self, array_id: ValueId) -> ValueId {
-        match self.mapped_slice_values.get(&array_id) {
-            Some(value) => self.resolve_slice_value(*value),
-            None => array_id,
-        }
-    }
-
-    /// Resolves a ValueId representing a slice's contents to its previous value.
-    /// If there is no resolved parent value it means we have the original slice value
-    /// and the value which was passed to the method is returned.
-    fn resolve_slice_parent(&self, array_id: ValueId) -> ValueId {
-        match self.slice_parents.get(&array_id) {
-            Some(value) => self.resolve_slice_parent(*value),
-            None => array_id,
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-
-    use std::rc::Rc;
-
-    use acvm::FieldElement;
-    use im::vector;
-
-    use crate::ssa::{
-        function_builder::FunctionBuilder,
-        ir::{
-            dfg::DataFlowGraph,
-            function::RuntimeType,
-            instruction::{BinaryOp, Instruction},
-            map::Id,
-            types::Type,
-            value::ValueId,
-        },
-    };
-
-    #[test]
-    fn test_simple_nested_slice() {
-        // We want to test that a nested slice with two internal slices of primitive types
-        // fills the smaller internal slice with dummy data to match the length of the
-        // larger internal slice.
-
-        // Note that slices are represented by a tuple of (length, contents).
-        // The type of the nested slice in this test is [[Field]].
-        //
-        // This is the original SSA:
-        // acir fn main f0 {
-        //   b0(v0: Field):
-        //     v2 = lt v0, Field 2
-        //     constrain v2 == Field 1 'Index out of bounds'
-        //     v11 = array_get [[Field 3, [Field 1, Field 1, Field 1]], [Field 4, [Field 2, Field 2, Field 2, Field 2]]], index Field v0
-        //     constrain v11 == Field 4
-        //     return
-        // }
-
-        let main_id = Id::test_new(0);
-        let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir);
-
-        let main_v0 = builder.add_parameter(Type::field());
-
-        let two = builder.field_constant(2_u128);
-        // Every slice access checks against the dynamic slice length
-        let slice_access_check = builder.insert_binary(main_v0, BinaryOp::Lt, two);
-        let one = builder.field_constant(1_u128);
-        builder.insert_constrain(slice_access_check, one, Some("Index out of bounds".to_owned()));
-
-        let field_element_type = Rc::new(vec![Type::field()]);
-        let inner_slice_contents_type = Type::Slice(field_element_type);
-
-        let inner_slice_small_len = builder.field_constant(3_u128);
-        let inner_slice_small_contents =
-            builder.array_constant(vector![one, one, one], inner_slice_contents_type.clone());
-
-        let inner_slice_big_len = builder.field_constant(4_u128);
-        let inner_slice_big_contents =
-            builder.array_constant(vector![two, two, two, two], inner_slice_contents_type.clone());
-
-        let outer_slice_element_type = Rc::new(vec![Type::field(), inner_slice_contents_type]);
-        let outer_slice_type = Type::Slice(outer_slice_element_type);
-
-        let outer_slice_contents = builder.array_constant(
-            vector![
-                inner_slice_small_len,
-                inner_slice_small_contents,
-                inner_slice_big_len,
-                inner_slice_big_contents
-            ],
-            outer_slice_type,
-        );
-        // Fetching the length of the second nested slice
-        // We must use a parameter to main as we do not want the array operation to be simplified out during SSA gen. The filling of internal slices
-        // is necessary for dynamic nested slices and thus we want to generate the SSA that ACIR gen would be converting.
-        let array_get_res = builder.insert_array_get(outer_slice_contents, main_v0, Type::field());
-
-        let four = builder.field_constant(4_u128);
-        builder.insert_constrain(array_get_res, four, None);
-        builder.terminate_with_return(vec![]);
-
-        // Note that now the smaller internal slice should have extra dummy data that matches the larger internal slice's size.
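Editorial aside: the expectation spelled out in this deleted test can be mimicked with a tiny stand-alone analogue of the pass (plain nested `Vec`s and a hypothetical `fill_internal_slices` helper, not the SSA machinery; `0` stands in for the zeroed Field constants the pass attaches):

```rust
// Sketch of the dummy-filling idea on plain data: pad every inner slice
// of a nested slice to the maximum inner length with a dummy value.
fn fill_internal_slices(mut outer: Vec<Vec<u64>>) -> Vec<Vec<u64>> {
    let max_len = outer.iter().map(Vec::len).max().unwrap_or(0);
    for inner in &mut outer {
        inner.resize(max_len, 0);
    }
    outer
}

fn main() {
    // Mirrors the test: a 3-element and a 4-element inner slice both
    // end up occupying 4 slots, the smaller one padded with a dummy 0.
    let filled = fill_internal_slices(vec![vec![1, 1, 1], vec![2, 2, 2, 2]]);
    assert_eq!(filled, vec![vec![1, 1, 1, 0], vec![2, 2, 2, 2]]);
}
```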
-        //
-        // Expected SSA:
-        // acir fn main f0 {
-        //   b0(v0: Field):
-        //     v10 = lt v0, Field 2
-        //     constrain v10 == Field 1 'Index out of bounds'
-        //     v18 = array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index v0
-        //     constrain v18 == Field 4
-        //     return
-        // }
-
-        let ssa = builder.finish().fill_internal_slices();
-
-        let func = ssa.main();
-        let block_id = func.entry_block();
-
-        // Check the array get expression has replaced its nested slice with a new slice
-        // where the internal slice has dummy data attached to it.
-        let instructions = func.dfg[block_id].instructions();
-        let array_id = instructions
-            .iter()
-            .find_map(|instruction| {
-                if let Instruction::ArrayGet { array, .. } = func.dfg[*instruction] {
-                    Some(array)
-                } else {
-                    None
-                }
-            })
-            .expect("Should find array_get instruction");
-
-        let (array_constant, _) =
-            func.dfg.get_array_constant(array_id).expect("should have an array constant");
-
-        let inner_slice_small_len = func
-            .dfg
-            .get_numeric_constant(array_constant[0])
-            .expect("should have a numeric constant");
-        assert_eq!(
-            inner_slice_small_len,
-            FieldElement::from(3u128),
-            "The length of the smaller internal slice should be unchanged"
-        );
-
-        let (inner_slice_small_contents, _) =
-            func.dfg.get_array_constant(array_constant[1]).expect("should have an array constant");
-        let small_capacity = inner_slice_small_contents.len();
-        assert_eq!(small_capacity, 4, "The inner slice contents should contain dummy element");
-
-        compare_array_constants(&inner_slice_small_contents, &[1, 1, 1, 0], &func.dfg);
-
-        let inner_slice_big_len = func
-            .dfg
-            .get_numeric_constant(array_constant[2])
-            .expect("should have a numeric constant");
-        assert_eq!(
-            inner_slice_big_len,
-            FieldElement::from(4u128),
-            "The length of the larger internal slice should be unchanged"
-        );
-
-        let (inner_slice_big_contents, _) =
-            func.dfg.get_array_constant(array_constant[3]).expect("should have an array constant");
-        let big_capacity = inner_slice_big_contents.len();
-        assert_eq!(
-            small_capacity, big_capacity,
-            "The length of both internal slice contents should be the same"
-        );
-
-        compare_array_constants(&inner_slice_big_contents, &[2u128; 4], &func.dfg);
-    }
-
-    fn compare_array_constants(
-        got_list: &im::Vector<ValueId>,
-        expected_list: &[u128],
-        dfg: &DataFlowGraph,
-    ) {
-        for i in 0..got_list.len() {
-            let got_value =
-                dfg.get_numeric_constant(got_list[i]).expect("should have a numeric constant");
-            assert_eq!(
-                got_value,
-                FieldElement::from(expected_list[i]),
-                "Value is different than expected"
-            );
-        }
-    }
-}
diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs
index 95784194d28..71725422a7a 100644
--- a/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs
+++ b/noir/compiler/noirc_evaluator/src/ssa/opt/mod.rs
@@ -5,10 +5,10 @@
//! Generally, these passes are also expected to minimize the final amount of instructions.
mod array_use;
mod assert_constant;
+mod bubble_up_constrains;
mod constant_folding;
mod defunctionalize;
mod die;
-mod fill_internal_slices;
pub(crate) mod flatten_cfg;
mod inlining;
mod mem2reg;
diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
index f1a2154d3a8..0e155776545 100644
--- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
+++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs
@@ -270,11 +270,12 @@ impl<'a> FunctionContext<'a> {
    /// helper function which adds instructions to the block computing the absolute value of the
    /// given signed integer input. When the input is negative, we return its two's complement, and the input itself when it is positive.
    fn absolute_value_helper(&mut self, input: ValueId, sign: ValueId, bit_size: u32) -> ValueId {
+        assert_eq!(self.builder.type_of_value(sign), Type::bool());
+
        // We compute the absolute value of lhs
-        let one = self.builder.numeric_constant(FieldElement::one(), Type::bool());
        let bit_width =
            self.builder.numeric_constant(FieldElement::from(2_i128.pow(bit_size)), Type::field());
-        let sign_not = self.builder.insert_binary(one, BinaryOp::Sub, sign);
+        let sign_not = self.builder.insert_not(sign);
        // We use unsafe casts here, this is fine as we're casting to a `field` type.
        let as_field = self.builder.insert_cast(input, Type::field());
@@ -472,7 +473,6 @@ impl<'a> FunctionContext<'a> {
        location: Location,
    ) {
        let is_sub = operator == BinaryOpKind::Subtract;
-        let one = self.builder.numeric_constant(FieldElement::one(), Type::bool());
        let half_width = self.builder.numeric_constant(
            FieldElement::from(2_i128.pow(bit_size - 1)),
            Type::unsigned(bit_size),
        );
        let mut rhs_sign = self.builder.insert_binary(rhs_as_unsigned, BinaryOp::Lt, half_width);
        let message = if is_sub {
            // lhs - rhs = lhs + (-rhs)
-            rhs_sign = self.builder.insert_binary(one, BinaryOp::Sub, rhs_sign);
+            rhs_sign = self.builder.insert_not(rhs_sign);
            "attempt to subtract with overflow".to_string()
        } else {
            "attempt to add with overflow".to_string()
@@ -518,13 +518,15 @@ impl<'a> FunctionContext<'a> {
        let product = self.builder.insert_cast(product_field, Type::unsigned(bit_size));
        // Then we check the signed product fits in a signed integer of bit_size-bits
-        let not_same = self.builder.insert_binary(one, BinaryOp::Sub, same_sign);
+        let not_same = self.builder.insert_not(same_sign);
        let not_same_sign_field =
            self.insert_safe_cast(not_same, Type::unsigned(bit_size), location);
        let positive_maximum_with_offset =
            self.builder.insert_binary(half_width, BinaryOp::Add, not_same_sign_field);
        let product_overflow_check =
            self.builder.insert_binary(product, BinaryOp::Lt, positive_maximum_with_offset);
+
+        let one = self.builder.numeric_constant(FieldElement::one(), Type::bool());
        self.builder.set_location(location).insert_constrain(
            product_overflow_check,
            one,
diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
index f0fc482cae0..c768ea96f8f 100644
--- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
+++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs
@@ -491,7 +491,7 @@ pub(crate) fn check_methods_signatures(
    }
    // We also need to bind the traits generics to the trait's generics on the impl
-    for ((_, generic), binding) in the_trait.generics.iter().zip(trait_generics) {
+    for (generic, binding) in the_trait.generics.iter().zip(trait_generics) {
        generic.bind(binding);
    }
@@ -599,7 +599,7 @@ pub(crate) fn check_methods_signatures(
    the_trait.set_methods(trait_methods);
    the_trait.self_type_typevar.unbind(the_trait.self_type_typevar_id);
-    for (old_id, generic) in &the_trait.generics {
-        generic.unbind(*old_id);
+    for generic in &the_trait.generics {
+        generic.unbind(generic.id());
    }
}
diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
index 2e6eb3992ff..3cd60c33b8b 100644
--- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
+++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs
@@ -20,7 +20,7 @@ use super::{
    },
    errors::{DefCollectorErrorKind, DuplicateType},
};
-use crate::hir::def_map::{parse_file, LocalModuleId, ModuleData, ModuleId};
+use crate::hir::def_map::{LocalModuleId, ModuleData, ModuleId};
use crate::hir::resolution::import::ImportDirective;
use crate::hir::Context;
@@ -555,7 +555,7 @@ impl<'a> ModCollector<'a> {
                context.visited_files.insert(child_file_id, location);
                // Parse the AST for the module we just found and then recursively look for its defs
-                let (ast, parsing_errors) = parse_file(&context.file_manager, child_file_id);
+                let (ast, parsing_errors) = context.parsed_file_results(child_file_id);
                let ast = ast.into_sorted();
                errors.extend(
diff --git a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs
index d60ceffa9af..8c985e88e0b 100644
--- a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs
+++ b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs
@@ -87,7 +87,7 @@ impl CrateDefMap {
        // First parse the root file.
        let root_file_id = context.crate_graph[crate_id].root_file_id;
-        let (ast, parsing_errors) = parse_file(&context.file_manager, root_file_id);
+        let (ast, parsing_errors) = context.parsed_file_results(root_file_id);
        let mut ast = ast.into_sorted();
        for macro_processor in &macro_processors {
diff --git a/noir/compiler/noirc_frontend/src/hir/mod.rs b/noir/compiler/noirc_frontend/src/hir/mod.rs
index c62f357167f..2124b5281f4 100644
--- a/noir/compiler/noirc_frontend/src/hir/mod.rs
+++ b/noir/compiler/noirc_frontend/src/hir/mod.rs
@@ -7,18 +7,22 @@ pub mod type_check;
use crate::graph::{CrateGraph, CrateId};
use crate::hir_def::function::FuncMeta;
use crate::node_interner::{FuncId, NodeInterner, StructId};
+use crate::parser::ParserError;
+use crate::ParsedModule;
use def_map::{Contract, CrateDefMap};
use fm::FileManager;
use noirc_errors::Location;
use std::borrow::Cow;
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashMap};
use self::def_map::TestFunction;
+pub type ParsedFiles = HashMap<fm::FileId, (ParsedModule, Vec<ParserError>)>;
+
/// Helper object which groups together several useful context objects used
/// during name resolution. Once name resolution is finished, only the
/// def_interner is required for type inference and monomorphization.
-pub struct Context<'file_manager> {
+pub struct Context<'file_manager, 'parsed_files> {
    pub def_interner: NodeInterner,
    pub crate_graph: CrateGraph,
    pub(crate) def_maps: BTreeMap<CrateId, CrateDefMap>,
@@ -30,6 +34,11 @@
    /// A map of each file that already has been visited from a prior `mod foo;` declaration.
    /// This is used to issue an error if a second `mod foo;` is declared to the same file.
    pub visited_files: BTreeMap<fm::FileId, Location>,
+
+    // A map of all parsed files.
+    // Same as the file manager, we take ownership of the parsed files in the WASM context.
+    // Parsed files are also read only.
+    pub parsed_files: Cow<'parsed_files, ParsedFiles>,
}
#[derive(Debug, Copy, Clone)]
@@ -39,27 +48,36 @@
pub enum FunctionNameMatch<'a> {
    Contains(&'a str),
}
-impl Context<'_> {
-    pub fn new(file_manager: FileManager) -> Context<'static> {
+impl Context<'_, '_> {
+    pub fn new(file_manager: FileManager, parsed_files: ParsedFiles) -> Context<'static, 'static> {
        Context {
            def_interner: NodeInterner::default(),
            def_maps: BTreeMap::new(),
            visited_files: BTreeMap::new(),
            crate_graph: CrateGraph::default(),
            file_manager: Cow::Owned(file_manager),
+            parsed_files: Cow::Owned(parsed_files),
        }
    }
-    pub fn from_ref_file_manager(file_manager: &FileManager) -> Context<'_> {
+    pub fn from_ref_file_manager<'file_manager, 'parsed_files>(
+        file_manager: &'file_manager FileManager,
+        parsed_files: &'parsed_files ParsedFiles,
+    ) -> Context<'file_manager, 'parsed_files> {
        Context {
            def_interner: NodeInterner::default(),
            def_maps: BTreeMap::new(),
            visited_files: BTreeMap::new(),
            crate_graph: CrateGraph::default(),
            file_manager: Cow::Borrowed(file_manager),
+            parsed_files: Cow::Borrowed(parsed_files),
        }
    }
+    pub fn parsed_file_results(&self, file_id: fm::FileId) -> (ParsedModule, Vec<ParserError>) {
+        self.parsed_files.get(&file_id).expect("noir file wasn't parsed").clone()
+    }
+
    /// Returns the CrateDefMap for a given CrateId.
    /// It is perfectly valid for the compiler to look
    /// up a CrateDefMap that is not available.
diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs
index bb7acf1037b..1d4f60ffd51 100644
--- a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs
+++ b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs
@@ -39,7 +39,7 @@ use crate::{
use crate::{
    ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition,
    FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param,
-    Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable,
+    Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeVariable,
    TypeVariableKind, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType,
    UnresolvedTypeData, UnresolvedTypeExpression, Visibility, ERROR_IDENT,
};
@@ -558,6 +558,10 @@ impl<'a> Resolver<'a> {
        let result = self.interner.get_type_alias(id).get_type(&args);
+        // Collecting Type Alias references [Location]s to be used by LSP in order
+        // to resolve the definition of the type alias
+        self.interner.add_type_alias_ref(id, Location::new(span, self.file));
+
        // Because there is no ordering to when type aliases (and other globals) are resolved,
        // it is possible for one to refer to an Error type and issue no error if it is set
        // equal to another type alias.
Fixing this fully requires an analysis to create a DFG @@ -643,7 +647,7 @@ impl<'a> Resolver<'a> { None => { let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - new_variables.push((id, typevar.clone())); + new_variables.push(typevar.clone()); // 'Named'Generic is a bit of a misnomer here, we want a type variable that // wont be bound over but this one has no name since we do not currently @@ -773,7 +777,7 @@ impl<'a> Resolver<'a> { self.generics.push((name, typevar.clone(), span)); } - (id, typevar) + typevar }) } @@ -783,7 +787,7 @@ impl<'a> Resolver<'a> { pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { assert_eq!(names.len(), generics.len()); - for (name, (_id, typevar)) in names.iter().zip(generics) { + for (name, typevar) in names.iter().zip(generics) { self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); } } @@ -851,14 +855,7 @@ impl<'a> Resolver<'a> { let attributes = func.attributes().clone(); - let mut generics = - vecmap(self.generics.clone(), |(name, typevar, _)| match &*typevar.borrow() { - TypeBinding::Unbound(id) => (*id, typevar.clone()), - TypeBinding::Bound(binding) => { - unreachable!("Expected {} to be unbound, but it is bound to {}", name, binding) - } - }); - + let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); let mut parameters = vec![]; let mut parameter_types = vec![]; diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs index f08d9c50c84..8f966be312b 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -18,7 +18,7 @@ use crate::{ }, hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, node_interner::{FuncId, NodeInterner, TraitId}, - Generics, Path, Shared, TraitItem, Type, TypeBinding, TypeVariable, TypeVariableKind, + Generics, Path, Shared, TraitItem, Type, TypeVariable, TypeVariableKind, }; use super::{ @@ -42,8 +42,7 @@ pub(crate) fn resolve_traits( for (trait_id, unresolved_trait) in traits { let generics = vecmap(&unresolved_trait.trait_def.generics, |_| { - let id = context.def_interner.next_type_variable_id(); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(context.def_interner.next_type_variable_id()) }); // Resolve order @@ -142,17 +141,7 @@ fn resolve_trait_methods( let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = - vecmap(resolver.get_generics(), |(_, type_var, _)| match &*type_var.borrow() { - TypeBinding::Unbound(id) => (*id, type_var.clone()), - TypeBinding::Bound(binding) => { - unreachable!("Trait generic was bound to {binding}") - } - }); - - // Ensure the trait is generic over the Self type as well - // let the_trait = resolver.interner.get_trait(trait_id); - // generics.push((the_trait.self_type_typevar_id, the_trait.self_type_typevar.clone())); + let generics = vecmap(resolver.get_generics(), |(_, type_var, _)| type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -465,8 +454,7 @@ pub(crate) fn resolve_trait_impls( methods: vecmap(&impl_methods, |(_, func_id)| *func_id), }); - let impl_generics = - vecmap(impl_generics, |(_, type_variable, _)| (type_variable.id(), type_variable)); + let impl_generics = vecmap(impl_generics, |(_, type_variable, _)| 
type_variable);
 
         if let Err((prev_span, prev_file)) = interner.add_trait_implementation(
             self_type.clone(),
diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs
index b583959bfb1..58cf4e7b289 100644
--- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs
+++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs
@@ -595,7 +595,7 @@ impl<'interner> TypeChecker<'interner> {
                     .generics
                     .iter()
                     .zip(generics)
-                    .map(|((id, var), arg)| (*id, (var.clone(), arg)))
+                    .map(|(var, arg)| (var.id(), (var.clone(), arg)))
                     .collect();
 
                 (method.typ.clone(), method.arguments().len(), generic_bindings)
diff --git a/noir/compiler/noirc_frontend/src/hir_def/traits.rs b/noir/compiler/noirc_frontend/src/hir_def/traits.rs
index 85c292ac5f3..16b9899039f 100644
--- a/noir/compiler/noirc_frontend/src/hir_def/traits.rs
+++ b/noir/compiler/noirc_frontend/src/hir_def/traits.rs
@@ -147,7 +147,7 @@ impl TraitFunction {
         }
     }
 
-    pub fn generics(&self) -> &[(TypeVariableId, TypeVariable)] {
+    pub fn generics(&self) -> &[TypeVariable] {
         match &self.typ {
             Type::Function(..) => &[],
             Type::Forall(generics, _) => generics,
diff --git a/noir/compiler/noirc_frontend/src/hir_def/types.rs b/noir/compiler/noirc_frontend/src/hir_def/types.rs
index f59341e5b1c..8979d60c005 100644
--- a/noir/compiler/noirc_frontend/src/hir_def/types.rs
+++ b/noir/compiler/noirc_frontend/src/hir_def/types.rs
@@ -207,11 +207,8 @@ pub struct StructType {
     pub location: Location,
 }
 
-/// Corresponds to generic lists such as `<T, U>` in the source
-/// program. The `TypeVariableId` portion is used to match two
-/// type variables to check for equality, while the `TypeVariable` is
-/// the actual part that can be mutated to bind it to another type.
-pub type Generics = Vec<(TypeVariableId, TypeVariable)>;
+/// Corresponds to generic lists such as `<T, U>` in the source program.
+pub type Generics = Vec<TypeVariable>;
 
 impl std::hash::Hash for StructType {
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
@@ -260,7 +257,7 @@ impl StructType {
             .generics
             .iter()
             .zip(generic_args)
-            .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone())))
+            .map(|(old, new)| (old.id(), (old.clone(), new.clone())))
             .collect();
 
         (typ.substitute(&substitutions), i)
@@ -276,7 +273,7 @@ impl StructType {
             .generics
             .iter()
             .zip(generic_args)
-            .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone())))
+            .map(|(old, new)| (old.id(), (old.clone(), new.clone())))
             .collect();
 
         vecmap(&self.fields, |(name, typ)| {
@@ -317,7 +314,7 @@ pub struct TypeAliasType {
     pub id: TypeAliasId,
     pub typ: Type,
     pub generics: Generics,
-    pub span: Span,
+    pub location: Location,
 }
 
 impl std::hash::Hash for TypeAliasType {
@@ -337,7 +334,7 @@ impl std::fmt::Display for TypeAliasType {
         write!(f, "{}", self.name)?;
 
         if !self.generics.is_empty() {
-            let generics = vecmap(&self.generics, |(_, binding)| binding.borrow().to_string());
+            let generics = vecmap(&self.generics, |binding| binding.borrow().to_string());
 
             write!(f, "{}", generics.join(", "))?;
         }
@@ -349,11 +346,11 @@ impl TypeAliasType {
     pub fn new(
         id: TypeAliasId,
         name: Ident,
-        span: Span,
+        location: Location,
         typ: Type,
         generics: Generics,
     ) -> TypeAliasType {
-        TypeAliasType { id, typ, name, span, generics }
+        TypeAliasType { id, typ, name, location, generics }
     }
 
     pub fn set_type_and_generics(&mut self, new_typ: Type, new_generics: Generics) {
@@ -369,7 +366,7 @@ impl TypeAliasType {
             .generics
             .iter()
             .zip(generic_args)
-            .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone())))
+            .map(|(old, new)| (old.id(), (old.clone(), new.clone())))
             .collect();
 
         self.typ.substitute(&substitutions)
@@ -707,7 +704,7 @@ impl Type {
     /// Takes a monomorphic type and generalizes it over each of the type variables in the
     /// given type bindings, ignoring what each type variable is bound to in the TypeBindings.
     pub(crate) fn generalize_from_substitutions(self, type_bindings: TypeBindings) -> Type {
-        let polymorphic_type_vars = vecmap(type_bindings, |(id, (type_var, _))| (id, type_var));
+        let polymorphic_type_vars = vecmap(type_bindings, |(_, (type_var, _))| type_var);
         Type::Forall(polymorphic_type_vars, Box::new(self))
     }
 
@@ -801,7 +798,7 @@ impl std::fmt::Display for Type {
             },
             Type::Constant(x) => x.fmt(f),
             Type::Forall(typevars, typ) => {
-                let typevars = vecmap(typevars, |(var, _)| var.to_string());
+                let typevars = vecmap(typevars, |var| var.id().to_string());
                 write!(f, "forall {}. {}", typevars.join(" "), typ)
             }
             Type::Function(args, ret, env) => {
@@ -1307,9 +1304,9 @@ impl Type {
     ) -> (Type, TypeBindings) {
         match self {
             Type::Forall(typevars, typ) => {
-                for (id, var) in typevars {
+                for var in typevars {
                     bindings
-                        .entry(*id)
+                        .entry(var.id())
                        .or_insert_with(|| (var.clone(), interner.next_type_variable()));
                 }
 
@@ -1328,9 +1325,9 @@
             Type::Forall(typevars, typ) => {
                 let replacements = typevars
                     .iter()
-                    .map(|(id, var)| {
+                    .map(|var| {
                         let new = interner.next_type_variable();
-                        (*id, (var.clone(), new))
+                        (var.id(), (var.clone(), new))
                     })
                     .collect();
 
@@ -1428,8 +1425,8 @@ impl Type {
             Type::Forall(typevars, typ) => {
                 // Trying to substitute_helper a variable defined within a nested Forall
                 // is usually impossible and indicative of an error in the type checker somewhere.
-                for (var, _) in typevars {
-                    assert!(!type_bindings.contains_key(var));
+                for var in typevars {
+                    assert!(!type_bindings.contains_key(&var.id()));
                 }
 
                 let typ = Box::new(typ.substitute_helper(type_bindings, substitute_bound_typevars));
                 Type::Forall(typevars.clone(), typ)
@@ -1476,7 +1473,7 @@ impl Type {
                 }
             }
             Type::Forall(typevars, typ) => {
-                !typevars.iter().any(|(id, _)| *id == target_id) && typ.occurs(target_id)
+                !typevars.iter().any(|var| var.id() == target_id) && typ.occurs(target_id)
             }
             Type::Function(args, ret, env) => {
                 args.iter().any(|arg| arg.occurs(target_id))
@@ -1549,7 +1546,7 @@ impl Type {
     }
 
     pub fn from_generics(generics: &Generics) -> Vec<Type> {
-        vecmap(generics, |(_, var)| Type::TypeVariable(var.clone(), TypeVariableKind::Normal))
+        vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal))
     }
 }
 
@@ -1620,7 +1617,7 @@ impl From<&Type> for PrintableType {
         match value {
             Type::FieldElement => PrintableType::Field,
             Type::Array(size, typ) => {
-                let length = size.evaluate_to_u64().expect("Cannot print variable sized arrays");
+                let length = size.evaluate_to_u64();
                 let typ = typ.as_ref();
                 PrintableType::Array { length, typ: Box::new(typ.into()) }
             }
@@ -1641,7 +1638,7 @@
             }
             Type::FmtString(_, _) => unreachable!("format strings cannot be printed"),
             Type::Error => unreachable!(),
-            Type::Unit => unreachable!(),
+            Type::Unit => PrintableType::Unit,
             Type::Constant(_) => unreachable!(),
             Type::Struct(def, ref args) => {
                 let struct_type = def.borrow();
@@ -1649,13 +1646,17 @@
                 let fields = vecmap(fields, |(name, typ)| (name, typ.into()));
                 PrintableType::Struct { fields, name: struct_type.name.to_string() }
             }
-            Type::TraitAsType(..) => unreachable!(),
-            Type::Tuple(_) => todo!("printing tuple types is not yet implemented"),
+            Type::TraitAsType(_, _, _) => unreachable!(),
+            Type::Tuple(types) => PrintableType::Tuple { types: vecmap(types, |typ| typ.into()) },
             Type::TypeVariable(_, _) => unreachable!(),
             Type::NamedGeneric(..) => unreachable!(),
             Type::Forall(..) => unreachable!(),
-            Type::Function(_, _, _) => unreachable!(),
-            Type::MutableReference(_) => unreachable!("cannot print &mut"),
+            Type::Function(_, _, env) => {
+                PrintableType::Function { env: Box::new(env.as_ref().into()) }
+            }
+            Type::MutableReference(typ) => {
+                PrintableType::MutableReference { typ: Box::new(typ.as_ref().into()) }
+            }
             Type::NotConstant => unreachable!(),
         }
     }
diff --git a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs
index ac11e00ad20..67b246a02ce 100644
--- a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs
+++ b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs
@@ -27,7 +27,7 @@ use crate::{
     node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId},
     token::FunctionAttribute,
     ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariable,
-    TypeVariableId, TypeVariableKind, UnaryOp, Visibility,
+    TypeVariableKind, UnaryOp, Visibility,
 };
 
 use self::ast::{Definition, FuncId, Function, LocalId, Program};
@@ -1029,11 +1029,16 @@ impl<'interner> Monomorphizer<'interner> {
     }
 
     fn append_printable_type_info_inner(typ: &Type, arguments: &mut Vec<ast::Expression>) {
+        // Disallow printing slices and mutable references for consistency,
+        // since they cannot be passed from ACIR into Brillig
         if let HirType::Array(size, _) = typ {
            if let HirType::NotConstant = **size {
                unreachable!("println does not support slices. 
Convert the slice to an array before passing it to println");
             }
+        } else if matches!(typ, HirType::MutableReference(_)) {
+            unreachable!("println does not support mutable references.");
         }
+
         let printable_type: PrintableType = typ.into();
         let abi_as_string = serde_json::to_string(&printable_type)
             .expect("ICE: expected PrintableType to serialize");
@@ -1533,8 +1538,8 @@ impl<'interner> Monomorphizer<'interner> {
         let (generics, impl_method_type) =
             self.interner.function_meta(&impl_method).typ.unwrap_forall();
 
-        let replace_type_variable = |(id, var): &(TypeVariableId, TypeVariable)| {
-            (*id, (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal)))
+        let replace_type_variable = |var: &TypeVariable| {
+            (var.id(), (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal)))
         };
 
         // Replace each NamedGeneric with a TypeVariable containing the same internal type variable
diff --git a/noir/compiler/noirc_frontend/src/node_interner.rs b/noir/compiler/noirc_frontend/src/node_interner.rs
index 173bac95877..e734161e360 100644
--- a/noir/compiler/noirc_frontend/src/node_interner.rs
+++ b/noir/compiler/noirc_frontend/src/node_interner.rs
@@ -78,7 +78,7 @@ pub struct NodeInterner {
     //
     // Map type aliases to the actual type.
     // When resolving types, check against this map to see if a type alias is defined.
-    type_aliases: Vec<TypeAliasType>,
+    pub(crate) type_aliases: Vec<TypeAliasType>,
 
     // Trait map.
     //
@@ -142,6 +142,10 @@ pub struct NodeInterner {
 
     // For trait implementation functions, this is their self type and trait they belong to
     func_id_to_trait: HashMap<FuncId, (Type, TraitId)>,
+
+    /// A list of all type aliases that are referenced in the program.
+    /// Searched by LSP to resolve [Location]s of [TypeAliasType]s
+    pub(crate) type_alias_ref: Vec<(TypeAliasId, Location)>,
 }
 
 /// A trait implementation is either a normal implementation that is present in the source
@@ -450,6 +454,7 @@ impl Default for NodeInterner {
             globals: HashMap::new(),
             struct_methods: HashMap::new(),
             primitive_methods: HashMap::new(),
+            type_alias_ref: Vec::new(),
         };
 
         // An empty block expression is used often, we add this into the `node` on startup
@@ -499,8 +504,7 @@ impl NodeInterner {
                 // This lets us record how many arguments the type expects so that other types
                 // can refer to it with generic arguments before the generic parameters themselves
                 // are resolved.
-                let id = TypeVariableId(0);
-                (id, TypeVariable::unbound(id))
+                TypeVariable::unbound(TypeVariableId(0))
             }),
             self_type_typevar_id,
             self_type_typevar: TypeVariable::unbound(self_type_typevar_id),
@@ -530,8 +534,7 @@ impl NodeInterner {
             // This lets us record how many arguments the type expects so that other types
            // can refer to it with generic arguments before the generic parameters themselves
            // are resolved.
-            let id = TypeVariableId(0);
-            (id, TypeVariable::unbound(id))
+            TypeVariable::unbound(TypeVariableId(0))
         });
 
         let location = Location::new(typ.struct_def.span, file_id);
@@ -547,17 +550,19 @@ impl NodeInterner {
         self.type_aliases.push(TypeAliasType::new(
             type_id,
             typ.type_alias_def.name.clone(),
-            typ.type_alias_def.span,
+            Location::new(typ.type_alias_def.span, typ.file_id),
             Type::Error,
-            vecmap(&typ.type_alias_def.generics, |_| {
-                let id = TypeVariableId(0);
-                (id, TypeVariable::unbound(id))
-            }),
+            vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))),
         ));
 
         type_id
     }
 
+    /// Adds a [TypeAliasId] and [Location] to the type_alias_ref vector
+    /// so that we can later resolve the [Location]s of type aliases from LSP requests
+    pub fn add_type_alias_ref(&mut self, type_id: TypeAliasId, location: Location) {
+        self.type_alias_ref.push((type_id, location));
+    }
     pub fn update_struct(&mut self, type_id: StructId, f: impl FnOnce(&mut StructType)) {
         let mut value = self.structs.get_mut(&type_id).unwrap().borrow_mut();
         f(&mut value);
@@ -1195,19 +1200,18 @@ impl NodeInterner {
 
         self.trait_implementations.push(trait_impl.clone());
 
-        // Ignoring overlapping TraitImplKind::Assumed impls here is perfectly fine.
-        // It should never happen since impls are defined at global scope, but even
-        // if they were, we should never prevent defining a new impl because a where
-        // clause already assumes it exists.
-
         // Replace each generic with a fresh type variable
         let substitutions = impl_generics
             .into_iter()
-            .map(|(id, typevar)| (id, (typevar, self.next_type_variable())))
+            .map(|typevar| (typevar.id(), (typevar, self.next_type_variable())))
             .collect();
 
         let instantiated_object_type = object_type.substitute(&substitutions);
 
+        // Ignoring overlapping `TraitImplKind::Assumed` impls here is perfectly fine.
+        // It should never happen since impls are defined at global scope, but even
+        // if they were, we should never prevent defining a new impl because a 'where'
+        // clause already assumes it exists.
         if let Ok((TraitImplKind::Normal(existing), _)) = self.try_lookup_trait_implementation(
             &instantiated_object_type,
             trait_id,
diff --git a/noir/compiler/noirc_frontend/src/resolve_locations.rs b/noir/compiler/noirc_frontend/src/resolve_locations.rs
index 95ced906984..02325de4da8 100644
--- a/noir/compiler/noirc_frontend/src/resolve_locations.rs
+++ b/noir/compiler/noirc_frontend/src/resolve_locations.rs
@@ -33,17 +33,22 @@ impl NodeInterner {
 
     /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId].
    /// Returns [None] when definition is not found.
-    pub fn get_definition_location_from(&self, location: Location) -> Option<Location> {
+    pub fn get_definition_location_from(
+        &self,
+        location: Location,
+        return_type_location_instead: bool,
+    ) -> Option<Location> {
         self.find_location_index(location)
-            .and_then(|index| self.resolve_location(index))
+            .and_then(|index| self.resolve_location(index, return_type_location_instead))
             .or_else(|| self.try_resolve_trait_impl_location(location))
             .or_else(|| self.try_resolve_trait_method_declaration(location))
+            .or_else(|| self.try_resolve_type_alias(location))
     }
 
     pub fn get_declaration_location_from(&self, location: Location) -> Option<Location> {
         self.try_resolve_trait_method_declaration(location).or_else(|| {
             self.find_location_index(location)
-                .and_then(|index| self.resolve_location(index))
+                .and_then(|index| self.resolve_location(index, false))
                 .and_then(|found_impl_location| {
                     self.try_resolve_trait_method_declaration(found_impl_location)
                 })
@@ -53,12 +58,31 @@ impl NodeInterner {
     /// For a given [Index] we return the [Location] we resolved to
     /// We currently return None for features not yet implemented
     /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve
-    fn resolve_location(&self, index: impl Into<Index>) -> Option<Location> {
+    fn resolve_location(
+        &self,
+        index: impl Into<Index>,
+        return_type_location_instead: bool,
+    ) -> Option<Location> {
+        if return_type_location_instead {
+            return self.get_type_location_from_index(index);
+        }
+
         let node = self.nodes.get(index.into())?;
 
         match node {
-            Node::Function(func) => self.resolve_location(func.as_expr()),
-            Node::Expression(expression) => self.resolve_expression_location(expression),
+            Node::Function(func) => {
+                self.resolve_location(func.as_expr(), return_type_location_instead)
+            }
+            Node::Expression(expression) => {
+                self.resolve_expression_location(expression, return_type_location_instead)
+            }
+            _ => None,
+        }
+    }
+
+    fn get_type_location_from_index(&self, index: impl Into<Index>) -> Option<Location> {
+        match self.id_type(index.into()) {
+            Type::Struct(struct_type, _) => Some(struct_type.borrow().location),
             _ => None,
         }
     }
@@ -66,7 +90,11 @@ impl NodeInterner {
 
     /// Resolves the [Location] of the definition for a given [HirExpression]
     ///
    /// Note: currently the code returns None because some expressions are not yet implemented.
-    fn resolve_expression_location(&self, expression: &HirExpression) -> Option<Location> {
+    fn resolve_expression_location(
+        &self,
+        expression: &HirExpression,
+        return_type_location_instead: bool,
+    ) -> Option<Location> {
         match expression {
             HirExpression::Ident(ident) => {
                 let definition_info = self.definition(ident.id);
@@ -88,7 +116,7 @@
             }
             HirExpression::Call(expr_call) => {
                 let func = expr_call.func;
-                self.resolve_location(func)
+                self.resolve_location(func, return_type_location_instead)
             }
 
             _ => None,
@@ -167,4 +195,12 @@ impl NodeInterner {
             method.map(|method| method.location)
         })
     }
+
+    #[tracing::instrument(skip(self), ret)]
+    fn try_resolve_type_alias(&self, location: Location) -> Option<Location> {
+        self.type_alias_ref
+            .iter()
+            .find(|(_, named_type_location)| named_type_location.span.contains(&location.span))
+            .map(|(type_alias_id, _found_location)| self.get_type_alias(*type_alias_id).location)
+    }
 }
diff --git a/noir/compiler/noirc_frontend/src/tests.rs b/noir/compiler/noirc_frontend/src/tests.rs
index a56c3a7755f..9ccbddab9ec 100644
--- a/noir/compiler/noirc_frontend/src/tests.rs
+++ b/noir/compiler/noirc_frontend/src/tests.rs
@@ -52,7 +52,7 @@ mod test {
     ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) {
         let root = std::path::Path::new("/");
         let fm = FileManager::new(root);
-        let mut context = Context::new(fm);
+        let mut context = Context::new(fm, Default::default());
         context.def_interner.populate_dummy_operator_traits();
         let root_file_id = FileId::dummy();
         let root_crate_id = context.crate_graph.add_crate_root(root_file_id);
diff --git a/noir/compiler/noirc_printable_type/src/lib.rs b/noir/compiler/noirc_printable_type/src/lib.rs
index 273e2d512ea..18f2fe0a873 100644
--- a/noir/compiler/noirc_printable_type/src/lib.rs
+++ b/noir/compiler/noirc_printable_type/src/lib.rs
@@ -11,10 +11,13 @@ use thiserror::Error;
 pub enum PrintableType {
     Field,
     Array {
-        length: u64,
+        length: Option<u64>,
         #[serde(rename = "type")]
         typ: Box<PrintableType>,
     },
+    Tuple {
+        types: Vec<PrintableType>,
+    },
     SignedInteger {
         width: u32,
     },
@@ -29,23 +32,13 @@ pub enum PrintableType {
     String {
         length: u64,
     },
-}
-
-impl PrintableType {
-    /// Returns the number of field elements required to represent the type once encoded.
-    fn field_count(&self) -> u32 {
-        match self {
-            Self::Field
-            | Self::SignedInteger { .. }
-            | Self::UnsignedInteger { .. }
-            | Self::Boolean => 1,
-            Self::Array { length, typ } => typ.field_count() * (*length as u32),
-            Self::Struct { fields, ..
} => {
-                fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count())
-            }
-            Self::String { length } => *length as u32,
-        }
-    }
+    Function {
+        env: Box<PrintableType>,
+    },
+    MutableReference {
+        typ: Box<PrintableType>,
+    },
+    Unit,
 }
 
 /// This is what all formats eventually transform into
@@ -114,43 +107,26 @@ fn convert_string_inputs(
 fn convert_fmt_string_inputs(
     foreign_call_inputs: &[ForeignCallParam],
 ) -> Result<PrintableValueDisplay, ForeignCallError> {
-    let (message, input_and_printable_values) =
+    let (message, input_and_printable_types) =
         foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?;
 
     let message_as_fields = vecmap(message.values(), |value| value.to_field());
     let message_as_string = decode_string_value(&message_as_fields);
 
-    let (num_values, input_and_printable_values) = input_and_printable_values
+    let (num_values, input_and_printable_types) = input_and_printable_types
         .split_first()
         .ok_or(ForeignCallError::MissingForeignCallInputs)?;
 
     let mut output = Vec::new();
     let num_values = num_values.unwrap_value().to_field().to_u128() as usize;
 
-    for (i, printable_value) in input_and_printable_values
+    let types_start_at = input_and_printable_types.len() - num_values;
+    let mut input_iter = input_and_printable_types[0..types_start_at]
         .iter()
-        .skip(input_and_printable_values.len() - num_values)
-        .enumerate()
-    {
-        let printable_type = fetch_printable_type(printable_value)?;
-        let type_size = printable_type.field_count() as usize;
-        let value = match printable_type {
-            PrintableType::Array { .. } | PrintableType::String { .. } => {
-                // Arrays and strings are represented in a single value vector rather than multiple separate input values
-                let mut input_values_as_fields = input_and_printable_values[i]
-                    .values()
-                    .into_iter()
-                    .map(|value| value.to_field());
-                decode_value(&mut input_values_as_fields, &printable_type)
-            }
-            _ => {
-                // We must use a flat map here as each value in a struct will be in a separate input value
-                let mut input_values_as_fields = input_and_printable_values[i..(i + type_size)]
-                    .iter()
-                    .flat_map(|param| vecmap(param.values(), |value| value.to_field()));
-                decode_value(&mut input_values_as_fields, &printable_type)
-            }
-        };
+        .flat_map(|param| vecmap(param.values(), |value| value.to_field()));
+    for printable_type in input_and_printable_types.iter().skip(types_start_at) {
+        let printable_type = fetch_printable_type(printable_type)?;
+        let value = decode_value(&mut input_iter, &printable_type);
 
         output.push((value, printable_type));
     }
@@ -196,6 +172,12 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option<String> {
                 output.push_str("false");
             }
         }
+        (PrintableValue::Field(_), PrintableType::Function { .. }) => {
+            output.push_str("<<fn>>");
+        }
+        (_, PrintableType::MutableReference { .. }) => {
+            output.push_str("<<mutable ref>>");
+        }
        (PrintableValue::Vec(vector), PrintableType::Array { typ, ..
}) => {
             output.push('[');
             let mut values = vector.iter().peekable();
@@ -233,6 +215,22 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option<String> {
             output.push_str(" }");
         }
 
+        (PrintableValue::Vec(values), PrintableType::Tuple { types }) => {
+            output.push('(');
+            let mut elems = values.iter().zip(types).peekable();
+            while let Some((value, typ)) = elems.next() {
+                output.push_str(
+                    &PrintableValueDisplay::Plain(value.clone(), typ.clone()).to_string(),
+                );
+                if elems.peek().is_some() {
+                    output.push_str(", ");
+                }
+            }
+            output.push(')');
+        }
+
+        (_, PrintableType::Unit) => output.push_str("()"),
+
         _ => return None,
     };
 
@@ -308,7 +306,19 @@ fn decode_value(
 
             PrintableValue::Field(field_element)
         }
-        PrintableType::Array { length, typ } => {
+        PrintableType::Array { length: None, typ } => {
+            let length = field_iterator
+                .next()
+                .expect("not enough data to decode variable array length")
+                .to_u128() as usize;
+            let mut array_elements = Vec::with_capacity(length);
+            for _ in 0..length {
+                array_elements.push(decode_value(field_iterator, typ));
+            }
+
+            PrintableValue::Vec(array_elements)
+        }
+        PrintableType::Array { length: Some(length), typ } => {
             let length = *length as usize;
             let mut array_elements = Vec::with_capacity(length);
             for _ in 0..length {
@@ -317,6 +327,9 @@ fn decode_value(
 
             PrintableValue::Vec(array_elements)
         }
+        PrintableType::Tuple { types } => {
+            PrintableValue::Vec(vecmap(types, |typ| decode_value(field_iterator, typ)))
+        }
         PrintableType::String { length } => {
             let field_elements: Vec<FieldElement> = field_iterator.take(*length as usize).collect();
 
@@ -333,6 +346,18 @@ fn decode_value(
 
             PrintableValue::Struct(struct_map)
         }
+        PrintableType::Function { env } => {
+            let field_element = field_iterator.next().unwrap();
+            let func_ref = PrintableValue::Field(field_element);
+            // we want to consume the fields from the environment, but for now they are not actually printed
+            decode_value(field_iterator, env);
+            func_ref
+        }
+        PrintableType::MutableReference { typ } => {
+            // we decode the reference, but it's not really used for printing
+            decode_value(field_iterator, typ)
+        }
+        PrintableType::Unit => PrintableValue::Field(FieldElement::zero()),
     }
 }
 
diff --git a/noir/compiler/wasm/package.json b/noir/compiler/wasm/package.json
index 412e9c82f9a..2aaf4a494df 100644
--- a/noir/compiler/wasm/package.json
+++ b/noir/compiler/wasm/package.json
@@ -3,7 +3,7 @@
   "contributors": [
     "The Noir Team <team@noir-lang.org>"
   ],
-  "version": "0.22.0",
+  "version": "0.23.0",
   "license": "(MIT OR Apache-2.0)",
   "main": "dist/main.js",
   "types": "./dist/types/src/index.d.cts",
diff --git a/noir/compiler/wasm/src/compile.rs b/noir/compiler/wasm/src/compile.rs
index 351f9ae8a86..498ffe447ce 100644
--- a/noir/compiler/wasm/src/compile.rs
+++ b/noir/compiler/wasm/src/compile.rs
@@ -13,7 +13,7 @@ use noirc_driver::{
 use noirc_evaluator::errors::SsaReport;
 use noirc_frontend::{
     graph::{CrateId, CrateName},
-    hir::Context,
+    hir::{def_map::parse_file, Context, ParsedFiles},
 };
 use serde::Deserialize;
 use std::{collections::HashMap, path::Path};
@@ -140,6 +140,10 @@ impl PathToFileSourceMap {
     }
 }
 
+pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles {
+    fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect()
+}
+
 pub enum CompileResult {
     Contract { contract: ContractArtifact, warnings: Vec<SsaReport> },
     Program { program: ProgramArtifact, warnings: Vec<SsaReport> },
 }
@@ -162,8 +166,8 @@ pub fn compile(
     };
 
     let fm = file_manager_with_source_map(file_source_map);
-
-    let mut context = Context::new(fm);
+    let parsed_files 
= parse_all(&fm); + let mut context = Context::new(fm, parsed_files); let path = Path::new(&entry_point); let crate_id = prepare_crate(&mut context, path); @@ -291,15 +295,18 @@ mod test { use crate::compile::PathToFileSourceMap; - use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; + use super::{ + file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, + }; use std::{collections::HashMap, path::Path}; - fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static> { + fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); - let mut context = Context::new(fm); + let mut context = Context::new(fm, parsed_files); prepare_crate(&mut context, Path::new("/main.nr")); context diff --git a/noir/compiler/wasm/src/compile_new.rs b/noir/compiler/wasm/src/compile_new.rs index 3cb20bd0b5c..6476f6d29bc 100644 --- a/noir/compiler/wasm/src/compile_new.rs +++ b/noir/compiler/wasm/src/compile_new.rs @@ -1,5 +1,5 @@ use crate::compile::{ - file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, + file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, parse_all, JsCompileResult, PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; @@ -20,7 +20,7 @@ use wasm_bindgen::prelude::wasm_bindgen; pub struct CompilerContext { // `wasm_bindgen` currently doesn't allow lifetime parameters on structs so we must use a `'static` lifetime. // `Context` must then own the `FileManager` to satisfy this lifetime. - context: Context<'static>, + context: Context<'static, 'static>, } #[wasm_bindgen(js_name = "CrateId")] @@ -34,7 +34,9 @@ impl CompilerContext { console_error_panic_hook::set_once(); let fm = file_manager_with_source_map(source_map); - CompilerContext { context: Context::new(fm) } + let parsed_files = parse_all(&fm); + + CompilerContext { context: Context::new(fm, parsed_files) } } #[cfg(test)] @@ -231,7 +233,7 @@ mod test { use noirc_driver::prepare_crate; use noirc_frontend::hir::Context; - use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; + use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; use std::path::Path; @@ -241,8 +243,9 @@ mod test { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); - let mut context = Context::new(fm); + let mut context = Context::new(fm, parsed_files); prepare_crate(&mut context, Path::new("/main.nr")); CompilerContext { context } diff --git a/noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-oracle.md b/noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-oracle.md deleted file mode 100644 index 76dd0e36d6c..00000000000 --- a/noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-oracle.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -title: Oracles -description: This guide provides an in-depth understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. 
-keywords:
-  - Noir Programming
-  - Oracles
-  - JSON-RPC
-  - Foreign Call Handlers
-  - Constrained Functions
-  - Blockchain Programming
-sidebar_position: 1
----
-
-If you've seen "The Matrix" you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs.
-
-![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg)
-
-A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source?
-
-Oracles are functions that provide this feature.
-
-## Use cases
-
-An example usage for Oracles is proving something on-chain. For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method.
-
-Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constraining the result; similar to the use of [unconstrained functions](../noir/syntax/unconstrained.md).
-
-In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles.
-
-## Constraining oracles
-
-Just like in The Matrix, Oracles are powerful. But with great power comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information.
-
-To give a concrete example, Alice wants to log in to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her Ethereum address. Her Noir program could have an oracle call like this:
-
-```rust
-#[oracle(getNoun)]
-unconstrained fn get_noun(address: Field) -> Field
-```
-
-This oracle could naively resolve with the number of Nouns she possesses. However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving her address and the noun count belong to the state tree of the contract.
-
-In short, **Oracles don't prove anything. Your Noir program does.**
-
-:::danger
-
-If you don't constrain the return of your oracle, you could be opening a clear attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained!
-
-:::
-
-## How to use Oracles
-
-On CLI, Nargo resolves oracles by making JSON RPC calls, which means it would require an RPC node to be running.
-
-In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they match the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling.
-
-If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that.
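As a minimal illustration of the constrain-your-oracle pattern this explainer describes, consider a hypothetical `getFactors` oracle (any resolver registered under that name would do). The point is that the final `assert`, not the oracle, is what the proof actually guarantees:

```rust
// Hypothetical oracle: resolved outside the circuit, e.g. by a JSON-RPC server.
#[oracle(getFactors)]
unconstrained fn get_factors(n: Field) -> [Field; 2] {}

unconstrained fn fetch_factors(n: Field) -> [Field; 2] {
    get_factors(n)
}

fn main(n: Field) {
    let factors = fetch_factors(n);
    // The oracle could have returned anything, so constrain the response:
    // this multiplication check is the only thing the proof attests to.
    assert(factors[0] * factors[1] == n);
}
```
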
diff --git a/noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-oracles.md b/noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-oracles.md
deleted file mode 100644
index 61cabe586e6..00000000000
--- a/noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-oracles.md
+++ /dev/null
@@ -1,280 +0,0 @@
----
-title: How to use Oracles
-description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS.
-keywords:
-  - Noir Programming
-  - Oracles
-  - Nargo
-  - NoirJS
-  - JSON RPC Server
-  - Foreign Call Handlers
-sidebar_position: 1
----
-
-This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that:
-
-- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept.
-- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate.
-- You understand the concept of a JSON-RPC server. Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher.
-- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.).
-
-For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/how_to_oracles/code-snippets/how-to-oracles).
-
-## Rundown
-
-This guide has 3 major steps:
-
-1. How to modify our Noir program to make use of oracle calls as unconstrained functions
-2. How to write a JSON RPC Server to resolve these oracle calls with Nargo
-3. How to use them in Nargo and how to provide a custom resolver in NoirJS
-
-## Step 1 - Modify your Noir program
-
-An oracle is defined in a Noir program by defining two methods:
-
-- An unconstrained method - This tells the compiler that it is executing an [unconstrained function](../noir/syntax/unconstrained.md).
-- A decorated oracle method - This tells the compiler that this method is an RPC call.
-
-An example of an oracle that returns a `Field` would be:
-
-```rust
-#[oracle(getSqrt)]
-unconstrained fn sqrt(number: Field) -> Field { }
-
-unconstrained fn get_sqrt(number: Field) -> Field {
-    sqrt(number)
-}
-```
-
-In this example, we're wrapping our oracle function in an unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function:
-
-```rust
-fn main(input: Field) {
-    let sqrt = get_sqrt(input);
-}
-```
-
-In the next section, we will make this `getSqrt` (defined in the `sqrt` function's decorator) a method of the RPC server Noir will use.
-
-:::danger
-
-As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example:
-
-```rust
-fn main(input: Field) {
-    let sqrt = get_sqrt(input);
-    assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle!
-}
-```
-
-:::
-
-:::info
-
-Currently, oracles only work with single params or array params. For example:
-
-```rust
-#[oracle(getSqrt)]
-unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { }
-```
-
-:::
-
-## Step 2 - Write an RPC server
-
-Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in JavaScript.
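Before writing any handler code, it may help to see roughly what Nargo will POST to the server. The payload below is illustrative rather than normative (it assumes the `getSqrt` oracle above, called with two hex-encoded inputs); the `Single`/`Array` parameter shapes are described in the tip further down:

```json
{
  "jsonrpc": "2.0",
  "method": "getSqrt",
  "params": [{ "Array": [{ "inner": "0x04" }, { "inner": "0x10" }] }],
  "id": 1
}
```
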
-
-Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots:
-
-```rust
-#[oracle(getSqrt)]
-unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { }
-
-unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] {
-    sqrt(input)
-}
-
-fn main(input: [Field; 2]) {
-    let sqrt = get_sqrt(input);
-    assert(sqrt[0].pow_32(2) as u64 == input[0] as u64);
-    assert(sqrt[1].pow_32(2) as u64 == input[1] as u64);
-}
-```
-
-:::info
-
-Why square root?
-
-In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be done outside the circuit. In the circuit we can then simply square the returned values and compare them against the inputs.
-
-:::
-
-Now, we should write the corresponding RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example):
-
-```js
-import { JSONRPCServer } from "json-rpc-2.0";
-import express from "express";
-import bodyParser from "body-parser";
-
-const app = express();
-app.use(bodyParser.json());
-
-const server = new JSONRPCServer();
-app.post("/", (req, res) => {
-  const jsonRPCRequest = req.body;
-  server.receive(jsonRPCRequest).then((jsonRPCResponse) => {
-    if (jsonRPCResponse) {
-      res.json(jsonRPCResponse);
-    } else {
-      res.sendStatus(204);
-    }
-  });
-});
-
-app.listen(5555);
-```
-
-Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps over the params array and returns their square roots:
-
-```js
-server.addMethod("getSqrt", async (params) => {
-  const values = params[0].Array.map(({ inner }) => {
-    return { inner: `${Math.sqrt(parseInt(inner, 16))}` };
-  });
-  return { values: [{ Array: values }] };
-});
-```
-
-:::tip
-
-Brillig expects an object with an array of values. Each value is an object declaring itself to be `Single` or `Array`, with an `inner` property *as a string*. For example:
-
-```json
-{ "values": [{ "Array": [{ "inner": "1" }, { "inner": "2" }]}]}
-{ "values": [{ "Single": { "inner": "1" }}]}
-{ "values": [{ "Single": { "inner": "1" }}, { "Array": [{ "inner": "1" }, { "inner": "2" }]}]}
-```
-
-If you're using TypeScript, the following types may be helpful in understanding the expected return value:
-
-```js
-interface Value {
-  inner: string,
-}
-
-interface SingleForeignCallParam {
-  Single: Value,
-}
-
-interface ArrayForeignCallParam {
-  Array: Value[],
-}
-
-type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam;
-
-interface ForeignCallResult {
-  values: ForeignCallParam[],
-}
-```
-
-:::
-
-## Step 3 - Usage with Nargo
-
-Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example:
-
-```bash
-nargo test --oracle-resolver http://localhost:5555
-```
-
-This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator.
-
-## Step 4 - Usage with NoirJS
-
-In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything.
-
-For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`. You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app:
-
-```js
-const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc
-
-await noir.generateFinalProof(inputs, foreignCallHandler)
-```
-
-As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md)". It doesn't have to be an RPC call as it is for Nargo.
-
-:::tip
-
-Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)?
-
-You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server.
-
-:::
-
-In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server), by making it a JSON RPC Client.
-
-For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code):
-
-```js
-import { JSONRPCClient } from "json-rpc-2.0";
-
-// declaring the JSONRPCClient
-const client = new JSONRPCClient((jsonRPCRequest) => {
-// hitting the same JSON RPC Server we coded above
-  return fetch("http://localhost:5555", {
-    method: "POST",
-    headers: {
-      "content-type": "application/json",
-    },
-    body: JSON.stringify(jsonRPCRequest),
-  }).then((response) => {
-    if (response.status === 200) {
-      return response
-        .json()
-        .then((jsonRPCResponse) => client.receive(jsonRPCResponse));
-    } else if (jsonRPCRequest.id !== undefined) {
-      return Promise.reject(new Error(response.statusText));
-    }
-  });
-});
-
-// declaring a function that takes the name of the foreign call (getSqrt) and the inputs
-const foreignCallHandler = async (name, input) => {
-  // notice that the "input" parameter contains *all* the inputs
-  // in this case we want to make the RPC request with the first parameter, which would be input[0]
-  const oracleReturn = await client.request(name, [
-    { Array: input[0].map((i) => ({ inner: i.toString("hex") })) },
-  ]);
-  return [oracleReturn.values[0].Array.map((x) => x.inner)];
-};
-
-// the rest of your NoirJS code
-const input = { input: [4, 16] };
-const { witness } = await noir.execute(input, foreignCallHandler);
-```
-
-:::tip
-
-If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy. For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem:
-
-```bash
-yarn add cors
-```
-
-and use it as middleware:
-
-```js
-import cors from "cors";
-
-const app = express();
-app.use(cors())
-```
-
-:::
-
-## Conclusion
-
-By the end of this guide, you should be able to:
-
-- Write your own logic around Oracles, including a JSON RPC server to make them work with your Nargo commands.
-- Provide custom foreign call handlers for NoirJS.
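Since `nargo test` also honors `--oracle-resolver` (see Step 3), you can exercise the oracle from a Noir test as well. A minimal sketch, assuming the `getSqrt` program from Step 1 and the RPC server from Step 2 listening on port 5555:

```rust
#[test]
fn test_sqrt() {
    // Run with: nargo test --oracle-resolver http://localhost:5555
    let input = [4, 16];
    let sqrt = get_sqrt(input);
    // Constrain the oracle's response, exactly as in main
    assert(sqrt[0].pow_32(2) as u64 == input[0] as u64);
    assert(sqrt[1].pow_32(2) as u64 == input[1] as u64);
}
```
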
diff --git a/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/oracles.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/oracles.md
deleted file mode 100644
index 2e6a6818d48..00000000000
--- a/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/oracles.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: Oracles
-description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide.
-keywords:
-  - Noir
-  - Oracles
-  - RPC Calls
-  - Unconstrained Functions
-  - Programming
-  - Blockchain
-sidebar_position: 6
----
-
-Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation.
-
-Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md).
-
-You can declare an Oracle through the `#[oracle(<name>)]` flag. Example:
-
-```rust
-#[oracle(get_number_sequence)]
-unconstrained fn get_number_sequence(_size: Field) -> [Field] {}
-```
diff --git a/noir/flake.nix b/noir/flake.nix
index 2300d009114..6849dc0a0ad 100644
--- a/noir/flake.nix
+++ b/noir/flake.nix
@@ -73,7 +73,7 @@
       # Configuration shared between builds
       config = {
         # x-release-please-start-version
-        version = "0.22.0";
+        version = "0.23.0";
         # x-release-please-end
 
         src = pkgs.lib.cleanSourceWith {
diff --git a/noir/test_programs/execution_success/bit_and/Prover.toml b/noir/test_programs/execution_success/bit_and/Prover.toml
index 40ce2b0bc27..34a5b63e5b1 100644
--- a/noir/test_programs/execution_success/bit_and/Prover.toml
+++ b/noir/test_programs/execution_success/bit_and/Prover.toml
@@ -1,2 +1,4 @@
 x = "0x00"
 y = "0x10"
+a = "0x00"
+b = "0x10"
diff --git a/noir/test_programs/execution_success/bit_and/src/main.nr b/noir/test_programs/execution_success/bit_and/src/main.nr
index 0bc1d9a49bd..5a0aa17e3ed 100644
--- a/noir/test_programs/execution_success/bit_and/src/main.nr
+++ b/noir/test_programs/execution_success/bit_and/src/main.nr
@@ -1,6 +1,6 @@
 // You can only do bit operations with integers.
// (Kobi/Daira/Circom/#37) https://github.com/iden3/circom/issues/37 -fn main(x: Field, y: Field) { +fn main(x: Field, y: Field, a: Field, b: Field) { let x_as_u8 = x as u8; let y_as_u8 = y as u8; @@ -9,8 +9,8 @@ fn main(x: Field, y: Field) { let flag = (x == 0) & (y == 16); assert(flag); //bitwise and with odd bits: - let x_as_u11 = x as u11; - let y_as_u11 = y as u11; - assert((x_as_u11 & y_as_u11) == x_as_u11); + let a_as_u8 = a as u8; + let b_as_u8 = b as u8; + assert((a_as_u8 & b_as_u8) == a_as_u8); } diff --git a/noir/test_programs/execution_success/debug_logs/src/main.nr b/noir/test_programs/execution_success/debug_logs/src/main.nr index 6accdf725d9..52c910065c1 100644 --- a/noir/test_programs/execution_success/debug_logs/src/main.nr +++ b/noir/test_programs/execution_success/debug_logs/src/main.nr @@ -39,7 +39,26 @@ fn main(x: Field, y: pub Field) { let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; std::println(struct_string); + let one_tuple = (1, 2, 3); + let another_tuple = (4, 5, 6); + std::println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); + std::println(one_tuple); + + let tuples_nested = (one_tuple, another_tuple); + std::println(f"tuples_nested: {tuples_nested}"); + std::println(tuples_nested); + regression_2906(); + + let free_lambda = |x| x + 1; + let sentinel: u32 = 8888; + std::println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); + std::println(free_lambda); + + let one = 1; + let closured_lambda = |x| x + one; + std::println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); + std::println(closured_lambda); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -79,3 +98,4 @@ fn regression_2906() { dep::std::println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } + diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml b/noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml similarity index 62% rename from noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml rename to noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml index c8925ed97b4..4f0748f79be 100644 --- a/noir/test_programs/execution_success/nested_slice_dynamic/Nargo.toml +++ b/noir/test_programs/execution_success/nested_array_in_slice/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "nested_slice_dynamic" +name = "nested_array_in_slice" type = "bin" authors = [""] [dependencies] \ No newline at end of file diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/Prover.toml b/noir/test_programs/execution_success/nested_array_in_slice/Prover.toml similarity index 100% rename from noir/test_programs/execution_success/nested_slice_dynamic/Prover.toml rename to noir/test_programs/execution_success/nested_array_in_slice/Prover.toml diff --git a/noir/test_programs/execution_success/nested_slice_dynamic/src/main.nr b/noir/test_programs/execution_success/nested_array_in_slice/src/main.nr similarity index 100% rename from noir/test_programs/execution_success/nested_slice_dynamic/src/main.nr rename to noir/test_programs/execution_success/nested_array_in_slice/src/main.nr diff --git a/noir/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock b/noir/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock index c43d1b84915..3c14a936907 100644 --- a/noir/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock +++ b/noir/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock @@ -4,81 +4,67 
@@ version = 3 [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ "windows-sys", ] [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", "windows-sys", ] -[[package]] -name = "bitflags" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" - -[[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - [[package]] name = "clap" -version = "4.3.19" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d" +checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.19" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1" +checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" dependencies = [ "anstream", "anstyle", @@ -88,9 +74,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", @@ -100,9 +86,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "colorchoice" @@ -110,62 +96,12 
@@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" -[[package]] -name = "errno" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix", - "windows-sys", -] - -[[package]] -name = "libc" -version = "0.2.147" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" - -[[package]] -name = "linux-raw-sys" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" - [[package]] name = "mock_backend" version = "0.1.0" @@ -173,43 +109,24 @@ dependencies = [ "clap", ] -[[package]] -name = "once_cell" -version = "1.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" - [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.31" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] -[[package]] -name = "rustix" -version = "0.38.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] - [[package]] name = "strsim" version = "0.10.0" @@ -218,9 +135,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "syn" -version = "2.0.26" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -229,9 +146,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.11" +version = 
"1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "utf8parse" @@ -241,18 +158,18 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -265,42 +182,42 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/noir/tooling/lsp/Cargo.toml b/noir/tooling/lsp/Cargo.toml index 6371bcbac19..750e85694e2 100644 --- a/noir/tooling/lsp/Cargo.toml +++ b/noir/tooling/lsp/Cargo.toml @@ -25,6 +25,8 @@ async-lsp = { workspace = true, features = ["omni-trait"] } serde_with = "3.2.0" thiserror.workspace = true fm.workspace = 
true
+rayon = "1.8.0"
+fxhash.workspace = true
 
 [target.'cfg(all(target_arch = "wasm32", not(target_os = "wasi")))'.dependencies]
 wasm-bindgen.workspace = true
diff --git a/noir/tooling/lsp/src/lib.rs b/noir/tooling/lsp/src/lib.rs
index 1099ad60269..b64fc474b0b 100644
--- a/noir/tooling/lsp/src/lib.rs
+++ b/noir/tooling/lsp/src/lib.rs
@@ -17,16 +17,20 @@ use async_lsp::{
     router::Router, AnyEvent, AnyNotification, AnyRequest, ClientSocket, Error, LspService,
     ResponseError,
 };
-use fm::codespan_files as files;
+use fm::{codespan_files as files, FileManager};
+use fxhash::FxHashSet;
 use lsp_types::CodeLens;
-use nargo::workspace::Workspace;
+use nargo::{parse_all, workspace::Workspace};
 use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING};
 use noirc_frontend::{
     graph::{CrateId, CrateName},
-    hir::{Context, FunctionNameMatch},
+    hir::{def_map::parse_file, Context, FunctionNameMatch, ParsedFiles},
     node_interner::NodeInterner,
+    parser::ParserError,
+    ParsedModule,
 };
+use rayon::prelude::*;
 
 use notifications::{
     on_did_change_configuration, on_did_change_text_document, on_did_close_text_document,
@@ -34,7 +38,8 @@
 };
 use requests::{
     on_code_lens_request, on_formatting, on_goto_declaration_request, on_goto_definition_request,
-    on_initialize, on_profile_run_request, on_shutdown, on_test_run_request, on_tests_request,
+    on_goto_type_definition_request, on_initialize, on_profile_run_request, on_shutdown,
+    on_test_run_request, on_tests_request,
 };
 use serde_json::Value as JsonValue;
 use thiserror::Error;
@@ -64,6 +69,8 @@ pub struct LspState {
     input_files: HashMap<String, String>,
     cached_lenses: HashMap<String, Vec<CodeLens>>,
     cached_definitions: HashMap<PathBuf, NodeInterner>,
+    cached_parsed_files: HashMap<PathBuf, (usize, (ParsedModule, Vec<ParserError>))>,
+    parsing_cache_enabled: bool,
 }
 
 impl LspState {
@@ -76,6 +83,8 @@ impl LspState {
             cached_lenses: HashMap::new(),
             cached_definitions: HashMap::new(),
             open_documents_count: 0,
+            cached_parsed_files: HashMap::new(),
+            parsing_cache_enabled: true,
         }
     }
 }
@@ -98,6 +107,7 @@ impl NargoLspService {
             .request::<request::NargoProfileRun, _>(on_profile_run_request)
             .request::<request::GotoDefinition, _>(on_goto_definition_request)
             .request::<request::GotoDeclaration, _>(on_goto_declaration_request)
+            .request::<request::GotoTypeDefinition, _>(on_goto_type_definition_request)
             .notification::<notification::Initialized>(on_initialized)
             .notification::<notification::DidChangeConfiguration>(on_did_change_configuration)
             .notification::<notification::DidOpenTextDocument>(on_did_open_text_document)
@@ -225,20 +235,78 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result<Wor
 
-fn prepare_source(source: String) -> (Context<'static>, CrateId) {
+fn prepare_source(source: String, state: &mut LspState) -> (Context<'static, 'static>, CrateId) {
     let root = Path::new("");
     let file_name = Path::new("main.nr");
     let mut file_manager = file_manager_with_stdlib(root);
     file_manager.add_file_with_source(file_name, source).expect(
         "Adding source buffer to file manager should never fail when file manager is empty",
     );
+    let parsed_files = parse_diff(&file_manager, state);
 
-    let mut context = Context::new(file_manager);
+    let mut context = Context::new(file_manager, parsed_files);
 
     let root_crate_id = prepare_crate(&mut context, file_name);
 
     (context, root_crate_id)
 }
 
+fn parse_diff(file_manager: &FileManager, state: &mut LspState) -> ParsedFiles {
+    if state.parsing_cache_enabled {
+        let noir_file_hashes: Vec<_> = file_manager
+            .as_file_map()
+            .all_file_ids()
+            .par_bridge()
+            .filter_map(|&file_id| {
+                let file_path = file_manager.path(file_id).expect("expected file to exist");
+                let file_extension =
+                    file_path.extension().expect("expected all file paths to have an extension");
+                if file_extension == "nr" {
+                    Some((
+                        file_id,
+                        file_path.to_path_buf(),
+                        fxhash::hash(file_manager.fetch_file(file_id).expect("file must exist")),
+                    ))
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        let cache_hits: Vec<_> = noir_file_hashes
+            .par_iter()
+            .filter_map(|(file_id, file_path, current_hash)| {
+                let cached_version = state.cached_parsed_files.get(file_path);
+                if let Some((hash, cached_parsing)) = cached_version {
+                    if hash == current_hash {
+                        return Some((*file_id, cached_parsing.clone()));
+                    }
+                }
+                None
+            })
+            .collect();
+
+        let cache_hits_ids: FxHashSet<_> = cache_hits.iter().map(|(file_id, _)| *file_id).collect();
+
+        let cache_misses: Vec<_> = noir_file_hashes
+            .into_par_iter()
+            .filter(|(id, _, _)| !cache_hits_ids.contains(id))
+            .map(|(file_id, path, hash)| (file_id, path, hash, parse_file(file_manager, file_id)))
+            .collect();
+
+        cache_misses.iter().for_each(|(_, path, hash, parse_results)| {
+            state.cached_parsed_files.insert(path.clone(), (*hash, parse_results.clone()));
+        });
+
+        cache_misses
+            .into_iter()
+            .map(|(id, _, _, parse_results)| (id, parse_results))
+            .chain(cache_hits.into_iter())
+            .collect()
+    } else {
+        parse_all(file_manager)
+    }
+}
+
 #[test]
 fn prepare_package_from_source_string() {
     let source = r#"
@@ -249,7 +317,10 @@ fn prepare_package_from_source_string() {
     }
     "#;
 
-    let (mut context, crate_id) = crate::prepare_source(source.to_string());
+    let client = ClientSocket::new_closed();
+    let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver);
+
+    let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state);
     let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false);
     let main_func_id = context.get_main_function(&crate_id);
     assert!(main_func_id.is_some());
diff --git a/noir/tooling/lsp/src/notifications/mod.rs b/noir/tooling/lsp/src/notifications/mod.rs
index 0cd86803efa..355bb7832c4 100644
--- a/noir/tooling/lsp/src/notifications/mod.rs
+++ b/noir/tooling/lsp/src/notifications/mod.rs
@@ -13,7 +13,7 @@ use crate::types::{
 };
 
 use crate::{
-    byte_span_to_range, get_package_tests_in_crate, prepare_source,
+    byte_span_to_range, get_package_tests_in_crate, parse_diff, prepare_source,
     resolve_workspace_for_source_path, LspState,
 };
 
@@ -55,7 +55,7 @@ pub(super) fn on_did_change_text_document(
     let text = params.content_changes.into_iter().next().unwrap().text;
     state.input_files.insert(params.text_document.uri.to_string(), text.clone());
 
-    let (mut context, crate_id) = prepare_source(text);
+    let (mut context, crate_id) = prepare_source(text, state);
     let _ = check_crate(&mut context, crate_id, false, false);
 
     let workspace = match resolve_workspace_for_source_path(
@@ -131,10 +131,13 @@ fn process_noir_document(
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
+
     let diagnostics: Vec<_> = workspace
         .into_iter()
         .flat_map(|package| -> Vec<Diagnostic> {
-            let (mut context, crate_id) = prepare_package(&workspace_file_manager, package);
+            let (mut context, crate_id) =
+                prepare_package(&workspace_file_manager, &parsed_files, package);
 
             let file_diagnostics = match check_crate(&mut context, crate_id, false, false) {
                 Ok(((), warnings)) => warnings,
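
The `parse_diff` function added above is the heart of this change: every `.nr` file is hashed with `fxhash`, hits against `cached_parsed_files` are reused, and only the misses are re-parsed before being written back into the cache. The same idea in isolation, as a minimal single-threaded sketch — `Parsed` and `parse` below are illustrative stand-ins, not the real `ParsedFiles` machinery:

    use std::collections::HashMap;

    // Stand-in for a real parse result; in the LSP this is
    // (ParsedModule, Vec<ParserError>).
    #[derive(Clone)]
    struct Parsed(String);

    fn parse(source: &str) -> Parsed {
        Parsed(format!("parsed {} bytes", source.len()))
    }

    /// Per-path cache of (content hash, parse result), mirroring
    /// `cached_parsed_files` in `LspState`.
    #[derive(Default)]
    struct ParseCache {
        entries: HashMap<String, (usize, Parsed)>,
    }

    impl ParseCache {
        fn get_or_parse(&mut self, path: &str, source: &str) -> Parsed {
            let hash = fxhash::hash(source);
            if let Some((cached_hash, cached)) = self.entries.get(path) {
                if *cached_hash == hash {
                    return cached.clone(); // unchanged file: skip re-parsing
                }
            }
            let parsed = parse(source);
            self.entries.insert(path.to_owned(), (hash, parsed.clone()));
            parsed
        }
    }

Keying on a content hash rather than a modification time lets the same cache serve saved files and unsaved editor buffers alike; the real implementation additionally fans the hashing and the cache-miss parsing out over rayon, so a cold cache parses the workspace in parallel and a warm cache parses nothing.
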
diff --git a/noir/tooling/lsp/src/requests/code_lens_request.rs b/noir/tooling/lsp/src/requests/code_lens_request.rs
index b16c19457f0..893ba33d845 100644
--- a/noir/tooling/lsp/src/requests/code_lens_request.rs
+++ b/noir/tooling/lsp/src/requests/code_lens_request.rs
@@ -64,7 +64,7 @@ fn on_code_lens_request_inner(
     let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap();
     let package = workspace.members.first().unwrap();
 
-    let (mut context, crate_id) = prepare_source(source_string);
+    let (mut context, crate_id) = prepare_source(source_string, state);
 
     // We ignore the warnings and errors produced by compilation for producing code lenses
     // because we can still get the test functions even if compilation fails
     let _ = check_crate(&mut context, crate_id, false, false);
diff --git a/noir/tooling/lsp/src/requests/goto_declaration.rs b/noir/tooling/lsp/src/requests/goto_declaration.rs
index 6e3664804f6..8e6d519b895 100644
--- a/noir/tooling/lsp/src/requests/goto_declaration.rs
+++ b/noir/tooling/lsp/src/requests/goto_declaration.rs
@@ -1,8 +1,8 @@
 use std::future::{self, Future};
 
-use crate::resolve_workspace_for_source_path;
 use crate::types::GotoDeclarationResult;
 use crate::LspState;
+use crate::{parse_diff, resolve_workspace_for_source_path};
 
 use async_lsp::{ErrorCode, ResponseError};
 use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse};
@@ -21,7 +21,7 @@ pub(crate) fn on_goto_declaration_request(
 }
 
 fn on_goto_definition_inner(
-    _state: &mut LspState,
+    state: &mut LspState,
     params: GotoDeclarationParams,
 ) -> Result<GotoDeclarationResult, ResponseError> {
     let file_path =
         params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| {
@@ -36,11 +36,13 @@ fn on_goto_definition_inner(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
 
-    let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package);
+    let (mut context, crate_id) =
+        nargo::prepare_package(&workspace_file_manager, &parsed_files, package);
 
     let interner;
-    if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) {
+    if let Some(def_interner) = state.cached_definitions.get(&package_root_path) {
         interner = def_interner;
     } else {
         // We ignore the warnings and errors produced by compilation while resolving the definition
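
Alongside the new parsing cache, the handler keeps the existing `NodeInterner` cache: an interner already stored for the package root is borrowed directly, and a fresh `check_crate` happens only on a miss. The borrow shape, reduced to a sketch with placeholder types:

    use std::collections::HashMap;

    struct Interner; // stand-in for noirc_frontend's NodeInterner

    // Prefer the interner cached for this package root; otherwise fall back
    // to one computed on demand (in the LSP, by running check_crate).
    fn pick<'a>(
        cache: &'a HashMap<String, Interner>,
        root: &str,
        fresh: &'a Interner,
    ) -> &'a Interner {
        match cache.get(root) {
            Some(cached) => cached,
            None => fresh,
        }
    }
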
diff --git a/noir/tooling/lsp/src/requests/goto_definition.rs b/noir/tooling/lsp/src/requests/goto_definition.rs
index 277bbf013f9..88bb667f2e8 100644
--- a/noir/tooling/lsp/src/requests/goto_definition.rs
+++ b/noir/tooling/lsp/src/requests/goto_definition.rs
@@ -1,9 +1,10 @@
 use std::future::{self, Future};
 
-use crate::resolve_workspace_for_source_path;
+use crate::{parse_diff, resolve_workspace_for_source_path};
 use crate::{types::GotoDefinitionResult, LspState};
 
 use async_lsp::{ErrorCode, ResponseError};
+use lsp_types::request::GotoTypeDefinitionParams;
 use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse};
 use nargo::insert_all_files_for_workspace_into_file_manager;
 use noirc_driver::file_manager_with_stdlib;
@@ -14,13 +15,22 @@ pub(crate) fn on_goto_definition_request(
     state: &mut LspState,
     params: GotoDefinitionParams,
 ) -> impl Future<Output = Result<GotoDefinitionResult, ResponseError>> {
-    let result = on_goto_definition_inner(state, params);
+    let result = on_goto_definition_inner(state, params, false);
     future::ready(result)
 }
 
+pub(crate) fn on_goto_type_definition_request(
+    state: &mut LspState,
+    params: GotoTypeDefinitionParams,
+) -> impl Future<Output = Result<GotoDefinitionResult, ResponseError>> {
+    let result = on_goto_definition_inner(state, params, true);
+    future::ready(result)
+}
+
 fn on_goto_definition_inner(
-    _state: &mut LspState,
+    state: &mut LspState,
     params: GotoDefinitionParams,
+    return_type_location_instead: bool,
 ) -> Result<GotoDefinitionResult, ResponseError> {
     let file_path =
         params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| {
@@ -34,11 +44,13 @@ fn on_goto_definition_inner(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
 
-    let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package);
+    let (mut context, crate_id) =
+        nargo::prepare_package(&workspace_file_manager, &parsed_files, package);
 
     let interner;
-    if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) {
+    if let Some(def_interner) = state.cached_definitions.get(&package_root_path) {
         interner = def_interner;
     } else {
         // We ignore the warnings and errors produced by compilation while resolving the definition
@@ -65,8 +77,9 @@ fn on_goto_definition_inner(
         span: noirc_errors::Span::single_char(byte_index as u32),
     };
 
-    let goto_definition_response =
-        interner.get_definition_location_from(search_for_location).and_then(|found_location| {
+    let goto_definition_response = interner
+        .get_definition_location_from(search_for_location, return_type_location_instead)
+        .and_then(|found_location| {
             let file_id = found_location.file;
             let definition_position = to_lsp_location(files, file_id, found_location.span)?;
             let response: GotoDefinitionResponse =
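
`GotoTypeDefinition` support comes almost for free here: both requests funnel into the same `on_goto_definition_inner`, with the new `return_type_location_instead` flag selecting which location the interner reports, and each handler wraps its synchronously computed result in an already-resolved future. A skeleton of that dispatch, with the actual lookup elided and stand-in types rather than the real noirc API:

    use std::future::{self, Future};

    type Location = (usize, usize); // stand-in for a file/span location

    fn resolve(_cursor: Location, return_type_location_instead: bool) -> Option<Location> {
        if return_type_location_instead {
            None // ...would resolve to the definition of the symbol's *type*
        } else {
            None // ...would resolve to the symbol's own definition
        }
    }

    // Two LSP entry points, one implementation.
    fn on_goto_definition(cursor: Location) -> impl Future<Output = Option<Location>> {
        future::ready(resolve(cursor, false))
    }

    fn on_goto_type_definition(cursor: Location) -> impl Future<Output = Option<Location>> {
        future::ready(resolve(cursor, true))
    }
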
diff --git a/noir/tooling/lsp/src/requests/mod.rs b/noir/tooling/lsp/src/requests/mod.rs
index 9a4738e1985..ec56cf5045a 100644
--- a/noir/tooling/lsp/src/requests/mod.rs
+++ b/noir/tooling/lsp/src/requests/mod.rs
@@ -5,7 +5,7 @@ use async_lsp::ResponseError;
 use fm::codespan_files::Error;
 use lsp_types::{
     DeclarationCapability, Location, Position, TextDocumentSyncCapability, TextDocumentSyncKind,
-    Url,
+    TypeDefinitionProviderCapability, Url,
 };
 use nargo_fmt::Config;
 use serde::{Deserialize, Serialize};
@@ -35,7 +35,8 @@ mod tests;
 
 pub(crate) use {
     code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request,
     goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request,
-    profile_run::on_profile_run_request, test_run::on_test_run_request, tests::on_tests_request,
+    goto_definition::on_goto_type_definition_request, profile_run::on_profile_run_request,
+    test_run::on_test_run_request, tests::on_tests_request,
 };
 
 /// LSP client will send initialization request after the server has started.
@@ -46,15 +47,25 @@ struct LspInitializationOptions {
     /// By default this will be set to true (enabled).
     #[serde(rename = "enableCodeLens", default = "default_enable_code_lens")]
     enable_code_lens: bool,
+
+    #[serde(rename = "enableParsingCache", default = "default_enable_parsing_cache")]
+    enable_parsing_cache: bool,
 }
 
 fn default_enable_code_lens() -> bool {
     true
 }
 
+fn default_enable_parsing_cache() -> bool {
+    true
+}
+
 impl Default for LspInitializationOptions {
     fn default() -> Self {
-        Self { enable_code_lens: default_enable_code_lens() }
+        Self {
+            enable_code_lens: default_enable_code_lens(),
+            enable_parsing_cache: default_enable_parsing_cache(),
+        }
     }
 }
 
@@ -63,11 +74,11 @@ pub(crate) fn on_initialize(
     params: InitializeParams,
 ) -> impl Future<Output = Result<InitializeResult, ResponseError>> {
     state.root_path = params.root_uri.and_then(|root_uri| root_uri.to_file_path().ok());
-
     let initialization_options: LspInitializationOptions = params
         .initialization_options
         .and_then(|value| serde_json::from_value(value).ok())
         .unwrap_or_default();
+    state.parsing_cache_enabled = initialization_options.enable_parsing_cache;
 
     async move {
         let text_document_sync = TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL);
@@ -94,6 +105,7 @@ pub(crate) fn on_initialize(
                 nargo: Some(nargo),
                 definition_provider: Some(lsp_types::OneOf::Left(true)),
                 declaration_provider: Some(DeclarationCapability::Simple(true)),
+                type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)),
             },
             server_info: None,
         })
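
The new `enableParsingCache` option follows the same serde pattern as `enableCodeLens`: a camelCase rename plus a `default` function, so older clients that omit the field still deserialize cleanly and keep the cache on. The pattern in isolation, as a hypothetical minimal example depending only on serde and serde_json:

    use serde::Deserialize;

    #[derive(Debug, Deserialize)]
    struct Options {
        #[serde(rename = "enableParsingCache", default = "default_enable_parsing_cache")]
        enable_parsing_cache: bool,
    }

    fn default_enable_parsing_cache() -> bool {
        true
    }

    fn main() {
        // Field present: honored.
        let opts: Options =
            serde_json::from_str(r#"{ "enableParsingCache": false }"#).unwrap();
        assert!(!opts.enable_parsing_cache);

        // Field absent (older client): falls back to the default, true.
        let opts: Options = serde_json::from_str("{}").unwrap();
        assert!(opts.enable_parsing_cache);
    }
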
diff --git a/noir/tooling/lsp/src/requests/profile_run.rs b/noir/tooling/lsp/src/requests/profile_run.rs
index 6664475a68c..8ba91338f55 100644
--- a/noir/tooling/lsp/src/requests/profile_run.rs
+++ b/noir/tooling/lsp/src/requests/profile_run.rs
@@ -13,6 +13,7 @@ use noirc_driver::{
 use noirc_errors::{debug_info::OpCodesCount, Location};
 
 use crate::{
+    parse_diff,
     types::{NargoProfileRunParams, NargoProfileRunResult},
     LspState,
 };
@@ -26,7 +27,7 @@ pub(crate) fn on_profile_run_request(
 }
 
 fn on_profile_run_request_inner(
-    state: &LspState,
+    state: &mut LspState,
     params: NargoProfileRunParams,
 ) -> Result<NargoProfileRunResult, ResponseError> {
     let root_path = state.root_path.as_deref().ok_or_else(|| {
@@ -52,23 +53,17 @@ fn on_profile_run_request_inner(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
 
     // Since we filtered on crate name, this should be the only item in the iterator
     match workspace.into_iter().next() {
         Some(_package) => {
-            let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace
-                .into_iter()
-                .filter(|package| !package.is_library())
-                .cloned()
-                .partition(|package| package.is_binary());
-
             let expression_width = ExpressionWidth::Bounded { width: 3 };
 
             let (compiled_programs, compiled_contracts) = nargo::ops::compile_workspace(
                 &workspace_file_manager,
+                &parsed_files,
                 &workspace,
-                &binary_packages,
-                &contract_packages,
                 expression_width,
                 &CompileOptions::default(),
             )
diff --git a/noir/tooling/lsp/src/requests/test_run.rs b/noir/tooling/lsp/src/requests/test_run.rs
index c2181d7839d..135090d7ed9 100644
--- a/noir/tooling/lsp/src/requests/test_run.rs
+++ b/noir/tooling/lsp/src/requests/test_run.rs
@@ -13,6 +13,7 @@ use noirc_driver::{
 use noirc_frontend::hir::FunctionNameMatch;
 
 use crate::{
+    parse_diff,
     types::{NargoTestRunParams, NargoTestRunResult},
     LspState,
 };
@@ -25,7 +26,7 @@ pub(crate) fn on_test_run_request(
 }
 
 fn on_test_run_request_inner(
-    state: &LspState,
+    state: &mut LspState,
     params: NargoTestRunParams,
 ) -> Result<NargoTestRunResult, ResponseError> {
     let root_path = state.root_path.as_deref().ok_or_else(|| {
@@ -52,11 +53,13 @@ fn on_test_run_request_inner(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
 
     // Since we filtered on crate name, this should be the only item in the iterator
     match workspace.into_iter().next() {
         Some(package) => {
-            let (mut context, crate_id) = prepare_package(&workspace_file_manager, package);
+            let (mut context, crate_id) =
+                prepare_package(&workspace_file_manager, &parsed_files, package);
 
             if check_crate(&mut context, crate_id, false, false).is_err() {
                 let result = NargoTestRunResult {
                     id: params.id.clone(),
diff --git a/noir/tooling/lsp/src/requests/tests.rs b/noir/tooling/lsp/src/requests/tests.rs
index 0f717b9fb9e..5b78fcc65c3 100644
--- a/noir/tooling/lsp/src/requests/tests.rs
+++ b/noir/tooling/lsp/src/requests/tests.rs
@@ -7,7 +7,7 @@ use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSele
 use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING};
 
 use crate::{
-    get_package_tests_in_crate,
+    get_package_tests_in_crate, parse_diff,
     types::{NargoPackageTests, NargoTestsParams, NargoTestsResult},
     LspState,
 };
@@ -52,11 +52,13 @@ fn on_tests_request_inner(
 
     let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir);
     insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager);
+    let parsed_files = parse_diff(&workspace_file_manager, state);
 
     let package_tests: Vec<_> = workspace
         .into_iter()
         .filter_map(|package| {
-            let (mut context, crate_id) = prepare_package(&workspace_file_manager, package);
+            let (mut context, crate_id) =
+                prepare_package(&workspace_file_manager, &parsed_files, package);
 
             // We ignore the warnings and errors produced by compilation for producing tests
             // because we can still get the test functions even if compilation fails
             let _ = check_crate(&mut context, crate_id, false, false);
diff --git a/noir/tooling/lsp/src/types.rs b/noir/tooling/lsp/src/types.rs
index 8dbc51ec83c..e3492f21346 100644
--- a/noir/tooling/lsp/src/types.rs
+++ b/noir/tooling/lsp/src/types.rs
@@ -1,5 +1,7 @@
 use fm::FileId;
-use lsp_types::{DeclarationCapability, DefinitionOptions, OneOf};
+use lsp_types::{
+    DeclarationCapability, DefinitionOptions, OneOf, TypeDefinitionProviderCapability,
+};
 use noirc_driver::DebugFile;
 use noirc_errors::{debug_info::OpCodesCount, Location};
 use noirc_frontend::graph::CrateName;
@@ -25,7 +27,8 @@ pub(crate) mod request {
 
     // Re-providing lsp_types that we don't need to override
     pub(crate) use lsp_types::request::{
-        CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition, Shutdown,
+        CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition,
+        GotoTypeDefinition, Shutdown,
     };
 
     #[derive(Debug)]
@@ -118,6 +121,10 @@ pub(crate) struct ServerCapabilities {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) definition_provider: Option<OneOf<bool, DefinitionOptions>>,
 
+    /// The server provides goto type definition support.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub(crate) type_definition_provider: Option<TypeDefinitionProviderCapability>,
+
     /// The server provides code lens.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub(crate) code_lens_provider: Option<CodeLensOptions>,
diff --git a/noir/tooling/nargo/src/lib.rs b/noir/tooling/nargo/src/lib.rs
index 62ff4325a23..0fdff8b202f 100644
--- a/noir/tooling/nargo/src/lib.rs
+++ b/noir/tooling/nargo/src/lib.rs
@@ -20,9 +20,10 @@ use fm::FileManager;
 use noirc_driver::{add_dep, prepare_crate, prepare_dependency};
 use noirc_frontend::{
     graph::{CrateId, CrateName},
-    hir::Context,
+    hir::{def_map::parse_file, Context, ParsedFiles},
 };
 use package::{Dependency, Package};
+use rayon::prelude::*;
 
 pub use self::errors::NargoError;
 
@@ -95,11 +96,27 @@ fn insert_all_files_for_packages_dependencies_into_file_manager(
     }
 }
 
-pub fn prepare_package<'file_manager>(
+pub fn parse_all(file_manager: &FileManager) -> ParsedFiles {
+    file_manager
+        .as_file_map()
+        .all_file_ids()
+        .par_bridge()
+        .filter(|&&file_id| {
+            let file_path = file_manager.path(file_id).expect("expected file to exist");
+            let file_extension =
+                file_path.extension().expect("expected all file paths to have an extension");
+            file_extension == "nr"
+        })
+        .map(|&file_id| (file_id, parse_file(file_manager, file_id)))
+        .collect()
+}
+
+pub fn prepare_package<'file_manager, 'parsed_files>(
     file_manager: &'file_manager FileManager,
+    parsed_files: &'parsed_files ParsedFiles,
     package: &Package,
-) -> (Context<'file_manager>, CrateId) {
-    let mut context = Context::from_ref_file_manager(file_manager);
+) -> (Context<'file_manager, 'parsed_files>, CrateId) {
+    let mut context = Context::from_ref_file_manager(file_manager, parsed_files);
 
     let crate_id = prepare_crate(&mut context, &package.entry_path);
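
`parse_all` is the batch counterpart used by the CLI commands below: every `.nr` file in the `FileManager` is parsed exactly once, in parallel. `par_bridge` is the piece doing the work — it turns the ordinary sequential iterator of file ids into a rayon parallel iterator. The same shape in miniature, with a toy `parse` standing in for the real parser:

    use rayon::prelude::*;

    fn parse(name: &str) -> usize {
        name.len() // stand-in for real parsing work
    }

    fn main() {
        // An ordinary sequential iterator, like all_file_ids()...
        let files = ["main.nr", "lib.nr", "README.md"].into_iter();

        // ...bridged onto the rayon thread pool, filtered to .nr sources,
        // and parsed in parallel.
        let parsed: Vec<(String, usize)> = files
            .par_bridge()
            .filter(|name| name.ends_with(".nr"))
            .map(|name| (name.to_string(), parse(name)))
            .collect();

        assert_eq!(parsed.len(), 2);
    }

Note that `par_bridge` does not preserve order, which is fine here because the result is collected into a map keyed by file id.
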
diff --git a/noir/tooling/nargo/src/ops/compile.rs b/noir/tooling/nargo/src/ops/compile.rs
index 043e2a367a5..866bfe39d7b 100644
--- a/noir/tooling/nargo/src/ops/compile.rs
+++ b/noir/tooling/nargo/src/ops/compile.rs
@@ -1,6 +1,7 @@
 use acvm::ExpressionWidth;
 use fm::FileManager;
 use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram};
+use noirc_frontend::hir::ParsedFiles;
 
 use crate::errors::CompileError;
 use crate::prepare_package;
@@ -15,22 +16,36 @@ use rayon::prelude::*;
 /// This function will return an error if there are any compilations errors reported.
 pub fn compile_workspace(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
-    binary_packages: &[Package],
-    contract_packages: &[Package],
     expression_width: ExpressionWidth,
     compile_options: &CompileOptions,
 ) -> Result<(Vec<CompiledProgram>, Vec<CompiledContract>), CompileError> {
+    let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace
+        .into_iter()
+        .filter(|package| !package.is_library())
+        .cloned()
+        .partition(|package| package.is_binary());
+
     // Compile all of the packages in parallel.
     let program_results: Vec<CompilationResult<CompiledProgram>> = binary_packages
         .par_iter()
         .map(|package| {
-            compile_program(file_manager, workspace, package, compile_options, expression_width)
+            compile_program(
+                file_manager,
+                parsed_files,
+                package,
+                compile_options,
+                expression_width,
+                None,
+            )
         })
        .collect();
     let contract_results: Vec<CompilationResult<CompiledContract>> = contract_packages
         .par_iter()
-        .map(|package| compile_contract(file_manager, package, compile_options, expression_width))
+        .map(|package| {
+            compile_contract(file_manager, parsed_files, package, compile_options, expression_width)
+        })
         .collect();
 
     // Report any warnings/errors which were encountered during compilation.
@@ -62,19 +77,16 @@ pub fn compile_workspace(
 
 pub fn compile_program(
     file_manager: &FileManager,
-    workspace: &Workspace,
+    parsed_files: &ParsedFiles,
     package: &Package,
     compile_options: &CompileOptions,
     expression_width: ExpressionWidth,
+    cached_program: Option<CompiledProgram>,
 ) -> CompilationResult<CompiledProgram> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
-
-    let program_artifact_path = workspace.package_build_path(package);
-    let mut debug_artifact_path = program_artifact_path.clone();
-    debug_artifact_path.set_file_name(format!("debug_{}.json", package.name));
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
 
     let (program, warnings) =
-        noirc_driver::compile_main(&mut context, crate_id, compile_options, None)?;
+        noirc_driver::compile_main(&mut context, crate_id, compile_options, cached_program)?;
 
     // Apply backend specific optimizations.
     let optimized_program = crate::ops::optimize_program(program, expression_width);
@@ -82,20 +94,16 @@ pub fn compile_program(
     Ok((optimized_program, warnings))
 }
 
-fn compile_contract(
+pub fn compile_contract(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     package: &Package,
     compile_options: &CompileOptions,
     expression_width: ExpressionWidth,
 ) -> CompilationResult<CompiledContract> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
+    let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package);
 
     let (contract, warnings) =
-        match noirc_driver::compile_contract(&mut context, crate_id, compile_options) {
-            Ok(contracts_and_warnings) => contracts_and_warnings,
-            Err(errors) => {
-                return Err(errors);
-            }
-        };
+        noirc_driver::compile_contract(&mut context, crate_id, compile_options)?;
 
     let optimized_contract = crate::ops::optimize_contract(contract, expression_width);
diff --git a/noir/tooling/nargo/src/ops/mod.rs b/noir/tooling/nargo/src/ops/mod.rs
index 34487ed9770..4912c84839e 100644
--- a/noir/tooling/nargo/src/ops/mod.rs
+++ b/noir/tooling/nargo/src/ops/mod.rs
@@ -1,4 +1,4 @@
-pub use self::compile::{compile_program, compile_workspace};
+pub use self::compile::{compile_contract, compile_program, compile_workspace};
 pub use self::execute::execute_circuit;
 pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor};
 pub use self::optimize::{optimize_contract, optimize_program};
diff --git a/noir/tooling/nargo_cli/Cargo.toml b/noir/tooling/nargo_cli/Cargo.toml
index 2652adaf327..6e022f090f0 100644
--- a/noir/tooling/nargo_cli/Cargo.toml
+++ b/noir/tooling/nargo_cli/Cargo.toml
@@ -74,7 +74,7 @@ pprof = { version = "0.12", features = [
     "criterion",
 ] }
 iai = "0.1.1"
-test-binary = "3.0.1"
+test-binary = "3.0.2"
 
 [[bench]]
 name = "criterion"
diff --git a/noir/tooling/nargo_cli/build.rs b/noir/tooling/nargo_cli/build.rs
index 9a0492c99ad..57aa487f66a 100644
--- a/noir/tooling/nargo_cli/build.rs
+++ b/noir/tooling/nargo_cli/build.rs
@@ -75,7 +75,7 @@ fn execution_success_{test_name}() {{
     let mut cmd = Command::cargo_bin("nargo").unwrap();
     cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend());
     cmd.arg("--program-dir").arg(test_program_dir);
-    cmd.arg("execute");
+    cmd.arg("execute").arg("--force");
 
     cmd.assert().success();
 }}
@@ -194,11 +194,12 @@ fn compile_success_empty_{test_name}() {{
     cmd.arg("--program-dir").arg(test_program_dir);
     cmd.arg("info");
     cmd.arg("--json");
+    cmd.arg("--force");
 
     let output = cmd.output().expect("Failed to execute command");
     if !output.status.success() {{
-        panic!("`nargo info` failed with: {{}}",
String::from_utf8(output.stderr).unwrap()); + panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); }} // `compile_success_empty` tests should be able to compile down to an empty circuit. @@ -206,7 +207,7 @@ fn compile_success_empty_{test_name}() {{ panic!("JSON was not well-formatted {{:?}}",output.stdout) }}); let num_opcodes = &json["programs"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().unwrap(), 0); + assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); }} "#, test_dir = test_dir.display(), @@ -242,7 +243,7 @@ fn compile_success_contract_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile"); + cmd.arg("compile").arg("--force"); cmd.assert().success(); }} @@ -280,7 +281,7 @@ fn compile_failure_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute"); + cmd.arg("execute").arg("--force"); cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); }} diff --git a/noir/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/tooling/nargo_cli/src/cli/check_cmd.rs index e2db492fe9c..a8b9dbdeeb2 100644 --- a/noir/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/check_cmd.rs @@ -6,7 +6,7 @@ use fm::FileManager; use iter_extended::btree_map; use nargo::{ errors::CompileError, insert_all_files_for_workspace_into_file_manager, package::Package, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; @@ -16,7 +16,7 @@ use noirc_driver::{ }; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::Context, + hir::{Context, ParsedFiles}, }; use super::fs::write_to_file; @@ -54,9 +54,10 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); for package in &workspace { - check_package(&workspace_file_manager, package, &args.compile_options)?; + check_package(&workspace_file_manager, &parsed_files, package, &args.compile_options)?; println!("[{}] Constraint system successfully built!", package.name); } Ok(()) @@ -64,10 +65,11 @@ pub(crate) fn run( fn check_package( file_manager: &FileManager, + parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, ) -> Result<(), CompileError> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 1eb8153ce9b..8bf12ee4100 100644 --- a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -6,12 +6,8 @@ use super::{ use crate::backends::Backend; use crate::errors::CliError; -use acvm::ExpressionWidth; use clap::Args; -use fm::FileManager; -use nargo::insert_all_files_for_workspace_into_file_manager; -use nargo::package::Package; -use 
nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; @@ -48,18 +44,20 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info()?; for package in &workspace { - let smart_contract_string = smart_contract_for_package( + let program = compile_bin_package( &workspace_file_manager, - &workspace, - backend, + &parsed_files, package, &args.compile_options, expression_width, )?; + let smart_contract_string = backend.eth_contract(&program.circuit)?; + let contract_dir = workspace.contracts_directory_path(package); create_named_dir(&contract_dir, "contract"); let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); @@ -70,17 +68,3 @@ pub(crate) fn run( Ok(()) } - -fn smart_contract_for_package( - file_manager: &FileManager, - workspace: &Workspace, - backend: &Backend, - package: &Package, - compile_options: &CompileOptions, - expression_width: ExpressionWidth, -) -> Result { - let program = - compile_bin_package(file_manager, workspace, package, compile_options, expression_width)?; - - Ok(backend.eth_contract(&program.circuit)?) -} diff --git a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs index 9e739f5c818..aa9a46f39ef 100644 --- a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -5,10 +5,10 @@ use acvm::ExpressionWidth; use fm::FileManager; use nargo::artifacts::program::ProgramArtifact; use nargo::errors::CompileError; -use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::ops::{compile_contract, compile_program}; use nargo::package::Package; -use nargo::prepare_package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::file_manager_with_stdlib; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; @@ -17,6 +17,7 @@ use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, Compiled use noirc_frontend::graph::CrateName; use clap::Args; +use noirc_frontend::hir::ParsedFiles; use crate::backends::Backend; use crate::errors::CliError; @@ -60,24 +61,28 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - - let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace - .into_iter() - .filter(|package| !package.is_library()) - .cloned() - .partition(|package| package.is_binary()); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info_or_default(); - let (_, compiled_contracts) = compile_workspace( + let (compiled_program, compiled_contracts) = compile_workspace( &workspace_file_manager, + &parsed_files, &workspace, - &binary_packages, - &contract_packages, expression_width, &args.compile_options, )?; + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace + .into_iter() + 
.filter(|package| !package.is_library())
+        .cloned()
+        .partition(|package| package.is_binary());
+
     // Save build artifacts to disk.
+    let only_acir = args.compile_options.only_acir;
+    for (package, program) in binary_packages.into_iter().zip(compiled_program) {
+        save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir);
+    }
     for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) {
         save_contract(contract, &package, &circuit_dir);
     }
@@ -87,22 +92,43 @@ pub(super) fn compile_workspace(
     file_manager: &FileManager,
+    parsed_files: &ParsedFiles,
     workspace: &Workspace,
-    binary_packages: &[Package],
-    contract_packages: &[Package],
     expression_width: ExpressionWidth,
     compile_options: &CompileOptions,
 ) -> Result<(Vec<CompiledProgram>, Vec<CompiledContract>), CliError> {
+    let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace
+        .into_iter()
+        .filter(|package| !package.is_library())
+        .cloned()
+        .partition(|package| package.is_binary());
+
     // Compile all of the packages in parallel.
     let program_results: Vec<CompilationResult<CompiledProgram>> = binary_packages
         .par_iter()
         .map(|package| {
-            compile_program(file_manager, workspace, package, compile_options, expression_width)
+            let program_artifact_path = workspace.package_build_path(package);
+            let cached_program: Option<CompiledProgram> =
+                read_program_from_file(program_artifact_path)
+                    .ok()
+                    .filter(|p| p.noir_version == NOIR_ARTIFACT_VERSION_STRING)
+                    .map(|p| p.into());
+
+            compile_program(
+                file_manager,
+                parsed_files,
+                package,
+                compile_options,
+                expression_width,
+                cached_program,
+            )
         })
         .collect();
     let contract_results: Vec<CompilationResult<CompiledContract>> = contract_packages
         .par_iter()
-        .map(|package| compile_contract(file_manager, package, compile_options, expression_width))
+        .map(|package| {
+            compile_contract(file_manager, parsed_files, package, compile_options, expression_width)
+        })
         .collect();
 
     // Report any warnings/errors which were encountered during compilation.
@@ -134,7 +160,7 @@ pub(super) fn compile_workspace(
 
 pub(crate) fn compile_bin_package(
     file_manager: &FileManager,
-    workspace: &Workspace,
+    parsed_files: &ParsedFiles,
     package: &Package,
     compile_options: &CompileOptions,
     expression_width: ExpressionWidth,
@@ -143,8 +169,14 @@ pub(crate) fn compile_bin_package(
         return Err(CompileError::LibraryCrate(package.name.clone()).into());
     }
 
-    let compilation_result =
-        compile_program(file_manager, workspace, package, compile_options, expression_width);
+    let compilation_result = compile_program(
+        file_manager,
+        parsed_files,
+        package,
+        compile_options,
+        expression_width,
+        None,
+    );
 
     let program = report_errors(
         compilation_result,
@@ -156,53 +188,6 @@ pub(crate) fn compile_bin_package(
 
     Ok(program)
 }
 
-fn compile_program(
-    file_manager: &FileManager,
-    workspace: &Workspace,
-    package: &Package,
-    compile_options: &CompileOptions,
-    expression_width: ExpressionWidth,
-) -> CompilationResult<CompiledProgram> {
-    let (mut context, crate_id) = prepare_package(file_manager, package);
-
-    let program_artifact_path = workspace.package_build_path(package);
-    let cached_program: Option<CompiledProgram> =
-        read_program_from_file(program_artifact_path)
-            .ok()
-            .filter(|p| p.noir_version == NOIR_ARTIFACT_VERSION_STRING)
-            .map(|p| p.into());
-
-    let (program, warnings) =
-        noirc_driver::compile_main(&mut context, crate_id, compile_options, cached_program)?;
-
-    // Apply backend specific optimizations.
- let optimized_program = nargo::ops::optimize_program(program, expression_width); - let only_acir = compile_options.only_acir; - save_program(optimized_program.clone(), package, &workspace.target_directory_path(), only_acir); - - Ok((optimized_program, warnings)) -} - -fn compile_contract( - file_manager: &FileManager, - package: &Package, - compile_options: &CompileOptions, - expression_width: ExpressionWidth, -) -> CompilationResult { - let (mut context, crate_id) = prepare_package(file_manager, package); - let (contract, warnings) = - match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { - Ok(contracts_and_warnings) => contracts_and_warnings, - Err(errors) => { - return Err(errors); - } - }; - - let optimized_contract = nargo::ops::optimize_contract(contract, expression_width); - - Ok((optimized_contract, warnings)) -} - pub(super) fn save_program( program: CompiledProgram, package: &Package, diff --git a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs index 29e696ea608..9798cbedfeb 100644 --- a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -2,8 +2,8 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; use noirc_driver::{ @@ -70,10 +70,11 @@ fn load_and_compile_project( let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let compiled_program = compile_bin_package( &workspace_file_manager, - &workspace, + &parsed_files, package, &CompileOptions::default(), expression_width, diff --git a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs index f78a683aa8f..e62cbc11ec8 100644 --- a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -6,8 +6,8 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; @@ -57,6 +57,7 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -67,7 +68,7 @@ pub(crate) fn run( let compiled_program = compile_bin_package( &workspace_file_manager, - &workspace, + &parsed_files, package, &args.compile_options, expression_width, diff --git a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs index 7f695c42fa4..cf0d46a0718 100644 --- a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,9 
+5,9 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; @@ -66,12 +66,13 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info_or_default(); for package in &workspace { let compiled_program = compile_bin_package( &workspace_file_manager, - &workspace, + &parsed_files, package, &args.compile_options, expression_width, diff --git a/noir/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/tooling/nargo_cli/src/cli/export_cmd.rs index ac3e93e09b7..feaa55857e5 100644 --- a/noir/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/export_cmd.rs @@ -1,13 +1,14 @@ use nargo::errors::CompileError; use noirc_errors::FileDiagnostic; +use noirc_frontend::hir::ParsedFiles; use rayon::prelude::*; use fm::FileManager; use iter_extended::try_vecmap; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::prepare_package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram, @@ -60,6 +61,7 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let library_packages: Vec<_> = workspace.into_iter().filter(|package| package.is_library()).collect(); @@ -69,6 +71,7 @@ pub(crate) fn run( .map(|package| { compile_exported_functions( &workspace_file_manager, + &parsed_files, &workspace, package, &args.compile_options, @@ -79,11 +82,12 @@ pub(crate) fn run( fn compile_exported_functions( file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/noir/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/tooling/nargo_cli/src/cli/info_cmd.rs index f983a19c0fd..8dfff67b47f 100644 --- a/noir/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/info_cmd.rs @@ -6,7 +6,7 @@ use clap::Args; use iter_extended::vecmap; use nargo::{ artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, - package::Package, + package::Package, parse_all, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ @@ -67,19 +67,13 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); 
insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - - let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace - .into_iter() - .filter(|package| !package.is_library()) - .cloned() - .partition(|package| package.is_binary()); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info_or_default(); let (compiled_programs, compiled_contracts) = compile_workspace( &workspace_file_manager, + &parsed_files, &workspace, - &binary_packages, - &contract_packages, expression_width, &args.compile_options, )?; @@ -101,11 +95,12 @@ pub(crate) fn run( } } + let binary_packages = + workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); let program_info = binary_packages - .into_par_iter() - .zip(compiled_programs) + .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, &package, expression_width) + count_opcodes_and_gates_in_program(backend, program, package, expression_width) }) .collect::>()?; diff --git a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs index 167ab541bc5..d02464fd6df 100644 --- a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,8 +1,8 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; use noirc_driver::{ @@ -66,12 +66,13 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info()?; for package in &workspace { let program = compile_bin_package( &workspace_file_manager, - &workspace, + &parsed_files, package, &args.compile_options, expression_width, diff --git a/noir/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/tooling/nargo_cli/src/cli/test_cmd.rs index 69f03b49cbd..5db842609e5 100644 --- a/noir/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/test_cmd.rs @@ -8,11 +8,14 @@ use nargo::{ insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, package::Package, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::{graph::CrateName, hir::FunctionNameMatch}; +use noirc_frontend::{ + graph::CrateName, + hir::{FunctionNameMatch, ParsedFiles}, +}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError}; @@ -66,6 +69,7 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let pattern = match &args.test_name { Some(name) => { @@ -84,6 +88,7 @@ pub(crate) fn run( // TODO: We should run the whole suite 
even if there are failures in a package run_tests( &workspace_file_manager, + &parsed_files, &blackbox_solver, package, pattern, @@ -96,8 +101,10 @@ pub(crate) fn run( Ok(()) } +#[allow(clippy::too_many_arguments)] fn run_tests( file_manager: &FileManager, + parsed_files: &ParsedFiles, blackbox_solver: &S, package: &Package, fn_name: FunctionNameMatch, @@ -105,7 +112,7 @@ fn run_tests( foreign_call_resolver_url: Option<&str>, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (mut context, crate_id) = prepare_package(file_manager, package); + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs index 86d5e774cbe..1701b9e063c 100644 --- a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -7,9 +7,9 @@ use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; use noirc_driver::{ @@ -53,12 +53,13 @@ pub(crate) fn run( let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); let expression_width = backend.get_backend_info()?; for package in &workspace { let program = compile_bin_package( &workspace_file_manager, - &workspace, + &parsed_files, package, &args.compile_options, expression_width, diff --git a/noir/tooling/noir_codegen/package.json b/noir/tooling/noir_codegen/package.json index 7d76b1a9138..60ccf5ec2a5 100644 --- a/noir/tooling/noir_codegen/package.json +++ b/noir/tooling/noir_codegen/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.22.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/tooling/noir_js/package.json b/noir/tooling/noir_js/package.json index ed2fd225810..356909a1e35 100644 --- a/noir/tooling/noir_js/package.json +++ b/noir/tooling/noir_js/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.22.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/tooling/noir_js_backend_barretenberg/package.json b/noir/tooling/noir_js_backend_barretenberg/package.json index e22ea2ff49d..cd2a6354ac4 100644 --- a/noir/tooling/noir_js_backend_barretenberg/package.json +++ b/noir/tooling/noir_js_backend_barretenberg/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.22.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/tooling/noir_js_types/package.json b/noir/tooling/noir_js_types/package.json index 0276b8d087c..ef75f3d2fb3 100644 --- a/noir/tooling/noir_js_types/package.json +++ b/noir/tooling/noir_js_types/package.json @@ -4,7 +4,7 @@ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.22.0", + "version": "0.23.0", "license": "(MIT OR Apache-2.0)", 
"homepage": "https://noir-lang.org/", "repository": { diff --git a/noir/tooling/noirc_abi_wasm/package.json b/noir/tooling/noirc_abi_wasm/package.json index d023e1e4391..db0f6c29153 100644 --- a/noir/tooling/noirc_abi_wasm/package.json +++ b/noir/tooling/noirc_abi_wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.22.0", + "version": "0.23.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/noir/yarn.lock b/noir/yarn.lock index e7822f59bdc..db3f493bc62 100644 --- a/noir/yarn.lock +++ b/noir/yarn.lock @@ -221,6 +221,20 @@ __metadata: languageName: node linkType: hard +"@aztec/bb.js@npm:0.16.0": + version: 0.16.0 + resolution: "@aztec/bb.js@npm:0.16.0" + dependencies: + comlink: ^4.4.1 + commander: ^10.0.1 + debug: ^4.3.4 + tslib: ^2.4.0 + bin: + bb.js: dest/node/main.js + checksum: 5f68b4ad16284a3a871e0ad21fea05aed670383bc639c9d07ab3bf9b7a9d15cc8a4e5cda404a9290775ad5023924739543a8aac37d602892dd1fb5087521970b + languageName: node + linkType: hard + "@aztec/bb.js@npm:0.19.0": version: 0.19.0 resolution: "@aztec/bb.js@npm:0.19.0" @@ -4381,6 +4395,13 @@ __metadata: languageName: node linkType: hard +"@noir-lang/acvm_js@npm:0.38.0": + version: 0.38.0 + resolution: "@noir-lang/acvm_js@npm:0.38.0" + checksum: 42a5bba45135d1df0d0eb3f7b65439733e016580bad610e859e140638d42200d6b856ff11c4b30417b74ce011da7c39861aafb1c5b8c7211de2172aea449c635 + languageName: node + linkType: hard + "@noir-lang/acvm_js@workspace:*, @noir-lang/acvm_js@workspace:acvm-repo/acvm_js": version: 0.0.0-use.local resolution: "@noir-lang/acvm_js@workspace:acvm-repo/acvm_js" @@ -4399,7 +4420,18 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/backend_barretenberg@^0.22.0, @noir-lang/backend_barretenberg@workspace:*, @noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg": +"@noir-lang/backend_barretenberg@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/backend_barretenberg@npm:0.22.0" + dependencies: + "@aztec/bb.js": 0.16.0 + "@noir-lang/types": 0.22.0 + fflate: ^0.8.0 + checksum: ead456218ba61d925e0fc5b47d1b94272e980b44a220f1262fb6cdc73cff7cd4232ddc69dd67bb21e50f0b43e7696d4a96fde15e3eadc0bf223ec6d59e014e23 + languageName: node + linkType: hard + +"@noir-lang/backend_barretenberg@workspace:*, @noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg": version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: @@ -4443,7 +4475,18 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/noir_js@^0.22.0, @noir-lang/noir_js@workspace:*, @noir-lang/noir_js@workspace:tooling/noir_js": +"@noir-lang/noir_js@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/noir_js@npm:0.22.0" + dependencies: + "@noir-lang/acvm_js": 0.38.0 + "@noir-lang/noirc_abi": 0.22.0 + "@noir-lang/types": 0.22.0 + checksum: 3b0873ad87521415af11208bebe5690191d03fa06dcd515789f0a63f7641146cdcb01d292b208452856ea3967e196c8332cb2618e013f9e7e5ce7d6e09de043d + languageName: node + linkType: hard + +"@noir-lang/noir_js@workspace:*, @noir-lang/noir_js@workspace:tooling/noir_js": version: 0.0.0-use.local resolution: "@noir-lang/noir_js@workspace:tooling/noir_js" dependencies: @@ -4466,7 +4509,14 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/noir_wasm@^0.22.0, @noir-lang/noir_wasm@workspace:*, @noir-lang/noir_wasm@workspace:compiler/wasm": +"@noir-lang/noir_wasm@npm:^0.22.0": + version: 0.22.0 + resolution: 
"@noir-lang/noir_wasm@npm:0.22.0" + checksum: 7ac0ca170bf312df761d7ccfd32a67a27f88f15ad4eed1807864295d761d3b2176ffb82f4c6931e1bc06b225d6f738519962c79ffbce9a33d5ef8a6a2bdea82c + languageName: node + linkType: hard + +"@noir-lang/noir_wasm@workspace:*, @noir-lang/noir_wasm@workspace:compiler/wasm": version: 0.0.0-use.local resolution: "@noir-lang/noir_wasm@workspace:compiler/wasm" dependencies: @@ -4510,6 +4560,13 @@ __metadata: languageName: unknown linkType: soft +"@noir-lang/noirc_abi@npm:0.22.0": + version: 0.22.0 + resolution: "@noir-lang/noirc_abi@npm:0.22.0" + checksum: a250c6cc5ca37fcf02663f8d6b027776f0e58920fb8f8a84efcf74f079f235bb11bbad682ba332211d9b9a79b6a3eb7faede7701cd88582b682971a41ca6212d + languageName: node + linkType: hard + "@noir-lang/noirc_abi@workspace:*, @noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm": version: 0.0.0-use.local resolution: "@noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm" @@ -4540,7 +4597,16 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/types@^0.22.0, @noir-lang/types@workspace:*, @noir-lang/types@workspace:tooling/noir_js_types": +"@noir-lang/types@npm:0.22.0, @noir-lang/types@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/types@npm:0.22.0" + dependencies: + "@noir-lang/noirc_abi": 0.22.0 + checksum: 5dd1badf0449c518e755172de1d2f2c1b95bfaf7b7328b7de00b8ce9ba68bd447ca65e827185da7d737e7e88dcaf296b29687ffe2e1f5b4d5cc31ce3e3b4f208 + languageName: node + linkType: hard + +"@noir-lang/types@workspace:*, @noir-lang/types@workspace:tooling/noir_js_types": version: 0.0.0-use.local resolution: "@noir-lang/types@workspace:tooling/noir_js_types" dependencies: diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 5e7bbb29474..53d6be96445 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -2784,7 +2784,7 @@ __metadata: resolution: "@noir-lang/backend_barretenberg@portal:../noir/packages/backend_barretenberg::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: "@aztec/bb.js": 0.19.0 - "@noir-lang/types": 0.22.0 + "@noir-lang/types": 0.23.0 fflate: ^0.8.0 languageName: node linkType: soft @@ -2793,9 +2793,9 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/noir_js@portal:../noir/packages/noir_js::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/acvm_js": 0.38.0 - "@noir-lang/noirc_abi": 0.22.0 - "@noir-lang/types": 0.22.0 + "@noir-lang/acvm_js": 0.39.0 + "@noir-lang/noirc_abi": 0.23.0 + "@noir-lang/types": 0.23.0 languageName: node linkType: soft @@ -2809,7 +2809,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/types@portal:../noir/packages/types::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/noirc_abi": 0.22.0 + "@noir-lang/noirc_abi": 0.23.0 languageName: node linkType: soft