diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/aztec-nr.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/aztec-nr.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-contracts.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-contracts.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/blob.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/blob.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/parity-lib.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/parity-lib.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/private-kernel-lib.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/private-kernel-lib.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/reset-kernel-lib.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/rollup-lib.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/rollup-lib.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/types.failures.jsonl b/.github/critical_libraries_status/AztecProtocol/aztec-packages/noir-projects/noir-protocol-circuits/crates/types.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/README.md b/.github/critical_libraries_status/README.md new file mode 100644 index 00000000000..47e9d7ad6ed --- /dev/null +++ b/.github/critical_libraries_status/README.md @@ -0,0 +1,20 @@ +# Critical Libraries Status + +This directory contains one `.failures.jsonl` file per external directory that is checked by CI. +CI will run the external repository tests and compare the test failures against those recorded +in these files. If there's a difference, CI will fail. + +This allows us to mark some tests as expected to fail if we introduce breaking changes. +When tests are fixed on the external repository, CI will let us know that we need to remove +the `.failures.jsonl` failures on our side. 
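+
+These entries are compared against `nargo`'s JSON test output by `.github/scripts/check_test_results.sh`. As a rough sketch (assuming the external library compiles and `jq` is available), the expected entries can be regenerated locally with the same filter that script uses:
+
+```bash
+nargo test --silence-warnings --format json \
+  | jq -c 'select(.type == "test" and .event == "failed") | {suite: .suite, name: .name}' \
+  | jq -s -c 'sort_by(.suite, .name) | .[]'
+```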
+ +The format of the `.failures.jsonl` files is one JSON per line with a failure: + +```json +{"suite":"one","name":"foo"} +``` + +If it's expected that an external repository doesn't compile (because a PR introduces breaking changes +to, say, the type system) you can remove the `.failures.jsonl` file for that repository and CI +will pass again. Once the repository compiles again, CI will let us know and require us to put +back the `.failures.jsonl` file. \ No newline at end of file diff --git a/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl b/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl new file mode 100644 index 00000000000..cb56f792778 --- /dev/null +++ b/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl @@ -0,0 +1,4 @@ +{"event":"started","name":"one","test_count":1,"type":"suite"} +{"event":"started","name":"foo","suite":"one","type":"test"} +{"event":"failed","exec_time":0.05356625,"name":"foo","suite":"one","type":"test"} +{"event":"ok","failed":0,"ignored":0,"passed":1,"type":"suite"} \ No newline at end of file diff --git a/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl.jq b/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl.jq new file mode 100644 index 00000000000..1123e7e68e4 --- /dev/null +++ b/.github/critical_libraries_status/noir-lang/ec/.actual.jsonl.jq @@ -0,0 +1 @@ +{"suite":"one","name":"foo"} diff --git a/.github/critical_libraries_status/noir-lang/ec/.failures.jsonl b/.github/critical_libraries_status/noir-lang/ec/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/ec/.failures.jsonl.jq b/.github/critical_libraries_status/noir-lang/ec/.failures.jsonl.jq new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/eddsa/.failures.jsonl b/.github/critical_libraries_status/noir-lang/eddsa/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/mimc/.failures.jsonl b/.github/critical_libraries_status/noir-lang/mimc/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir-bignum/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir-bignum/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir-edwards/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir-edwards/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir_base64/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir_base64/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir_bigcurve/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir_bigcurve/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir_json_parser/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir_json_parser/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir_rsa/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir_rsa/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/.github/critical_libraries_status/noir-lang/noir_sort/.failures.jsonl 
b/.github/critical_libraries_status/noir-lang/noir_sort/.failures.jsonl
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/.github/critical_libraries_status/noir-lang/noir_string_search/.failures.jsonl b/.github/critical_libraries_status/noir-lang/noir_string_search/.failures.jsonl
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/.github/critical_libraries_status/noir-lang/schnorr/.failures.jsonl b/.github/critical_libraries_status/noir-lang/schnorr/.failures.jsonl
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/.github/critical_libraries_status/noir-lang/sparse_array/.failures.jsonl b/.github/critical_libraries_status/noir-lang/sparse_array/.failures.jsonl
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/.github/scripts/check_test_results.sh b/.github/scripts/check_test_results.sh
new file mode 100755
index 00000000000..c844c025876
--- /dev/null
+++ b/.github/scripts/check_test_results.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+set -eu
+
+# Usage: ./check_test_results.sh <expected_failures.jsonl> <actual_results.jsonl>
+# Compares the expected and actual test failures of the same repository.
+# If either file doesn't exist or is empty, the script considers that the test suite
+# couldn't be compiled.
+
+function process_json_lines() {
+  cat $1 | jq -c 'select(.type == "test" and .event == "failed") | {suite: .suite, name: .name}' | jq -s -c 'sort_by(.suite, .name) | .[]' > $1.jq
+}
+
+if [ -f $1 ] && [ -f $2 ]; then
+  # Both files exist, let's compare them
+  process_json_lines $2
+  if ! diff $1 $2.jq; then
+    echo "Error: test failures don't match expected failures"
+    echo "Lines prefixed with '>' are new test failures (you could add them to '$1')"
+    echo "Lines prefixed with '<' are tests that were expected to fail but passed (you could remove them from '$1')"
+    exit 1
+  fi
+elif [ -f $1 ]; then
+  # Only the expected file exists, which means the actual tests couldn't be compiled.
+  echo "Error: external library tests couldn't be compiled."
+  echo "You could rename '$1' to '$1.does_not_compile' if it's expected that the external library can't be compiled."
+  exit 1
+elif [ -f $2 ]; then
+  # Only the actual file exists, which means we are expecting the external library
+  # not to compile but it did.
+  echo "Error: expected external library not to compile, but it did."
+  echo "You could create '$1' with these contents:"
+  process_json_lines $2
+  cat $2.jq
+  exit 1
+else
+  # Neither file exists, which means we are expecting the external library not
+  # to compile, and it didn't, so all is good.
+ exit 0 +fi \ No newline at end of file diff --git a/.github/workflows/reports.yml b/.github/workflows/reports.yml index 0a03add8338..37864dd9b68 100644 --- a/.github/workflows/reports.yml +++ b/.github/workflows/reports.yml @@ -225,8 +225,8 @@ jobs: retention-days: 3 overwrite: true - generate_compilation_report: - name: Compilation time + generate_compilation_and_execution_report: + name: Compilation and execution time needs: [build-nargo] runs-on: ubuntu-22.04 permissions: @@ -253,9 +253,14 @@ jobs: working-directory: ./test_programs run: | ./compilation_report.sh - cat compilation_report.json mv compilation_report.json ../compilation_report.json + - name: Generate Execution report + working-directory: ./test_programs + run: | + ./execution_report.sh + mv execution_report.json ../execution_report.json + - name: Upload compilation report uses: actions/upload-artifact@v4 with: @@ -263,8 +268,16 @@ jobs: path: compilation_report.json retention-days: 3 overwrite: true + + - name: Upload execution report + uses: actions/upload-artifact@v4 + with: + name: in_progress_execution_report + path: execution_report.json + retention-days: 3 + overwrite: true - external_repo_compilation_report: + external_repo_compilation_and_execution_report: needs: [build-nargo] runs-on: ubuntu-latest timeout-minutes: 15 @@ -272,7 +285,7 @@ jobs: fail-fast: false matrix: include: - - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts, is_library: true } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-root } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-inner } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-tail } @@ -280,7 +293,7 @@ jobs: - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-private } - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-public } - name: External repo compilation report - ${{ matrix.project.repo }}/${{ matrix.project.path }} + name: External repo compilation and execution reports - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: - name: Download nargo binary uses: actions/download-artifact@v4 @@ -302,6 +315,13 @@ jobs: sparse-checkout: | test_programs/compilation_report.sh sparse-checkout-cone-mode: false + + - uses: actions/checkout@v4 + with: + path: scripts + sparse-checkout: | + test_programs/execution_report.sh + sparse-checkout-cone-mode: false - name: Checkout uses: actions/checkout@v4 @@ -316,9 +336,16 @@ jobs: mv /home/runner/work/noir/noir/scripts/test_programs/compilation_report.sh ./compilation_report.sh chmod +x ./compilation_report.sh ./compilation_report.sh 1 + + - name: Generate execution report + working-directory: ./test-repo/${{ matrix.project.path }} + if: ${{ !matrix.project.is_library }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/execution_report.sh ./execution_report.sh + ./execution_report.sh 1 - name: Move compilation report - id: report + id: compilation_report shell: bash run: | PACKAGE_NAME=${{ matrix.project.path }} @@ -326,18 +353,36 @@ jobs: mv ./test-repo/${{ matrix.project.path }}/compilation_report.json ./compilation_report_$PACKAGE_NAME.json echo "compilation_report_name=$PACKAGE_NAME" >> 
$GITHUB_OUTPUT
 
+      - name: Move execution report
+        id: execution_report
+        shell: bash
+        if: ${{ !matrix.project.is_library }}
+        run: |
+          PACKAGE_NAME=${{ matrix.project.path }}
+          PACKAGE_NAME=$(basename $PACKAGE_NAME)
+          mv ./test-repo/${{ matrix.project.path }}/execution_report.json ./execution_report_$PACKAGE_NAME.json
+          echo "execution_report_name=$PACKAGE_NAME" >> $GITHUB_OUTPUT
+
       - name: Upload compilation report
         uses: actions/upload-artifact@v4
         with:
-          name: compilation_report_${{ steps.report.outputs.compilation_report_name }}
-          path: compilation_report_${{ steps.report.outputs.compilation_report_name }}.json
+          name: compilation_report_${{ steps.compilation_report.outputs.compilation_report_name }}
+          path: compilation_report_${{ steps.compilation_report.outputs.compilation_report_name }}.json
+          retention-days: 3
+          overwrite: true
+
+      - name: Upload execution report
+        uses: actions/upload-artifact@v4
+        with:
+          name: execution_report_${{ steps.execution_report.outputs.execution_report_name }}
+          path: execution_report_${{ steps.execution_report.outputs.execution_report_name }}.json
           retention-days: 3
           overwrite: true
 
   upload_compilation_report:
     name: Upload compilation report
-    needs: [generate_compilation_report, external_repo_compilation_report]
-    # We want this job to run even if one variation of the matrix in `external_repo_compilation_report` fails
+    needs: [generate_compilation_and_execution_report, external_repo_compilation_and_execution_report]
+    # We want this job to run even if one variation of the matrix in `external_repo_compilation_and_execution_report` fails
     if: always()
     runs-on: ubuntu-latest
     permissions:
@@ -490,3 +535,48 @@ jobs:
         with:
           header: memory
           message: ${{ steps.memory_report.outputs.markdown }}
+
+  upload_execution_report:
+    name: Upload execution report
+    needs: [generate_compilation_and_execution_report, external_repo_compilation_and_execution_report]
+    # We want this job to run even if one variation of the matrix in `external_repo_compilation_and_execution_report` fails
+    if: always()
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Download initial execution report
+        uses: actions/download-artifact@v4
+        with:
+          name: in_progress_execution_report
+
+      - name: Download matrix execution reports
+        uses: actions/download-artifact@v4
+        with:
+          pattern: execution_report_*
+          path: ./reports
+
+      - name: Merge execution reports using jq
+        run: |
+          mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh
+          ./merge-bench-reports.sh execution_report
+
+      - name: Parse execution report
+        id: execution_report
+        uses: noir-lang/noir-bench-report@0954121203ee55dcda5c7397b9c669c439a20922
+        with:
+          report: execution_report.json
+          header: |
+            # Execution Report
+          execution_report: true
+
+      - name: Add execution report to sticky comment
+        if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
+        uses: marocchino/sticky-pull-request-comment@v2
+        with:
+          header: execution_time
+          message: ${{ steps.execution_report.outputs.markdown }}
+
diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml
index d227b4eb00b..e593389a971 100644
--- a/.github/workflows/test-js-packages.yml
+++ b/.github/workflows/test-js-packages.yml
@@ -543,8 +543,6 @@ jobs:
   external-repo-checks:
     needs: [build-nargo, critical-library-list]
     runs-on: ubuntu-22.04
-    # Only run when 'run-external-checks' label is present
-    if: contains(github.event.pull_request.labels.*.name, 'run-external-checks')
     timeout-minutes: 30
     strategy:
       fail-fast: false
@@ -557,11 +555,17 @@ jobs:
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-lib }
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-lib }
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/reset-kernel-lib }
-          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-lib }
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/types }
+          # Use 1 test thread for rollup-lib because each test requires a lot of memory, and multiple ones in parallel exceed the maximum memory limit.
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-lib, nargo_args: "--test-threads 1" }
 
     name: Check external repo - ${{ matrix.project.repo }}/${{ matrix.project.path }}
     steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          path: noir-repo
+
       - name: Checkout
         uses: actions/checkout@v4
         with:
@@ -592,9 +596,14 @@
       - name: Run nargo test
         working-directory: ./test-repo/${{ matrix.project.path }}
-        run: nargo test -q --silence-warnings
+        run: |
+          out=$(nargo test --silence-warnings --skip-brillig-constraints-check --format json ${{ matrix.project.nargo_args }}) && echo "$out" > ${{ github.workspace }}/noir-repo/.github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.actual.jsonl
         env:
           NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true
+
+      - name: Compare test results
+        working-directory: ./noir-repo
+        run: .github/scripts/check_test_results.sh .github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.failures.jsonl .github/critical_libraries_status/${{ matrix.project.repo }}/${{ matrix.project.path }}.actual.jsonl
 
 # This is a job which depends on all test jobs and reports the overall status.
 # This allows us to add/remove test jobs without having to update the required workflows.
diff --git a/.gitignore b/.gitignore
index 8442d688fbf..9bfb8a89331 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,6 +36,7 @@
 gates_report.json
 gates_report_brillig.json
 gates_report_brillig_execution.json
 compilation_report.json
+execution_report.json
 
 # Github Actions scratch space
 # This gives a location to download artifacts into the repository in CI without making git dirty.
diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs
index 9318e4d2b5c..1b311504b5c 100644
--- a/compiler/noirc_driver/src/lib.rs
+++ b/compiler/noirc_driver/src/lib.rs
@@ -150,6 +150,10 @@ pub struct CompileOptions {
     /// A lower value keeps the original program if it was smaller, even if it has more jumps.
     #[arg(long, hide = true, allow_hyphen_values = true)]
     pub max_bytecode_increase_percent: Option<i32>,
+
+    /// Used internally to test for non-determinism in the compiler.
+    #[clap(long, hide = true)]
+    pub check_non_determinism: bool,
 }
 
 pub fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> {
diff --git a/compiler/noirc_evaluator/src/acir/mod.rs b/compiler/noirc_evaluator/src/acir/mod.rs
index e7b011b6d7b..43eb7df303f 100644
--- a/compiler/noirc_evaluator/src/acir/mod.rs
+++ b/compiler/noirc_evaluator/src/acir/mod.rs
@@ -2905,7 +2905,6 @@ mod test {
     ssa::{
         function_builder::FunctionBuilder,
         ir::{
-            call_stack::CallStack,
             function::FunctionId,
             instruction::BinaryOp,
             map::Id,
@@ -2932,8 +2931,7 @@ mod test {
             builder.new_function("foo".into(), foo_id, inline_type);
         }
         // Set a call stack for testing whether `brillig_locations` in the `GeneratedAcir` was accurately set.
-        let mut stack = CallStack::unit(Location::dummy());
-        stack.push_back(Location::dummy());
+        let stack = vec![Location::dummy(), Location::dummy()];
         let call_stack = builder.current_function.dfg.call_stack_data.get_or_insert_locations(stack);
         builder.set_call_stack(call_stack);
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs
index be4a6b84bc1..3654a95a03f 100644
--- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs
+++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs
@@ -27,7 +27,7 @@ pub(crate) struct GeneratedBrillig<F> {
     pub(crate) locations: BTreeMap<OpcodeLocation, CallStack>,
     pub(crate) error_types: BTreeMap<ErrorSelector, HirType>,
     pub(crate) name: String,
-    pub(crate) procedure_locations: HashMap<ProcedureId, (OpcodeLocation, OpcodeLocation)>,
+    pub(crate) procedure_locations: BTreeMap<ProcedureId, (OpcodeLocation, OpcodeLocation)>,
 }
 
 #[derive(Default, Debug, Clone)]
@@ -61,7 +61,7 @@ pub(crate) struct BrilligArtifact<F> {
     /// This is created as artifacts are linked together and allows us to determine
     /// which opcodes originate from reusable procedures.
     /// The range is inclusive for both start and end opcode locations.
-    pub(crate) procedure_locations: HashMap<ProcedureId, (OpcodeLocation, OpcodeLocation)>,
+    pub(crate) procedure_locations: BTreeMap<ProcedureId, (OpcodeLocation, OpcodeLocation)>,
 }
 
 /// A pointer to a location in the opcode.
diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs
index ee662c50b75..1e484f8af59 100644
--- a/compiler/noirc_evaluator/src/errors.rs
+++ b/compiler/noirc_evaluator/src/errors.rs
@@ -201,7 +201,7 @@ impl RuntimeError {
             RuntimeError::UnknownLoopBound { .. } => {
                 let primary_message = self.to_string();
                 let location =
-                    self.call_stack().back().expect("Expected RuntimeError to have a location");
+                    self.call_stack().last().expect("Expected RuntimeError to have a location");
 
                 Diagnostic::simple_error(
                     primary_message,
@@ -212,7 +212,7 @@
             _ => {
                 let message = self.to_string();
                 let location =
-                    self.call_stack().back().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}"));
+                    self.call_stack().last().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. Error message: {message}"));
Error message: {message}")); Diagnostic::simple_error(message, String::new(), location.span) } diff --git a/compiler/noirc_evaluator/src/ssa/ir/call_stack.rs b/compiler/noirc_evaluator/src/ssa/ir/call_stack.rs index 113609f4a11..e1df616bc66 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/call_stack.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/call_stack.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use noirc_errors::Location; -pub(crate) type CallStack = im::Vector; +pub(crate) type CallStack = Vec; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub(crate) struct CallStackId(u32); @@ -57,9 +57,9 @@ impl Default for CallStackHelper { impl CallStackHelper { /// Construct a CallStack from a CallStackId pub(crate) fn get_call_stack(&self, mut call_stack: CallStackId) -> CallStack { - let mut result = im::Vector::new(); + let mut result = Vec::new(); while let Some(parent) = self.locations[call_stack.index()].parent { - result.push_back(self.locations[call_stack.index()].value); + result.push(self.locations[call_stack.index()].value); call_stack = parent; } result diff --git a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs index 87e680932c6..eff6576b87f 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs @@ -39,6 +39,8 @@ impl Function { } } + let is_unconstrained = matches!(self.runtime(), RuntimeType::Brillig(_)).into(); + let is_within_unconstrained = self.dfg.make_constant(is_unconstrained, NumericType::bool()); for instruction_id in is_unconstrained_calls { let call_returns = self.dfg.instruction_results(instruction_id); let original_return_id = call_returns[0]; @@ -46,9 +48,6 @@ impl Function { // We replace the result with a fresh id. This will be unused, so the DIE pass will remove the leftover intrinsic call. self.dfg.replace_result(instruction_id, original_return_id); - let is_unconstrained = matches!(self.runtime(), RuntimeType::Brillig(_)).into(); - let is_within_unconstrained = - self.dfg.make_constant(is_unconstrained, NumericType::bool()); // Replace all uses of the original return value with the constant self.dfg.set_value_from_id(original_return_id, is_within_unconstrained); } diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 2a272236195..ab4256197b9 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -18,6 +18,8 @@ //! //! When unrolling ACIR code, we remove reference count instructions because they are //! only used by Brillig bytecode. +use std::collections::BTreeSet; + use acvm::{acir::AcirField, FieldElement}; use im::HashSet; @@ -117,7 +119,7 @@ pub(super) struct Loop { back_edge_start: BasicBlockId, /// All the blocks contained within the loop, including `header` and `back_edge_start`. 
-    pub(super) blocks: HashSet<BasicBlockId>,
+    pub(super) blocks: BTreeSet<BasicBlockId>,
 }
 
 pub(super) struct Loops {
@@ -238,7 +240,7 @@ impl Loop {
         back_edge_start: BasicBlockId,
         cfg: &ControlFlowGraph,
     ) -> Self {
-        let mut blocks = HashSet::default();
+        let mut blocks = BTreeSet::default();
         blocks.insert(header);
 
         let mut insert = |block, stack: &mut Vec<BasicBlockId>| {
diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs
index 0ce21f9f798..bd65fc33377 100644
--- a/compiler/noirc_frontend/src/ast/statement.rs
+++ b/compiler/noirc_frontend/src/ast/statement.rs
@@ -1,5 +1,4 @@
 use std::fmt::Display;
-use std::sync::atomic::{AtomicU32, Ordering};
 
 use acvm::acir::AcirField;
 use acvm::FieldElement;
@@ -841,10 +840,9 @@ impl ForRange {
         block: Expression,
         for_loop_span: Span,
     ) -> Statement {
-        /// Counter used to generate unique names when desugaring
-        /// code in the parser requires the creation of fresh variables.
-        /// The parser is stateless so this is a static global instead.
-        static UNIQUE_NAME_COUNTER: AtomicU32 = AtomicU32::new(0);
+        // Counter used to generate unique names when desugaring
+        // code in the parser requires the creation of fresh variables.
+        let mut unique_name_counter: u32 = 0;
 
         match self {
             ForRange::Range(..) => {
@@ -855,7 +853,8 @@
                 let start_range = ExpressionKind::integer(FieldElement::zero());
                 let start_range = Expression::new(start_range, array_span);
-                let next_unique_id = UNIQUE_NAME_COUNTER.fetch_add(1, Ordering::Relaxed);
+                let next_unique_id = unique_name_counter;
+                unique_name_counter += 1;
                 let array_name = format!("$i{next_unique_id}");
                 let array_span = array.span;
                 let array_ident = Ident::new(array_name, array_span);
@@ -888,7 +887,7 @@
                 }));
                 let end_range = Expression::new(end_range, array_span);
 
-                let next_unique_id = UNIQUE_NAME_COUNTER.fetch_add(1, Ordering::Relaxed);
+                let next_unique_id = unique_name_counter;
                 let index_name = format!("$i{next_unique_id}");
                 let fresh_identifier = Ident::new(index_name.clone(), array_span);
diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs
index 4a5947f15e3..55144f8944a 100644
--- a/compiler/noirc_frontend/src/elaborator/mod.rs
+++ b/compiler/noirc_frontend/src/elaborator/mod.rs
@@ -4,8 +4,8 @@ use std::{
 };
 
 use crate::{
-    ast::ItemVisibility, hir_def::traits::ResolvedTraitBound, usage_tracker::UsageTracker,
-    StructField, StructType, TypeBindings,
+    ast::ItemVisibility, hir_def::traits::ResolvedTraitBound, node_interner::GlobalValue,
+    usage_tracker::UsageTracker, StructField, StructType, TypeBindings,
 };
 use crate::{
     ast::{
@@ -1689,7 +1689,7 @@ impl<'context> Elaborator<'context> {
 
         self.debug_comptime(location, |interner| value.display(interner).to_string());
 
-        self.interner.get_global_mut(global_id).value = Some(value);
+        self.interner.get_global_mut(global_id).value = GlobalValue::Resolved(value);
     }
 }
diff --git a/compiler/noirc_frontend/src/elaborator/path_resolution.rs b/compiler/noirc_frontend/src/elaborator/path_resolution.rs
index a68991becb7..51673342fef 100644
--- a/compiler/noirc_frontend/src/elaborator/path_resolution.rs
+++ b/compiler/noirc_frontend/src/elaborator/path_resolution.rs
@@ -9,7 +9,7 @@ use crate::hir::resolution::visibility::item_in_module_is_visible;
 use crate::locations::ReferencesTracker;
 use crate::node_interner::{FuncId, GlobalId, StructId, TraitId, TypeAliasId};
-use crate::Type;
+use crate::{Shared, Type, TypeAlias};
 
 use super::types::SELF_TYPE_NAME;
 use super::Elaborator;
@@ -218,18 +218,8 @@ impl<'context> Elaborator<'context> {
                 ),
                 ModuleDefId::TypeAliasId(id) => {
                     let type_alias = self.interner.get_type_alias(id);
-                    let type_alias = type_alias.borrow();
-
-                    let module_id = match &type_alias.typ {
-                        Type::Struct(struct_id, _generics) => struct_id.borrow().id.module_id(),
-                        Type::Error => {
-                            return Err(PathResolutionError::Unresolved(last_ident.clone()));
-                        }
-                        _ => {
-                            // For now we only allow type aliases that point to structs.
-                            // The more general case is captured here: https://github.com/noir-lang/noir/issues/6398
-                            panic!("Type alias in path not pointing to struct not yet supported")
-                        }
+                    let Some(module_id) = get_type_alias_module_def_id(&type_alias) else {
+                        return Err(PathResolutionError::Unresolved(last_ident.clone()));
                     };
 
                     (
@@ -345,3 +335,18 @@ fn merge_intermediate_path_resolution_item_with_module_def_id(
         },
     }
 }
+
+fn get_type_alias_module_def_id(type_alias: &Shared<TypeAlias>) -> Option<ModuleId> {
+    let type_alias = type_alias.borrow();
+
+    match &type_alias.typ {
+        Type::Struct(struct_id, _generics) => Some(struct_id.borrow().id.module_id()),
+        Type::Alias(type_alias, _generics) => get_type_alias_module_def_id(type_alias),
+        Type::Error => None,
+        _ => {
+            // For now we only allow type aliases that point to structs.
+            // The more general case is captured here: https://github.com/noir-lang/noir/issues/6398
+            panic!("Type alias in path not pointing to struct not yet supported")
+        }
+    }
+}
diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs
index 0406321de42..133473219f6 100644
--- a/compiler/noirc_frontend/src/elaborator/patterns.rs
+++ b/compiler/noirc_frontend/src/elaborator/patterns.rs
@@ -604,17 +604,11 @@ impl<'context> Elaborator<'context> {
                     alias_generics
                 };
 
-                // Now instantiate the underlying struct with those generics, the struct might
+                // Now instantiate the underlying struct or alias with those generics, the struct might
                 // have more generics than those in the alias, like in this example:
                 //
                 // type Alias<T> = Struct<T, i32>;
-                let typ = type_alias.get_type(&generics);
-                let Type::Struct(_, generics) = typ else {
-                    // See https://github.com/noir-lang/noir/issues/6398
-                    panic!("Expected type alias to point to struct")
-                };
-
-                generics
+                get_type_alias_generics(&type_alias, &generics)
             }
             PathResolutionItem::TraitFunction(trait_id, Some(generics), _func_id) => {
                 let trait_ = self.interner.get_trait(trait_id);
@@ -880,3 +874,14 @@ impl<'context> Elaborator<'context> {
         (id, typ)
     }
 }
+
+fn get_type_alias_generics(type_alias: &TypeAlias, generics: &[Type]) -> Vec<Type> {
+    let typ = type_alias.get_type(generics);
+    match typ {
+        Type::Struct(_, generics) => generics,
+        Type::Alias(type_alias, generics) => {
+            get_type_alias_generics(&type_alias.borrow(), &generics)
+        }
+        _ => panic!("Expected type alias to point to struct or alias"),
+    }
+}
diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs
index 3551edf93f7..9aafd690bb6 100644
--- a/compiler/noirc_frontend/src/elaborator/types.rs
+++ b/compiler/noirc_frontend/src/elaborator/types.rs
@@ -28,8 +28,8 @@ use crate::{
         traits::{NamedType, ResolvedTraitBound, Trait, TraitConstraint},
     },
     node_interner::{
-        DependencyId, ExprId, ImplSearchErrorKind, NodeInterner, TraitId, TraitImplKind,
-        TraitMethodId,
+        DependencyId, ExprId, GlobalValue, ImplSearchErrorKind, NodeInterner, TraitId,
+        TraitImplKind, TraitMethodId,
     },
     token::SecondaryAttribute,
     Generics, Kind, ResolvedGeneric, Type, TypeBinding, TypeBindings,
UnificationError, @@ -426,7 +426,8 @@ impl<'context> Elaborator<'context> { let rhs = stmt.expression; let span = self.interner.expr_span(&rhs); - let Some(global_value) = &self.interner.get_global(id).value else { + let GlobalValue::Resolved(global_value) = &self.interner.get_global(id).value + else { self.push_err(ResolverError::UnevaluatedGlobalType { span }); return None; }; diff --git a/compiler/noirc_frontend/src/hir/comptime/errors.rs b/compiler/noirc_frontend/src/hir/comptime/errors.rs index 3df20b39209..6ff918328a1 100644 --- a/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -243,6 +243,9 @@ pub enum InterpreterError { CannotInterpretFormatStringWithErrors { location: Location, }, + GlobalsDependencyCycle { + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -319,7 +322,8 @@ impl InterpreterError { | InterpreterError::CannotResolveExpression { location, .. } | InterpreterError::CannotSetFunctionBody { location, .. } | InterpreterError::UnknownArrayLength { location, .. } - | InterpreterError::CannotInterpretFormatStringWithErrors { location } => *location, + | InterpreterError::CannotInterpretFormatStringWithErrors { location } + | InterpreterError::GlobalsDependencyCycle { location } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) @@ -674,6 +678,11 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { "Some of the variables to interpolate could not be evaluated".to_string(); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::GlobalsDependencyCycle { location } => { + let msg = "This global recursively depends on itself".to_string(); + let secondary = String::new(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } } } } diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index d717ec84d1f..e9e37243e07 100644 --- a/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -20,6 +20,7 @@ use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, undo_instantiation_bindings, }; +use crate::node_interner::GlobalValue; use crate::token::{FmtStrFragment, Tokens}; use crate::TypeVariable; use crate::{ @@ -568,26 +569,39 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { DefinitionKind::Local(_) => self.lookup(&ident), DefinitionKind::Global(global_id) => { // Avoid resetting the value if it is already known - if let Some(value) = &self.elaborator.interner.get_global(*global_id).value { - Ok(value.clone()) - } else { - let global_id = *global_id; - let crate_of_global = self.elaborator.interner.get_global(global_id).crate_id; - let let_ = - self.elaborator.interner.get_global_let_statement(global_id).ok_or_else( - || { + let global_id = *global_id; + let global_info = self.elaborator.interner.get_global(global_id); + let global_crate_id = global_info.crate_id; + match &global_info.value { + GlobalValue::Resolved(value) => Ok(value.clone()), + GlobalValue::Resolving => { + // Note that the error we issue here isn't very informative (it doesn't include the actual cycle) + // but the general dependency cycle detector will give a better error later on during 
compilation.
+                    let location = self.elaborator.interner.expr_location(&id);
+                    Err(InterpreterError::GlobalsDependencyCycle { location })
+                }
+                GlobalValue::Unresolved => {
+                    let let_ = self
+                        .elaborator
+                        .interner
+                        .get_global_let_statement(global_id)
+                        .ok_or_else(|| {
                             let location = self.elaborator.interner.expr_location(&id);
                             InterpreterError::VariableNotInScope { location }
-                        },
-                    )?;
+                        })?;
 
-                if let_.runs_comptime() || crate_of_global != self.crate_id {
-                    self.evaluate_let(let_.clone())?;
-                }
+                    self.elaborator.interner.get_global_mut(global_id).value =
+                        GlobalValue::Resolving;
+
+                    if let_.runs_comptime() || global_crate_id != self.crate_id {
+                        self.evaluate_let(let_.clone())?;
+                    }
 
-                let value = self.lookup(&ident)?;
-                self.elaborator.interner.get_global_mut(global_id).value = Some(value.clone());
-                Ok(value)
+                    let value = self.lookup(&ident)?;
+                    self.elaborator.interner.get_global_mut(global_id).value =
+                        GlobalValue::Resolved(value.clone());
+                    Ok(value)
+                }
             }
         }
         DefinitionKind::NumericGeneric(type_variable, numeric_typ) => {
@@ -1380,13 +1394,19 @@
     fn evaluate_cast(&mut self, cast: &HirCastExpression, id: ExprId) -> IResult<Value> {
         let evaluated_lhs = self.evaluate(cast.lhs)?;
-        Self::evaluate_cast_one_step(cast, id, evaluated_lhs, self.elaborator.interner)
+        let location = self.elaborator.interner.expr_location(&id);
+        Self::evaluate_cast_one_step(
+            &cast.r#type,
+            location,
+            evaluated_lhs,
+            self.elaborator.interner,
+        )
     }
 
     /// evaluate_cast without recursion
     pub fn evaluate_cast_one_step(
-        cast: &HirCastExpression,
-        id: ExprId,
+        typ: &Type,
+        location: Location,
         evaluated_lhs: Value,
         interner: &NodeInterner,
     ) -> IResult<Value> {
@@ -1417,7 +1437,6 @@
                 (if value { FieldElement::one() } else { FieldElement::zero() }, false)
             }
             value => {
-                let location = interner.expr_location(&id);
                 let typ = value.get_type().into_owned();
                 return Err(InterpreterError::NonNumericCasted { typ, location });
             }
@@ -1434,7 +1453,7 @@
         }
 
         // Now actually cast the lhs, bit casting and wrapping as necessary
-        match cast.r#type.follow_bindings() {
+        match typ.follow_bindings() {
             Type::FieldElement => {
                 if lhs_is_negative {
                     lhs = FieldElement::zero() - lhs;
                 }
@@ -1443,8 +1462,7 @@
             }
             Type::Integer(sign, bit_size) => match (sign, bit_size) {
                 (Signedness::Unsigned, IntegerBitSize::One) => {
-                    let location = interner.expr_location(&id);
-                    Err(InterpreterError::TypeUnsupported { typ: cast.r#type.clone(), location })
+                    Err(InterpreterError::TypeUnsupported { typ: typ.clone(), location })
                 }
                 (Signedness::Unsigned, IntegerBitSize::Eight) => cast_to_int!(lhs, to_u128, u8, U8),
                 (Signedness::Unsigned, IntegerBitSize::Sixteen) => {
@@ -1457,8 +1475,7 @@
                     cast_to_int!(lhs, to_u128, u64, U64)
                 }
                 (Signedness::Signed, IntegerBitSize::One) => {
-                    let location = interner.expr_location(&id);
-                    Err(InterpreterError::TypeUnsupported { typ: cast.r#type.clone(), location })
+                    Err(InterpreterError::TypeUnsupported { typ: typ.clone(), location })
                 }
                 (Signedness::Signed, IntegerBitSize::Eight) => cast_to_int!(lhs, to_i128, i8, I8),
                 (Signedness::Signed, IntegerBitSize::Sixteen) => {
@@ -1472,10 +1489,7 @@
                 }
             },
             Type::Bool => Ok(Value::Bool(!lhs.is_zero() || lhs_is_negative)),
-            typ => {
-                let location = interner.expr_location(&id);
-                Err(InterpreterError::CastToNonNumericType { typ, location })
-            }
+            typ => Err(InterpreterError::CastToNonNumericType { typ, location }),
         }
     }
diff --git a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs
index 3d8ccf78926..2e672e3d0d5 100644
--- a/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs
+++ b/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs
@@ -64,6 +64,7 @@ impl<'local, 'context> Interpreter<'local, 'context> {
             "array_len" => array_len(interner, arguments, location),
             "array_refcount" => Ok(Value::U32(0)),
             "assert_constant" => Ok(Value::Bool(true)),
+            "as_field" => as_field(interner, arguments, location),
             "as_slice" => as_slice(interner, arguments, location),
             "ctstring_eq" => ctstring_eq(arguments, location),
             "ctstring_hash" => ctstring_hash(arguments, location),
@@ -116,6 +117,7 @@
             "field_less_than" => field_less_than(arguments, location),
             "fmtstr_as_ctstring" => fmtstr_as_ctstring(interner, arguments, location),
             "fmtstr_quoted_contents" => fmtstr_quoted_contents(interner, arguments, location),
+            "from_field" => from_field(interner, arguments, return_type, location),
             "fresh_type_variable" => fresh_type_variable(interner),
             "function_def_add_attribute" => function_def_add_attribute(self, arguments, location),
             "function_def_body" => function_def_body(interner, arguments, location),
@@ -290,6 +292,16 @@ fn array_as_str_unchecked(
     Ok(Value::String(Rc::new(string)))
 }
 
+// fn as_field<T>(x: T) -> Field {}
+fn as_field(
+    interner: &NodeInterner,
+    arguments: Vec<(Value, Location)>,
+    location: Location,
+) -> IResult<Value> {
+    let (value, value_location) = check_one_argument(arguments, location)?;
+    Interpreter::evaluate_cast_one_step(&Type::FieldElement, value_location, value, interner)
+}
+
 fn as_slice(
     interner: &NodeInterner,
     arguments: Vec<(Value, Location)>,
@@ -2224,6 +2236,17 @@ fn fmtstr_quoted_contents(
     Ok(Value::Quoted(Rc::new(tokens)))
 }
 
+// fn from_field<T>(x: Field) -> T {}
+fn from_field(
+    interner: &NodeInterner,
+    arguments: Vec<(Value, Location)>,
+    return_type: Type,
+    location: Location,
+) -> IResult<Value> {
+    let (value, value_location) = check_one_argument(arguments, location)?;
+    Interpreter::evaluate_cast_one_step(&return_type, value_location, value, interner)
+}
+
 // fn fresh_type_variable() -> Type
 fn fresh_type_variable(interner: &NodeInterner) -> IResult<Value> {
     Ok(Value::Type(interner.next_type_variable_with_kind(Kind::Any)))
 }
diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs
index 5297a89cc95..8c07d71de21 100644
--- a/compiler/noirc_frontend/src/monomorphization/mod.rs
+++ b/compiler/noirc_frontend/src/monomorphization/mod.rs
@@ -11,7 +11,7 @@
 use crate::ast::{FunctionKind, IntegerBitSize, Signedness, UnaryOp, Visibility};
 use crate::hir::comptime::InterpreterError;
 use crate::hir::type_check::{NoMatchingImplFoundError, TypeCheckError};
-use crate::node_interner::{ExprId, ImplSearchErrorKind};
+use crate::node_interner::{ExprId, GlobalValue, ImplSearchErrorKind};
 use crate::token::FmtStrFragment;
 use crate::{
     debug::DebugInstrumenter,
@@ -895,7 +895,7 @@ impl<'interner> Monomorphizer<'interner> {
             DefinitionKind::Global(global_id) => {
                 let global = self.interner.get_global(*global_id);
 
-                let expr = if let Some(value) = global.value.clone() {
+                let expr = if let GlobalValue::Resolved(value) = global.value.clone() {
                     value
                        .into_hir_expression(self.interner, global.location)
                        .map_err(MonomorphizationError::InterpreterError)?
diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs
index ae30b68c095..bee703a4de8 100644
--- a/compiler/noirc_frontend/src/node_interner.rs
+++ b/compiler/noirc_frontend/src/node_interner.rs
@@ -609,7 +609,14 @@ pub struct GlobalInfo {
     pub crate_id: CrateId,
     pub location: Location,
     pub let_statement: StmtId,
-    pub value: Option<comptime::Value>,
+    pub value: GlobalValue,
+}
+
+#[derive(Debug, Clone)]
+pub enum GlobalValue {
+    Unresolved,
+    Resolving,
+    Resolved(comptime::Value),
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -861,7 +868,7 @@ impl NodeInterner {
             crate_id,
             let_statement,
             location,
-            value: None,
+            value: GlobalValue::Unresolved,
         });
         self.global_attributes.insert(id, attributes);
         id
diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs
index 1aed3d62d30..2d7cf8acca6 100644
--- a/compiler/noirc_frontend/src/tests.rs
+++ b/compiler/noirc_frontend/src/tests.rs
@@ -3855,3 +3855,23 @@ fn disallows_underscore_on_right_hand_side() {
     assert_eq!(name, "_");
 }
+
+#[test]
+fn errors_on_cyclic_globals() {
+    let src = r#"
+    pub comptime global A: u32 = B;
+    pub comptime global B: u32 = A;
+
+    fn main() { }
+    "#;
+    let errors = get_program_errors(src);
+
+    assert!(errors.iter().any(|(error, _)| matches!(
+        error,
+        CompilationError::InterpreterError(InterpreterError::GlobalsDependencyCycle { .. })
+    )));
+    assert!(errors.iter().any(|(error, _)| matches!(
+        error,
+        CompilationError::ResolverError(ResolverError::DependencyCycle { .. })
+    )));
+}
diff --git a/compiler/noirc_frontend/src/tests/aliases.rs b/compiler/noirc_frontend/src/tests/aliases.rs
index 8d3433299f6..1a5621c8adb 100644
--- a/compiler/noirc_frontend/src/tests/aliases.rs
+++ b/compiler/noirc_frontend/src/tests/aliases.rs
@@ -47,3 +47,45 @@ fn alias_in_let_pattern() {
     "#;
     assert_no_errors(src);
 }
+
+#[test]
+fn double_alias_in_path() {
+    let src = r#"
+    struct Foo {}
+
+    impl Foo {
+        fn new() -> Self {
+            Self {}
+        }
+    }
+
+    type FooAlias1 = Foo;
+    type FooAlias2 = FooAlias1;
+
+    fn main() {
+        let _ = FooAlias2::new();
+    }
+    "#;
+    assert_no_errors(src);
+}
+
+#[test]
+fn double_generic_alias_in_path() {
+    let src = r#"
+    struct Foo<T> {}
+
+    impl<T> Foo<T> {
+        fn new() -> Self {
+            Self {}
+        }
+    }
+
+    type FooAlias1<T> = Foo<T>;
+    type FooAlias2<T> = FooAlias1<T>;
+
+    fn main() {
+        let _ = FooAlias2::new();
+    }
+    "#;
+    assert_no_errors(src);
+}
diff --git a/docs/docs/how_to/how-to-solidity-verifier.md b/docs/docs/how_to/how-to-solidity-verifier.mdx
similarity index 64%
rename from docs/docs/how_to/how-to-solidity-verifier.md
rename to docs/docs/how_to/how-to-solidity-verifier.mdx
index 2cc0f8e57ce..da36b60920d 100644
--- a/docs/docs/how_to/how-to-solidity-verifier.md
+++ b/docs/docs/how_to/how-to-solidity-verifier.mdx
@@ -20,24 +20,37 @@ sidebar_position: 0
 pagination_next: tutorials/noirjs_app
 ---
 
-Noir has the ability to generate a verifier contract in Solidity, which can be deployed in many EVM-compatible blockchains such as Ethereum.
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+Noir is universal. The witness and the compiled program can be fed into a proving backend such as Aztec's [Barretenberg](https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg), which can then generate a verifier contract for deployment on blockchains.
 
 This allows for a powerful feature set, as one can make use of the conciseness and the privacy provided by Noir in an immutable ledger. Applications can range from simple P2P guessing games to complex private DeFi interactions.
-This guide shows you how to generate a Solidity Verifier and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that:
+Although not strictly in the domain of Noir itself, this guide shows how to generate a Solidity Verifier with Barretenberg and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that:
 
+- You will be using Barretenberg as your proving backend
+- You will be using an EVM blockchain to verify your proof
 - You are comfortable with the Solidity programming language and understand how contracts are deployed on the Ethereum network
 - You have Noir installed and you have a Noir program. If you don't, [get started](../getting_started/quick_start.md) with Nargo and the example Hello Noir circuit
 - You are comfortable navigating RemixIDE. If you aren't or you need a refresher, you can find some video tutorials [here](https://www.youtube.com/channel/UCjTUPyFEr2xDGN6Cg8nKDaA) that could help you.
 
 ## Rundown
 
-Generating a Solidity Verifier contract is actually a one-command process. However, compiling it and deploying it can have some caveats. Here's the rundown of this guide:
+Generating a Solidity Verifier contract with Barretenberg is actually a one-command process. However, compiling it and deploying it can have some caveats. Here's the rundown of this guide:
 
 1. How to generate a solidity smart contract
 2. How to compile the smart contract in the RemixIDE
 3. How to deploy it to a testnet
 
+:::info[Which proving system to use?]
+
+Barretenberg currently provides two provers: `UltraPlonk` and `UltraHonk`. In a nutshell, `UltraHonk` is faster and uses less RAM, but its verifier contract is much more expensive. `UltraPlonk` is optimized for on-chain verification, but proving is more expensive.
+
+In any case, we provide instructions for both. Choose your poison ☠️
+
+:::
+
 ## Step 1 - Generate a contract
 
 This is by far the most straightforward step. Just run:
 
@@ -46,25 +59,31 @@
 nargo compile
 ```
 
-This will compile your source code into a Noir build artifact to be stored in the `./target` directory, you can then generate the smart contract using the commands:
+This will compile your source code into a Noir build artifact to be stored in the `./target` directory. From here on, it's Barretenberg's work. You can generate the smart contract using the commands:
+
+<Tabs>
+<TabItem value="UltraHonk">
+
+```sh
+bb write_vk_ultra_keccak_honk -b ./target/<circuit_name>.json
+bb contract_ultra_honk
+```
+
+</TabItem>
+<TabItem value="UltraPlonk">
 
 ```sh
-# Here we pass the path to the newly generated Noir artifact.
 bb write_vk -b ./target/<circuit_name>.json
 bb contract
 ```
-replacing `<circuit_name>` with the name of your Noir project. A new `contract` folder would then be generated in your project directory, containing the Solidity
-file `contract.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract.
+
+</TabItem>
+</Tabs>
 
-You can find more information about `bb` and the default Noir proving backend on [this page](../getting_started/quick_start.md#proving-backend).
+replacing `<circuit_name>` with the name of your Noir project. A `Verifier.sol` contract is now in the target folder and can be deployed to any EVM blockchain acting as a verifier smart contract.
 
-:::info
-
-It is possible to generate verifier contracts of Noir programs for other smart contract platforms as long as the proving backend supplies an implementation.
+You can find more information about `bb` and the default Noir proving backend on [this page](../getting_started/quick_start.md#proving-backend).
-
-Barretenberg, the default proving backend for Nargo, supports generation of verifier contracts, for the time being these are only in Solidity.
-:::
 
 ## Step 2 - Compiling
 
@@ -85,17 +104,12 @@
 To compile the verifier, we can navigate to the compilation tab:
 
 ![Compilation Tab](@site/static/img/how-tos/solidity_verifier_2.png)
 
-Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely generate a "Stack too deep" error:
+Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely tell you the contract is too big to deploy on mainnet, or complain about a stack too deep:
 
-![Stack too deep](@site/static/img/how-tos/solidity_verifier_3.png)
+![Contract code too big](@site/static/img/how-tos/solidity_verifier_6.png)
+![Stack too deep](@site/static/img/how-tos/solidity_verifier_8.png)
 
-This is due to the verify function needing to put many variables on the stack, but enabling the optimizer resolves the issue. To do this, let's open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice.
-
-:::info
-
-This time we will see a warning about an unused function parameter. This is expected, as the `verify` function doesn't use the `_proof` parameter inside a solidity block, it is loaded from calldata and used in assembly.
-
-:::
+To avoid this, you can just use some optimization. Open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice.
 
 ![Compilation success](@site/static/img/how-tos/solidity_verifier_4.png)
 
 ## Step 3 - Deploying
 
 At this point we should have a compiled contract ready to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out-of-scope for this guide, so we will just use the default Remix VM.
 
-Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together:
-
-- An `UltraVerificationKey` library which simply stores the verification key for our circuit.
-- An abstract contract `BaseUltraVerifier` containing most of the verifying logic.
-- A main `UltraVerifier` contract that inherits from the Base and uses the Key contract.
-
-Remix will take care of the dependencies for us so we can simply deploy the UltraVerifier contract by selecting it and hitting "deploy":
+Looking closely, we will notice that our "Solidity Verifier" is composed of multiple contracts working together. Remix will take care of the dependencies for us so we can simply deploy the Verifier contract by selecting it and hitting "deploy":
 
-![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png)
+<Tabs>
+<TabItem value="UltraHonk">
 
-A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking that the deployer contract is the correct one.
+![Deploying HonkVerifier](@site/static/img/how-tos/solidity_verifier_7.png)
 
-:::note
+</TabItem>
+<TabItem value="UltraPlonk">
 
-Why "UltraVerifier"?
+![Deploying PlonkVerifier](@site/static/img/how-tos/solidity_verifier_9.png)
 
-To be precise, the Noir compiler (`nargo`) doesn't generate the verifier contract directly. It compiles the Noir code into an intermediate language (ACIR), which is then executed by the backend. So it is the backend that returns the verifier smart contract, not Noir.
+</TabItem>
+</Tabs>
 
-In this case, the Barretenberg Backend uses the UltraPlonk proving system, hence the "UltraVerifier" name.
-
-:::
+A contract will show up in the "Deployed Contracts" section.
 
 ## Step 4 - Verifying
 
-To verify a proof using the Solidity verifier contract, we call the `verify` function in this extended contract:
+To verify a proof using the Solidity verifier contract, we call the `verify` function:
 
 ```solidity
 function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool)
 ```
 
-When using the default example in the [Hello Noir](../getting_started/quick_start.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. Note that the public inputs must be passed in separately to the rest of the proof so we must split the proof as returned from `bb`.
+First generate a proof with `bb`. We need a `Prover.toml` file for our inputs. Run:
 
-First generate a proof with `bb` at the location `./proof` using the steps in [get started](../getting_started/quick_start.md), this proof is in a binary format but we want to convert it into a hex string to pass into Remix, this can be done with the
+```bash
+nargo check
+```
+
+This will generate a `Prover.toml` you can fill with the values you want to prove. We can now execute the circuit with `nargo` and then use the proving backend to prove:
+
+<Tabs>
+<TabItem value="UltraHonk">
+
+```bash
+nargo execute
+bb prove_ultra_keccak_honk -b ./target/<circuit_name>.json -w ./target/<witness_name> -o ./target/proof
+```
+
+:::tip[Public inputs]
+Barretenberg attaches the public inputs to the proof, which in this case isn't very useful. If you're up for some JS, `bb.js` has [a method for it](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/ts/src/proof/index.ts), but in the CLI you can use this ugly snippet:
+
+```bash
+cat ./target/proof | od -An -v -t x1 | tr -d $' \n' | sed 's/^.\{8\}//' | (read hex; echo "${hex:0:192}${hex:256}")
+```
+
+Beautiful. This assumes a circuit with one public input (32 bytes, for Barretenberg). For more inputs, you can just increment `hex:256` with 32 more bytes for each public input.
+
+:::
+
+</TabItem>
+<TabItem value="UltraPlonk">
+
 ```bash
-# This value must be changed to match the number of public inputs (including return values!) in your program.
-NUM_PUBLIC_INPUTS=1
-PUBLIC_INPUT_BYTES=32*NUM_PUBLIC_INPUTS
-HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n')
-HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n')
+nargo execute
+bb prove -b ./target/<circuit_name>.json -w ./target/<witness_name> -o ./target/proof
+```
 
-echo "Public inputs:"
-echo $HEX_PUBLIC_INPUTS
+:::tip[Public inputs]
+Barretenberg attaches the public inputs to the proof, which in this case isn't very useful. If you're up for some JS, `bb.js` has [a method for it](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/ts/src/proof/index.ts), but in the CLI you can use this ugly snippet:
 
-echo "Proof:"
-echo "0x$HEX_PROOF"
 ```
+```bash
+tail -c +33 ./target/proof | od -An -v -t x1 | tr -d $' \n'
+```
+
+Beautiful. This assumes a circuit with one public input (32 bytes, for Barretenberg). For more inputs, you can just add 32 more bytes for each public input to the `tail` command.
+
+:::
+
+</TabItem>
+</Tabs>
 
 Remix expects that the public inputs will be split into an array of `bytes32` values so `HEX_PUBLIC_INPUTS` needs to be split up into 32 byte chunks which are prefixed with `0x` accordingly.
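A minimal shell sketch of that chunking (assuming `HEX_PUBLIC_INPUTS` holds the concatenated hex string produced by one of the snippets above; the example value below is hypothetical):

```bash
# Split the concatenated hex public inputs into 0x-prefixed bytes32 words (64 hex chars each)
HEX_PUBLIC_INPUTS="$(printf '%064x%064x' 3 4)"  # hypothetical: two public inputs, values 3 and 4
for ((i = 0; i < ${#HEX_PUBLIC_INPUTS}; i += 64)); do
  echo "0x${HEX_PUBLIC_INPUTS:$i:64}"
done
```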
A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35):

@@ -188,7 +229,7 @@ the `verify` function will expect the public inputs array (second function param

Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`.

-In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return`.
+In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`.

:::

diff --git a/docs/static/img/how-tos/solidity_verifier_6.png b/docs/static/img/how-tos/solidity_verifier_6.png
new file mode 100644
index 00000000000..c92e874740a
Binary files /dev/null and b/docs/static/img/how-tos/solidity_verifier_6.png differ
diff --git a/docs/static/img/how-tos/solidity_verifier_7.png b/docs/static/img/how-tos/solidity_verifier_7.png
new file mode 100644
index 00000000000..c8541fa544c
Binary files /dev/null and b/docs/static/img/how-tos/solidity_verifier_7.png differ
diff --git a/docs/static/img/how-tos/solidity_verifier_8.png b/docs/static/img/how-tos/solidity_verifier_8.png
new file mode 100644
index 00000000000..125c140acec
Binary files /dev/null and b/docs/static/img/how-tos/solidity_verifier_8.png differ
diff --git a/docs/static/img/how-tos/solidity_verifier_9.png b/docs/static/img/how-tos/solidity_verifier_9.png
new file mode 100644
index 00000000000..150812e3320
Binary files /dev/null and b/docs/static/img/how-tos/solidity_verifier_9.png differ
diff --git a/test_programs/compile_success_empty/comptime_as_field/Nargo.toml b/test_programs/compile_success_empty/comptime_as_field/Nargo.toml
new file mode 100644
index 00000000000..294832ba329
--- /dev/null
+++ b/test_programs/compile_success_empty/comptime_as_field/Nargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "comptime_as_field"
+type = "bin"
+authors = [""]
+
+[dependencies]
diff --git a/test_programs/compile_success_empty/comptime_as_field/src/main.nr b/test_programs/compile_success_empty/comptime_as_field/src/main.nr
new file mode 100644
index 00000000000..e13db54b80b
--- /dev/null
+++ b/test_programs/compile_success_empty/comptime_as_field/src/main.nr
@@ -0,0 +1,6 @@
+fn main() {
+    comptime {
+        let _: U128 = U128::from_integer(1);
+    }
+}
+
diff --git a/test_programs/compile_success_empty/comptime_from_field/Nargo.toml b/test_programs/compile_success_empty/comptime_from_field/Nargo.toml
new file mode 100644
index 00000000000..38a46ba0dbe
--- /dev/null
+++ b/test_programs/compile_success_empty/comptime_from_field/Nargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "comptime_from_field"
+type = "bin"
+authors = [""]
+
+[dependencies]
diff --git a/test_programs/compile_success_empty/comptime_from_field/src/main.nr b/test_programs/compile_success_empty/comptime_from_field/src/main.nr
new file mode 100644
index 00000000000..722c5c7eb32
--- /dev/null
+++ b/test_programs/compile_success_empty/comptime_from_field/src/main.nr
@@ -0,0 +1,6 @@
+fn main() {
+    comptime {
+        let _: Field = U128::from_hex("0x0").to_integer();
+    }
+}
+
diff --git a/test_programs/compile_success_empty/double_generic_alias_in_path/Nargo.toml b/test_programs/compile_success_empty/double_generic_alias_in_path/Nargo.toml
new file mode 100644
index 00000000000..aaebee8d6ef
--- /dev/null
+++ b/test_programs/compile_success_empty/double_generic_alias_in_path/Nargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "double_generic_alias_in_path"
"double_generic_alias_in_path" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/double_generic_alias_in_path/src/main.nr b/test_programs/compile_success_empty/double_generic_alias_in_path/src/main.nr new file mode 100644 index 00000000000..09f2e5c4b43 --- /dev/null +++ b/test_programs/compile_success_empty/double_generic_alias_in_path/src/main.nr @@ -0,0 +1,14 @@ +struct Foo {} + +impl Foo { + fn new() -> Self { + Self {} + } +} + +type FooAlias1 = Foo; +type FooAlias2 = FooAlias1; + +fn main() { + let _ = FooAlias2::new(); +} diff --git a/test_programs/execution_report.sh b/test_programs/execution_report.sh new file mode 100755 index 00000000000..bdb1500d19e --- /dev/null +++ b/test_programs/execution_report.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +set -e + +current_dir=$(pwd) +base_path="$current_dir/execution_success" + +# Tests to be profiled for execution report +tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression") + +echo "{\"execution_reports\": [ " > $current_dir/execution_report.json + +# If there is an argument that means we want to generate a report for only the current directory +if [ "$#" -ne 0 ]; then + base_path="$current_dir" + tests_to_profile=(".") +fi + +ITER="1" +NUM_ARTIFACTS=${#tests_to_profile[@]} + +for dir in ${tests_to_profile[@]}; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + cd $base_path/$dir + + # The default package to run is the supplied list hardcoded at the top of the script + PACKAGE_NAME=$dir + # Otherwise default to the current directory as the package we want to run + if [ "$#" -ne 0 ]; then + PACKAGE_NAME=$(basename $current_dir) + fi + + # Check whether a compilation artifact exists. + # Any programs part of this benchmark should already be compiled. + # We want to make sure that compilation time is not included in the execution time. + if [ ! -e ./target/*.json ]; then + echo "Missing compilation artifact for $PACKAGE_NAME" + exit 1 + fi + + COMPILE_TIME=$((time nargo execute --silence-warnings) 2>&1 | grep real | grep -oE '[0-9]+m[0-9]+.[0-9]+s') + echo -e " {\n \"artifact_name\":\"$PACKAGE_NAME\",\n \"time\":\"$COMPILE_TIME\"" >> $current_dir/execution_report.json + + if (($ITER == $NUM_ARTIFACTS)); then + echo "}" >> $current_dir/execution_report.json + else + echo "}, " >> $current_dir/execution_report.json + fi + + ITER=$(( $ITER + 1 )) +done + +echo "]}" >> $current_dir/execution_report.json diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 8db2c1786d8..003897489c4 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -215,6 +215,9 @@ fn test_{test_name}(force_brillig: ForceBrillig, inliner_aggressiveness: Inliner // Set the maximum increase so that part of the optimization is exercised (it might fail). 
nargo.arg("--max-bytecode-increase-percent"); nargo.arg("50"); + + // Check whether the test case is non-deterministic + nargo.arg("--check-non-determinism"); }} {test_content} diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 2ecf6959a94..f718021c351 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -216,6 +216,20 @@ fn compile_programs( cached_program, )?; + if compile_options.check_non_determinism { + let (program_two, _) = compile_program( + file_manager, + parsed_files, + workspace, + package, + compile_options, + load_cached_program(package), + )?; + if fxhash::hash64(&program) != fxhash::hash64(&program_two) { + panic!("Non deterministic result compiling {}", package.name); + } + } + // Choose the target width for the final, backend specific transformation. let target_width = get_target_width(package.expression_width, compile_options.expression_width); diff --git a/tooling/nargo_cli/src/cli/test_cmd/formatters.rs b/tooling/nargo_cli/src/cli/test_cmd/formatters.rs index 1b9b2d50378..75cf14ba120 100644 --- a/tooling/nargo_cli/src/cli/test_cmd/formatters.rs +++ b/tooling/nargo_cli/src/cli/test_cmd/formatters.rs @@ -417,6 +417,7 @@ impl Formatter for JsonFormatter { let mut json = Map::new(); json.insert("type".to_string(), json!("test")); json.insert("name".to_string(), json!(&test_result.name)); + json.insert("suite".to_string(), json!(&test_result.package_name)); json.insert("exec_time".to_string(), json!(test_result.time_to_run.as_secs_f64())); let mut stdout = String::new();