From dad8e65641f9832fff5b51edcdb2128b5ff72f45 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 12 Apr 2024 13:54:33 +0000 Subject: [PATCH 1/7] feat: Proving the rollup circuits (https://github.com/AztecProtocol/aztec-packages/pull/5599) This PR implements a first stage of proving the rollup circuits. We introduce the bb binary and use it to produce verification keys, generate proofs and later verify those proofs for all of the rollup circuits, currently demonstrated in a unit test. --- .aztec-sync-commit | 2 +- acvm-repo/acvm_js/build.sh | 5 +- .../src/ssa/function_builder/mod.rs | 6 --- .../noirc_evaluator/src/ssa/ir/instruction.rs | 8 +-- .../src/ssa/opt/constant_folding.rs | 51 ------------------- tooling/acvm_cli/src/cli/execute_cmd.rs | 7 ++- tooling/acvm_cli/src/cli/fs/witness.rs | 47 ++++++++++++----- tooling/acvm_cli/src/errors.rs | 3 -- tooling/bb_abstraction_leaks/build.rs | 2 +- .../noir_js_backend_barretenberg/package.json | 2 +- tooling/noirc_abi_wasm/build.sh | 5 +- tooling/noirc_abi_wasm/src/lib.rs | 11 +++- yarn.lock | 13 +++-- 13 files changed, 61 insertions(+), 101 deletions(-) diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 9ebd1dcccac..0190aad568e 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -10d9ad99200a5897417ff5669763ead4e38d87fa +145cbcda61fd73f4e135348b31c59c774cfae965 diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 4486a214c9c..58724dee02c 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 75a427397b6..d3e5e506111 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -326,12 +326,6 @@ impl FunctionBuilder { self.insert_instruction(Instruction::DecrementRc { value }, None); } - /// Insert an enable_side_effects_if instruction. These are normally only automatically - /// inserted during the flattening pass when branching is removed. - pub(crate) fn insert_enable_side_effects_if(&mut self, condition: ValueId) { - self.insert_instruction(Instruction::EnableSideEffects { condition }, None); - } - /// Terminates the current block with the given terminator instruction /// if the current block does not already have a terminator instruction. 
fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 641d971af3c..2b23cc1c1e8 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -254,7 +254,7 @@ impl Instruction { // In ACIR, a division with a false predicate outputs (0,0), so it cannot replace another instruction unless they have the same predicate bin.operator != BinaryOp::Div } - Cast(_, _) | Truncate { .. } | Not(_) => true, + Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, // These either have side-effects or interact with memory Constrain(..) @@ -266,12 +266,6 @@ impl Instruction { | DecrementRc { .. } | RangeCheck { .. } => false, - // These can have different behavior depending on the EnableSideEffectsIf context. - // Enabling constant folding for these potentially enables replacing an enabled - // array get with one that was disabled. See - // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. - ArrayGet { .. } | ArraySet { .. } => false, - Call { func, .. } => match dfg[*func] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 5a7134f3486..6cac8c91bc3 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -607,55 +607,4 @@ mod test { assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); } - - // Regression for #4600 - #[test] - fn array_get_regression() { - // fn main f0 { - // b0(v0: u1, v1: u64): - // enable_side_effects_if v0 - // v2 = array_get [Field 0, Field 1], index v1 - // v3 = not v0 - // enable_side_effects_if v3 - // v4 = array_get [Field 0, Field 1], index v1 - // } - // - // We want to make sure after constant folding both array_gets remain since they are - // under different enable_side_effects_if contexts and thus one may be disabled while - // the other is not. If one is removed, it is possible e.g. v4 is replaced with v2 which - // is disabled (only gets from index 0) and thus returns the wrong result. 
- let main_id = Id::test_new(0); - - // Compiling main - let mut builder = FunctionBuilder::new("main".into(), main_id); - let v0 = builder.add_parameter(Type::bool()); - let v1 = builder.add_parameter(Type::unsigned(64)); - - builder.insert_enable_side_effects_if(v0); - - let zero = builder.field_constant(0u128); - let one = builder.field_constant(1u128); - - let typ = Type::Array(Rc::new(vec![Type::field()]), 2); - let array = builder.array_constant(vec![zero, one].into(), typ); - - let _v2 = builder.insert_array_get(array, v1, Type::field()); - let v3 = builder.insert_not(v0); - - builder.insert_enable_side_effects_if(v3); - let _v4 = builder.insert_array_get(array, v1, Type::field()); - - // Expected output is unchanged - let ssa = builder.finish(); - let main = ssa.main(); - let instructions = main.dfg[main.entry_block()].instructions(); - let starting_instruction_count = instructions.len(); - assert_eq!(starting_instruction_count, 5); - - let ssa = ssa.fold_constants(); - let main = ssa.main(); - let instructions = main.dfg[main.entry_block()].instructions(); - let ending_instruction_count = instructions.len(); - assert_eq!(starting_instruction_count, ending_instruction_count); - } } diff --git a/tooling/acvm_cli/src/cli/execute_cmd.rs b/tooling/acvm_cli/src/cli/execute_cmd.rs index 86e7277451f..4e36dbd1f22 100644 --- a/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -6,11 +6,10 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; -use crate::cli::fs::witness::save_witness_to_dir; use crate::errors::CliError; use nargo::ops::{execute_program, DefaultForeignCallExecutor}; -use super::fs::witness::create_output_witness_string; +use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; /// Executes a circuit to calculate its return value #[derive(Debug, Clone, Args)] @@ -46,9 +45,9 @@ fn run_command(args: ExecuteCommand) -> Result { )?; if args.output_witness.is_some() { save_witness_to_dir( - &output_witness_string, - &args.working_directory, + output_witness, &args.output_witness.unwrap(), + &args.working_directory, )?; } Ok(output_witness_string) diff --git a/tooling/acvm_cli/src/cli/fs/witness.rs b/tooling/acvm_cli/src/cli/fs/witness.rs index 2daaa5a3a58..30ef4278f4b 100644 --- a/tooling/acvm_cli/src/cli/fs/witness.rs +++ b/tooling/acvm_cli/src/cli/fs/witness.rs @@ -5,24 +5,29 @@ use std::{ path::{Path, PathBuf}, }; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use crate::errors::{CliError, FilesystemError}; -/// Saves the provided output witnesses to a toml file created at the given location -pub(crate) fn save_witness_to_dir>( - output_witness: &String, - witness_dir: P, - file_name: &String, -) -> Result { - let witness_path = witness_dir.as_ref().join(file_name); +fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { + std::fs::create_dir_all(named_dir) + .unwrap_or_else(|_| panic!("could not create the `{name}` directory")); + + PathBuf::from(named_dir) +} - let mut file = File::create(&witness_path) - .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; - write!(file, "{}", output_witness) - .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; +fn write_to_file(bytes: &[u8], path: &Path) -> String { + let display = path.display(); - Ok(witness_path) + let mut file = match File::create(path) { + Err(why) => 
panic!("couldn't create {display}: {why}"), + Ok(file) => file, + }; + + match file.write_all(bytes) { + Err(why) => panic!("couldn't write to {display}: {why}"), + Ok(_) => display.to_string(), + } } /// Creates a toml representation of the provided witness map @@ -34,3 +39,19 @@ pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result>( + witnesses: WitnessStack, + witness_name: &str, + witness_dir: P, +) -> Result { + create_named_dir(witness_dir.as_ref(), "witness"); + let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); + + let buf: Vec = witnesses + .try_into() + .map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + write_to_file(buf.as_slice(), &witness_path); + + Ok(witness_path) +} diff --git a/tooling/acvm_cli/src/errors.rs b/tooling/acvm_cli/src/errors.rs index 923046410ea..8bc79347159 100644 --- a/tooling/acvm_cli/src/errors.rs +++ b/tooling/acvm_cli/src/errors.rs @@ -20,9 +20,6 @@ pub(crate) enum FilesystemError { #[error(" Error: failed to create output witness file {0}.")] OutputWitnessCreationFailed(String), - - #[error(" Error: failed to write output witness file {0}.")] - OutputWitnessWriteFailed(String), } #[derive(Debug, Error)] diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index 0f9770c805d..e055d7a3a5f 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.34.0"; +const VERSION: &str = "0.33.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 98bfdf1c3a8..438e91ff302 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.34.0", + "@aztec/bb.js": "portal:../../../../barretenberg/ts", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index 4486a214c9c..58724dee02c 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index ce15f6d502e..fad5abaebba 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -5,7 +5,7 @@ // See Cargo.toml for explanation. 
use getrandom as _; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use iter_extended::try_btree_map; use noirc_abi::{ errors::InputParserError, @@ -113,3 +113,12 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result::from_serde(&return_struct) .map_err(|err| err.to_string().into()) } + +#[wasm_bindgen(js_name = serializeWitness)] +pub fn serialise_witness(witness_map: JsWitnessMap) -> Result, JsAbiError> { + console_error_panic_hook::set_once(); + let converted_witness: WitnessMap = witness_map.into(); + let witness_stack: WitnessStack = converted_witness.into(); + let output = witness_stack.try_into(); + output.map_err(|_| JsAbiError::new("Failed to convert to Vec".to_string())) +} diff --git a/yarn.lock b/yarn.lock index 38e13814929..b45678f5d8b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,19 +221,18 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.34.0": - version: 0.34.0 - resolution: "@aztec/bb.js@npm:0.34.0" +"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: dest/node/main.js - checksum: 9d07834d81ed19e4d6fd5c1f3b07c565648df1165c30115f020ece9660b2b8599a5ed894a2090410f14020e73dd290484b30b76c9c71e863b8390fa2b7c1b729 + bb.js: ./dest/node/main.js languageName: node - linkType: hard + linkType: soft "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4396,7 +4395,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.34.0 + "@aztec/bb.js": "portal:../../../../barretenberg/ts" "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From e55ce6c386c9e539ea4c30116b396e1613b9cb0b Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 12 Apr 2024 15:20:58 +0000 Subject: [PATCH 2/7] feat: Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5725) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. 
BEGIN_COMMIT_OVERRIDE feat: get last mock oracles params (https://github.com/noir-lang/noir/pull/4789) feat: split `backend_barretenburg` into prover and verifier classes (https://github.com/noir-lang/noir/pull/4769) chore: testing that nargo fmt is idempotent (https://github.com/noir-lang/noir/pull/4765) feat: Sync from aztec-packages (https://github.com/noir-lang/noir/pull/4787) fix: ArrayGet and Set are not pure (https://github.com/noir-lang/noir/pull/4783) END_COMMIT_OVERRIDE --------- Co-authored-by: Santiago Palladino --- .aztec-sync-commit | 2 +- acvm-repo/acvm_js/build.sh | 5 +- noir_stdlib/src/test.nr | 7 + .../execution_success/mock_oracle/Prover.toml | 2 - .../execution_success/mock_oracle/src/main.nr | 27 ---- .../mock_oracle/Nargo.toml | 3 +- .../noir_test_success/mock_oracle/Prover.toml | 0 .../noir_test_success/mock_oracle/src/main.nr | 130 +++++++++++++++ tooling/acvm_cli/src/cli/execute_cmd.rs | 7 +- tooling/acvm_cli/src/cli/fs/witness.rs | 47 ++++-- tooling/acvm_cli/src/errors.rs | 3 - tooling/nargo/src/ops/foreign_calls.rs | 38 ++++- tooling/nargo_fmt/build.rs | 51 ++++-- tooling/noir_js/test/node/e2e.test.ts | 24 ++- .../noir_js_backend_barretenberg/package.json | 4 +- .../src/backend.ts | 143 +++++++++++++++++ .../noir_js_backend_barretenberg/src/index.ts | 151 +----------------- .../src/verifier.ts | 78 +++++++++ tooling/noir_js_types/src/types.ts | 20 +-- tooling/noirc_abi_wasm/build.sh | 5 +- tooling/noirc_abi_wasm/src/lib.rs | 11 +- yarn.lock | 13 +- 22 files changed, 530 insertions(+), 241 deletions(-) delete mode 100644 test_programs/execution_success/mock_oracle/Prover.toml delete mode 100644 test_programs/execution_success/mock_oracle/src/main.nr rename test_programs/{execution_success => noir_test_success}/mock_oracle/Nargo.toml (57%) create mode 100644 test_programs/noir_test_success/mock_oracle/Prover.toml create mode 100644 test_programs/noir_test_success/mock_oracle/src/main.nr create mode 100644 tooling/noir_js_backend_barretenberg/src/backend.ts create mode 100644 tooling/noir_js_backend_barretenberg/src/verifier.ts diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 9ebd1dcccac..98bf27c5a33 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -10d9ad99200a5897417ff5669763ead4e38d87fa +825c455a62faeae5d148ce4f914efacb8f4c50fd diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 4486a214c9c..58724dee02c 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr index e1c320215de..e6a7e03fefc 100644 --- a/noir_stdlib/src/test.nr +++ b/noir_stdlib/src/test.nr @@ -4,6 +4,9 @@ unconstrained fn create_mock_oracle(name: str) -> Field {} #[oracle(set_mock_params)] unconstrained fn 
set_mock_params_oracle<P>(id: Field, params: P) {} +#[oracle(get_mock_last_params)] +unconstrained fn get_mock_last_params_oracle<P>(id: Field) -> P {} + +#[oracle(set_mock_returns)] unconstrained fn set_mock_returns_oracle<R>(id: Field, returns: R) {} @@ -27,6 +30,10 @@ impl OracleMock { self } + unconstrained pub fn get_last_params<P>
(self) -> P { + get_mock_last_params_oracle(self.id) + } + unconstrained pub fn returns(self, returns: R) -> Self { set_mock_returns_oracle(self.id, returns); self diff --git a/test_programs/execution_success/mock_oracle/Prover.toml b/test_programs/execution_success/mock_oracle/Prover.toml deleted file mode 100644 index 2b26a4ce471..00000000000 --- a/test_programs/execution_success/mock_oracle/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "10" - diff --git a/test_programs/execution_success/mock_oracle/src/main.nr b/test_programs/execution_success/mock_oracle/src/main.nr deleted file mode 100644 index 90fca7993cc..00000000000 --- a/test_programs/execution_success/mock_oracle/src/main.nr +++ /dev/null @@ -1,27 +0,0 @@ -use dep::std::test::OracleMock; - -struct Point { - x: Field, - y: Field, -} - -#[oracle(foo)] -unconstrained fn foo_oracle(_point: Point, _array: [Field; 4]) -> Field {} - -unconstrained fn main() { - let array = [1, 2, 3, 4]; - let another_array = [4, 3, 2, 1]; - let point = Point { x: 14, y: 27 }; - - OracleMock::mock("foo").returns(42).times(1); - let mock = OracleMock::mock("foo").returns(0); - assert_eq(42, foo_oracle(point, array)); - assert_eq(0, foo_oracle(point, array)); - mock.clear(); - - OracleMock::mock("foo").with_params((point, array)).returns(10); - OracleMock::mock("foo").with_params((point, another_array)).returns(20); - assert_eq(10, foo_oracle(point, array)); - assert_eq(20, foo_oracle(point, another_array)); -} - diff --git a/test_programs/execution_success/mock_oracle/Nargo.toml b/test_programs/noir_test_success/mock_oracle/Nargo.toml similarity index 57% rename from test_programs/execution_success/mock_oracle/Nargo.toml rename to test_programs/noir_test_success/mock_oracle/Nargo.toml index b2916487e8c..428e965899c 100644 --- a/test_programs/execution_success/mock_oracle/Nargo.toml +++ b/test_programs/noir_test_success/mock_oracle/Nargo.toml @@ -2,5 +2,6 @@ name = "mock_oracle" type = "bin" authors = [""] +compiler_version = ">=0.23.0" -[dependencies] +[dependencies] \ No newline at end of file diff --git a/test_programs/noir_test_success/mock_oracle/Prover.toml b/test_programs/noir_test_success/mock_oracle/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_programs/noir_test_success/mock_oracle/src/main.nr b/test_programs/noir_test_success/mock_oracle/src/main.nr new file mode 100644 index 00000000000..d840ffaef66 --- /dev/null +++ b/test_programs/noir_test_success/mock_oracle/src/main.nr @@ -0,0 +1,130 @@ +use dep::std::test::OracleMock; + +struct Point { + x: Field, + y: Field, +} + +impl Eq for Point { + fn eq(self, other: Point) -> bool { + (self.x == other.x) & (self.y == other.y) + } +} + +#[oracle(void_field)] +unconstrained fn void_field_oracle() -> Field {} + +unconstrained fn void_field() -> Field { + void_field_oracle() +} + +#[oracle(field_field)] +unconstrained fn field_field_oracle(_x: Field) -> Field {} + +unconstrained fn field_field(x: Field) -> Field { + field_field_oracle(x) +} + +#[oracle(struct_field)] +unconstrained fn struct_field_oracle(_point: Point, _array: [Field; 4]) -> Field {} + +unconstrained fn struct_field(point: Point, array: [Field; 4]) -> Field { + struct_field_oracle(point, array) +} + +#[test(should_fail)] +fn test_mock_no_returns() { + OracleMock::mock("void_field"); + void_field(); // Some return value must be set +} + +#[test] +fn test_mock() { + OracleMock::mock("void_field").returns(10); + assert_eq(void_field(), 10); +} + +#[test] +fn test_multiple_mock() { + let first_mock = 
OracleMock::mock("void_field").returns(10); + OracleMock::mock("void_field").returns(42); + + // The mocks are searched for in creation order, so the first one prevents the second from being called. + assert_eq(void_field(), 10); + + first_mock.clear(); + assert_eq(void_field(), 42); +} + +#[test] +fn test_multiple_mock_times() { + OracleMock::mock("void_field").returns(10).times(2); + OracleMock::mock("void_field").returns(42); + + assert_eq(void_field(), 10); + assert_eq(void_field(), 10); + assert_eq(void_field(), 42); +} + +#[test] +fn test_mock_with_params() { + OracleMock::mock("field_field").with_params((5,)).returns(10); + assert_eq(field_field(5), 10); +} + +#[test] +fn test_multiple_mock_with_params() { + OracleMock::mock("field_field").with_params((5,)).returns(10); + OracleMock::mock("field_field").with_params((7,)).returns(14); + + assert_eq(field_field(5), 10); + assert_eq(field_field(7), 14); +} + +#[test] +fn test_mock_last_params() { + let mock = OracleMock::mock("field_field").returns(10); + assert_eq(field_field(5), 10); + + assert_eq(mock.get_last_params(), 5); +} + +#[test] +fn test_mock_last_params_many_calls() { + let mock = OracleMock::mock("field_field").returns(10); + assert_eq(field_field(5), 10); + assert_eq(field_field(7), 10); + + assert_eq(mock.get_last_params(), 7); +} + +#[test] +fn test_mock_struct_field() { + // Combination of simpler test cases + + let array = [1, 2, 3, 4]; + let another_array = [4, 3, 2, 1]; + let point = Point { x: 14, y: 27 }; + + OracleMock::mock("struct_field").returns(42).times(2); + let timeless_mock = OracleMock::mock("struct_field").returns(0); + + assert_eq(42, struct_field(point, array)); + assert_eq(42, struct_field(point, array)); + // The times(2) mock is now cleared + + assert_eq(0, struct_field(point, array)); + + let last_params: (Point, [Field; 4]) = timeless_mock.get_last_params(); + assert_eq(last_params.0, point); + assert_eq(last_params.1, array); + + // We clear the mock with no times() to allow other mocks to be callable + timeless_mock.clear(); + + OracleMock::mock("struct_field").with_params((point, array)).returns(10); + OracleMock::mock("struct_field").with_params((point, another_array)).returns(20); + assert_eq(10, struct_field(point, array)); + assert_eq(20, struct_field(point, another_array)); +} + diff --git a/tooling/acvm_cli/src/cli/execute_cmd.rs b/tooling/acvm_cli/src/cli/execute_cmd.rs index 86e7277451f..4e36dbd1f22 100644 --- a/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -6,11 +6,10 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; -use crate::cli::fs::witness::save_witness_to_dir; use crate::errors::CliError; use nargo::ops::{execute_program, DefaultForeignCallExecutor}; -use super::fs::witness::create_output_witness_string; +use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; /// Executes a circuit to calculate its return value #[derive(Debug, Clone, Args)] @@ -46,9 +45,9 @@ fn run_command(args: ExecuteCommand) -> Result { )?; if args.output_witness.is_some() { save_witness_to_dir( - &output_witness_string, - &args.working_directory, + output_witness, &args.output_witness.unwrap(), + &args.working_directory, )?; } Ok(output_witness_string) diff --git a/tooling/acvm_cli/src/cli/fs/witness.rs b/tooling/acvm_cli/src/cli/fs/witness.rs index 2daaa5a3a58..30ef4278f4b 100644 --- a/tooling/acvm_cli/src/cli/fs/witness.rs +++ 
b/tooling/acvm_cli/src/cli/fs/witness.rs @@ -5,24 +5,29 @@ use std::{ path::{Path, PathBuf}, }; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use crate::errors::{CliError, FilesystemError}; -/// Saves the provided output witnesses to a toml file created at the given location -pub(crate) fn save_witness_to_dir>( - output_witness: &String, - witness_dir: P, - file_name: &String, -) -> Result { - let witness_path = witness_dir.as_ref().join(file_name); +fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { + std::fs::create_dir_all(named_dir) + .unwrap_or_else(|_| panic!("could not create the `{name}` directory")); + + PathBuf::from(named_dir) +} - let mut file = File::create(&witness_path) - .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; - write!(file, "{}", output_witness) - .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; +fn write_to_file(bytes: &[u8], path: &Path) -> String { + let display = path.display(); - Ok(witness_path) + let mut file = match File::create(path) { + Err(why) => panic!("couldn't create {display}: {why}"), + Ok(file) => file, + }; + + match file.write_all(bytes) { + Err(why) => panic!("couldn't write to {display}: {why}"), + Ok(_) => display.to_string(), + } } /// Creates a toml representation of the provided witness map @@ -34,3 +39,19 @@ pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result>( + witnesses: WitnessStack, + witness_name: &str, + witness_dir: P, +) -> Result { + create_named_dir(witness_dir.as_ref(), "witness"); + let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); + + let buf: Vec = witnesses + .try_into() + .map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + write_to_file(buf.as_slice(), &witness_path); + + Ok(witness_path) +} diff --git a/tooling/acvm_cli/src/errors.rs b/tooling/acvm_cli/src/errors.rs index 923046410ea..8bc79347159 100644 --- a/tooling/acvm_cli/src/errors.rs +++ b/tooling/acvm_cli/src/errors.rs @@ -20,9 +20,6 @@ pub(crate) enum FilesystemError { #[error(" Error: failed to create output witness file {0}.")] OutputWitnessCreationFailed(String), - - #[error(" Error: failed to write output witness file {0}.")] - OutputWitnessWriteFailed(String), } #[derive(Debug, Error)] diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index ea67f17af2a..bc91929e5e7 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -75,6 +75,7 @@ pub enum ForeignCall { AssertMessage, CreateMock, SetMockParams, + GetMockLastParams, SetMockReturns, SetMockTimes, ClearMock, @@ -93,6 +94,7 @@ impl ForeignCall { ForeignCall::AssertMessage => "assert_message", ForeignCall::CreateMock => "create_mock", ForeignCall::SetMockParams => "set_mock_params", + ForeignCall::GetMockLastParams => "get_mock_last_params", ForeignCall::SetMockReturns => "set_mock_returns", ForeignCall::SetMockTimes => "set_mock_times", ForeignCall::ClearMock => "clear_mock", @@ -105,6 +107,7 @@ impl ForeignCall { "assert_message" => Some(ForeignCall::AssertMessage), "create_mock" => Some(ForeignCall::CreateMock), "set_mock_params" => Some(ForeignCall::SetMockParams), + "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), "set_mock_returns" => Some(ForeignCall::SetMockReturns), "set_mock_times" => Some(ForeignCall::SetMockTimes), "clear_mock" => Some(ForeignCall::ClearMock), @@ -122,6 +125,8 
@@ struct MockedCall { name: String, /// Optionally match the parameters params: Option>, + /// The parameters with which the mock was last called + last_called_params: Option>, /// The result to return when this mock is called result: ForeignCallResult, /// How many times should this mock be called before it is removed @@ -134,6 +139,7 @@ impl MockedCall { id, name, params: None, + last_called_params: None, result: ForeignCallResult { values: vec![] }, times_left: None, } @@ -185,7 +191,11 @@ impl DefaultForeignCallExecutor { Ok((id, params)) } - fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { + fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall> { + self.mocked_responses.iter().find(|response| response.id == id) + } + + fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall> { self.mocked_responses.iter_mut().find(|response| response.id == id) } @@ -250,15 +260,27 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { } Some(ForeignCall::SetMockParams) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .params = Some(params.to_vec()); Ok(ForeignCallResult::default().into()) } + Some(ForeignCall::GetMockLastParams) => { + let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; + let mock = + self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id)); + + let last_called_params = mock + .last_called_params + .clone() + .unwrap_or_else(|| panic!("Mock {} was never called", mock.name)); + + Ok(last_called_params.into()) + } Some(ForeignCall::SetMockReturns) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .result = ForeignCallResult { values: params.to_vec() }; @@ -269,7 +291,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { let times = params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times"); - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .times_left = Some(times); @@ -292,6 +314,9 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .mocked_responses .get_mut(response_position) .expect("Invalid position of mocked response"); + + mock.last_called_params = Some(foreign_call.inputs.clone()); + let result = mock.result.values.clone(); if let Some(times_left) = &mut mock.times_left { @@ -316,7 +341,10 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { Ok(parsed_response.into()) } - (None, None) => panic!("Unknown foreign call {}", foreign_call_name), + (None, None) => panic!( + "No mock for foreign call {}({:?})", + foreign_call_name, &foreign_call.inputs + ), } } } diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs index 6f41768c1dc..7d5f07c43bf 100644 --- a/tooling/nargo_fmt/build.rs +++ b/tooling/nargo_fmt/build.rs @@ -49,28 +49,55 @@ fn generate_formatter_tests(test_file: &mut File, test_data_dir: &Path) { let output_source_path = outputs_dir.join(file_name).display().to_string(); let output_source = std::fs::read_to_string(output_source_path.clone()).unwrap(); + let skip_idempotent_test = + // TODO(https://github.com/noir-lang/noir/issues/4766): spurious trailing space + test_name == "array" || + // TODO(https://github.com/noir-lang/noir/issues/4767): pre-comment space + // TODO(https://github.com/noir-lang/noir/issues/4768): 
spurious newline + test_name == "tuple"; + write!( test_file, r##" -#[test] -fn format_{test_name}() {{ - let input = r#"{input_source}"#; - let expected_output = r#"{output_source}"#; + #[test] + fn format_{test_name}() {{ + let input = r#"{input_source}"#; + let expected_output = r#"{output_source}"#; - let (parsed_module, _errors) = noirc_frontend::parse_program(input); + let (parsed_module, _errors) = noirc_frontend::parse_program(input); - let config = nargo_fmt::Config::of("{config}").unwrap(); - let fmt_text = nargo_fmt::format(input, parsed_module, &config); + let config = nargo_fmt::Config::of("{config}").unwrap(); + let fmt_text = nargo_fmt::format(input, parsed_module, &config); - if std::env::var("UPDATE_EXPECT").is_ok() {{ - std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); - }} + if std::env::var("UPDATE_EXPECT").is_ok() {{ + std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); + }} - similar_asserts::assert_eq!(fmt_text, expected_output); -}} + similar_asserts::assert_eq!(fmt_text, expected_output); + }} "## ) .expect("Could not write templated test file."); + + if !skip_idempotent_test { + write!( + test_file, + r##" + #[test] + fn format_idempotent_{test_name}() {{ + let expected_output = r#"{output_source}"#; + + let (parsed_module, _errors) = noirc_frontend::parse_program(expected_output); + + let config = nargo_fmt::Config::of("{config}").unwrap(); + let fmt_text = nargo_fmt::format(expected_output, parsed_module, &config); + + similar_asserts::assert_eq!(fmt_text, expected_output); + }} + "## + ) + .expect("Could not write templated test file."); + } } } diff --git a/tooling/noir_js/test/node/e2e.test.ts b/tooling/noir_js/test/node/e2e.test.ts index 8921314e8ea..979841c47e6 100644 --- a/tooling/noir_js/test/node/e2e.test.ts +++ b/tooling/noir_js/test/node/e2e.test.ts @@ -1,7 +1,7 @@ import { expect } from 'chai'; import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; import { Noir } from '@noir-lang/noir_js'; -import { BarretenbergBackend as Backend } from '@noir-lang/backend_barretenberg'; +import { BarretenbergBackend as Backend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; import { CompiledCircuit } from '@noir-lang/types'; const assert_lt_program = assert_lt_json as CompiledCircuit; @@ -47,6 +47,28 @@ it('end-to-end proof creation and verification (outer) -- Program API', async () expect(isValid).to.be.true; }); +it('end-to-end proof creation and verification (outer) -- Verifier API', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + // Initialize backend + const backend = new Backend(assert_lt_program); + // Initialize program + const program = new Noir(assert_lt_program, backend); + // Generate proof + const proof = await program.generateProof(inputs); + + const verificationKey = await backend.getVerificationKey(); + + // Proof verification + const verifier = new Verifier(); + const isValid = await verifier.verifyProof(proof, verificationKey); + expect(isValid).to.be.true; +}); + // TODO: maybe switch to using assert_statement_recursive here to test both options it('end-to-end proof creation and verification (inner)', async () => { // Noir.Js part diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 98bfdf1c3a8..fefd2f6f8d9 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 
+42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.34.0", + "@aztec/bb.js": "portal:../../../../barretenberg/ts", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, @@ -57,4 +57,4 @@ "ts-node": "^10.9.1", "typescript": "5.4.2" } -} +} \ No newline at end of file diff --git a/tooling/noir_js_backend_barretenberg/src/backend.ts b/tooling/noir_js_backend_barretenberg/src/backend.ts new file mode 100644 index 00000000000..d07681dd8c1 --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -0,0 +1,143 @@ +import { decompressSync as gunzip } from 'fflate'; +import { acirToUint8Array } from './serialize.js'; +import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types'; +import { BackendOptions } from './types.js'; +import { deflattenPublicInputs } from './public_inputs.js'; +import { reconstructProofWithPublicInputs } from './verifier.js'; +import { type Barretenberg } from '@aztec/bb.js'; + +// This is the number of bytes in a UltraPlonk proof +// minus the public inputs. +const numBytesInProofWithoutPublicInputs: number = 2144; + +export class BarretenbergVerifierBackend implements VerifierBackend { + // These type assertions are used so that we don't + // have to initialize `api` and `acirComposer` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + protected api!: Barretenberg; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected acirComposer: any; + protected acirUncompressedBytecode: Uint8Array; + + constructor( + acirCircuit: CompiledCircuit, + protected options: BackendOptions = { threads: 1 }, + ) { + const acirBytecodeBase64 = acirCircuit.bytecode; + this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + } + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. 
Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + const api = await Barretenberg.new(this.options); + + const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); + const crs = await Crs.new(subgroupSize + 1); + await api.commonInitSlabAllocator(subgroupSize); + await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); + + this.acirComposer = await api.acirNewAcirComposer(subgroupSize); + await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); + this.api = api; + } + } + + /** @description Verifies a proof */ + async verifyProof(proofData: ProofData): Promise { + const proof = reconstructProofWithPublicInputs(proofData); + await this.instantiate(); + await this.api.acirInitVerificationKey(this.acirComposer); + return await this.api.acirVerifyProof(this.acirComposer, proof); + } + + async getVerificationKey(): Promise { + await this.instantiate(); + await this.api.acirInitVerificationKey(this.acirComposer); + return await this.api.acirGetVerificationKey(this.acirComposer); + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} + +export class BarretenbergBackend extends BarretenbergVerifierBackend implements Backend { + /** @description Generates a proof */ + async generateProof(compressedWitness: Uint8Array): Promise { + await this.instantiate(); + const proofWithPublicInputs = await this.api.acirCreateProof( + this.acirComposer, + this.acirUncompressedBytecode, + gunzip(compressedWitness), + ); + + const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; + + const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); + const proof = proofWithPublicInputs.slice(splitIndex); + const publicInputs = deflattenPublicInputs(publicInputsConcatenated); + + return { proof, publicInputs }; + } + + /** + * Generates artifacts that will be passed to a circuit that will verify this proof. + * + * Instead of passing the proof and verification key as a byte array, we pass them + * as fields which makes it cheaper to verify in a circuit. + * + * The proof that is passed here will have been created using a circuit + * that has the #[recursive] attribute on its `main` method. + * + * The number of public inputs denotes how many public inputs are in the inner proof. + * + * @example + * ```typescript + * const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); + * ``` + */ + async generateRecursiveProofArtifacts( + proofData: ProofData, + numOfPublicInputs = 0, + ): Promise<{ + proofAsFields: string[]; + vkAsFields: string[]; + vkHash: string; + }> { + await this.instantiate(); + const proof = reconstructProofWithPublicInputs(proofData); + const proofAsFields = ( + await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) + ).slice(numOfPublicInputs); + + // TODO: perhaps we should put this in the init function. Need to benchmark + // TODO how long it takes. 
+ await this.api.acirInitVerificationKey(this.acirComposer); + + // Note: If you don't init verification key, `acirSerializeVerificationKeyIntoFields`` will just hang on serialization + const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer); + + return { + proofAsFields: proofAsFields.map((p) => p.toString()), + vkAsFields: vk[0].map((vk) => vk.toString()), + vkHash: vk[1].toString(), + }; + } +} diff --git a/tooling/noir_js_backend_barretenberg/src/index.ts b/tooling/noir_js_backend_barretenberg/src/index.ts index bfdf1005a93..f28abb9a658 100644 --- a/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/tooling/noir_js_backend_barretenberg/src/index.ts @@ -1,150 +1,7 @@ -import { decompressSync as gunzip } from 'fflate'; -import { acirToUint8Array } from './serialize.js'; -import { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; -import { BackendOptions } from './types.js'; -import { deflattenPublicInputs, flattenPublicInputsAsArray } from './public_inputs.js'; -import { type Barretenberg } from '@aztec/bb.js'; - +export { BarretenbergBackend } from './backend.js'; +export { BarretenbergVerifier } from './verifier.js'; export { publicInputsToWitnessMap } from './public_inputs.js'; -// This is the number of bytes in a UltraPlonk proof -// minus the public inputs. -const numBytesInProofWithoutPublicInputs: number = 2144; - -export class BarretenbergBackend implements Backend { - // These type assertions are used so that we don't - // have to initialize `api` and `acirComposer` in the constructor. - // These are initialized asynchronously in the `init` function, - // constructors cannot be asynchronous which is why we do this. - - private api!: Barretenberg; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - private acirComposer: any; - private acirUncompressedBytecode: Uint8Array; - - constructor( - acirCircuit: CompiledCircuit, - private options: BackendOptions = { threads: 1 }, - ) { - const acirBytecodeBase64 = acirCircuit.bytecode; - this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); - } - - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. 
Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options); - const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); - const crs = await Crs.new(subgroupSize + 1); - await api.commonInitSlabAllocator(subgroupSize); - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - this.acirComposer = await api.acirNewAcirComposer(subgroupSize); - await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); - this.api = api; - } - } - - /** @description Generates a proof */ - async generateProof(compressedWitness: Uint8Array): Promise { - await this.instantiate(); - // TODO: Change once `@aztec/bb.js` version is updated to use methods without isRecursive flag - const proofWithPublicInputs = await this.api.acirCreateProof( - this.acirComposer, - this.acirUncompressedBytecode, - gunzip(compressedWitness), - ); - - const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; - - const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); - const proof = proofWithPublicInputs.slice(splitIndex); - const publicInputs = deflattenPublicInputs(publicInputsConcatenated); - - return { proof, publicInputs }; - } - - /** - * Generates artifacts that will be passed to a circuit that will verify this proof. - * - * Instead of passing the proof and verification key as a byte array, we pass them - * as fields which makes it cheaper to verify in a circuit. - * - * The proof that is passed here will have been created using a circuit - * that has the #[recursive] attribute on its `main` method. - * - * The number of public inputs denotes how many public inputs are in the inner proof. - * - * @example - * ```typescript - * const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); - * ``` - */ - async generateRecursiveProofArtifacts( - proofData: ProofData, - numOfPublicInputs = 0, - ): Promise<{ - proofAsFields: string[]; - vkAsFields: string[]; - vkHash: string; - }> { - await this.instantiate(); - const proof = reconstructProofWithPublicInputs(proofData); - const proofAsFields = ( - await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) - ).slice(numOfPublicInputs); - - // TODO: perhaps we should put this in the init function. Need to benchmark - // TODO how long it takes. 
- await this.api.acirInitVerificationKey(this.acirComposer); - - // Note: If you don't init verification key, `acirSerializeVerificationKeyIntoFields`` will just hang on serialization - const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer); - - return { - proofAsFields: proofAsFields.map((p) => p.toString()), - vkAsFields: vk[0].map((vk) => vk.toString()), - vkHash: vk[1].toString(), - }; - } - - /** @description Verifies a proof */ - async verifyProof(proofData: ProofData): Promise { - const proof = reconstructProofWithPublicInputs(proofData); - await this.instantiate(); - await this.api.acirInitVerificationKey(this.acirComposer); - // TODO: Change once `@aztec/bb.js` version is updated to use methods without isRecursive flag - return await this.api.acirVerifyProof(this.acirComposer, proof); - } - - async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); - } -} - -function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { - // Flatten publicInputs - const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs); - - // Concatenate publicInputs and proof - const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); - - return proofWithPublicInputs; -} - // typedoc exports -export { Backend, BackendOptions, CompiledCircuit, ProofData }; +export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; +export { BackendOptions } from './types.js'; diff --git a/tooling/noir_js_backend_barretenberg/src/verifier.ts b/tooling/noir_js_backend_barretenberg/src/verifier.ts new file mode 100644 index 00000000000..fe9fa9cfffd --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/src/verifier.ts @@ -0,0 +1,78 @@ +import { ProofData } from '@noir-lang/types'; +import { BackendOptions } from './types.js'; +import { flattenPublicInputsAsArray } from './public_inputs.js'; +import { type Barretenberg } from '@aztec/bb.js'; + +export class BarretenbergVerifier { + // These type assertions are used so that we don't + // have to initialize `api` and `acirComposer` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + private api!: Barretenberg; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private acirComposer: any; + + constructor(private options: BackendOptions = { threads: 1 }) {} + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + + // This is the number of CRS points necessary to verify a Barretenberg proof. 
+ const NUM_CRS_POINTS_FOR_VERIFICATION: number = 0; + const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]); + + await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION); + await api.srsInitSrs( + new RawBuffer([] /* crs.getG1Data() */), + NUM_CRS_POINTS_FOR_VERIFICATION, + new RawBuffer(crs.getG2Data()), + ); + + this.acirComposer = await api.acirNewAcirComposer(NUM_CRS_POINTS_FOR_VERIFICATION); + this.api = api; + } + } + + /** @description Verifies a proof */ + async verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise { + const { RawBuffer } = await import('@aztec/bb.js'); + + await this.instantiate(); + // The verifier can be used for a variety of ACIR programs so we should not assume that it + // is preloaded with the correct verification key. + await this.api.acirLoadVerificationKey(this.acirComposer, new RawBuffer(verificationKey)); + + const proof = reconstructProofWithPublicInputs(proofData); + return await this.api.acirVerifyProof(this.acirComposer, proof); + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} + +export function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { + // Flatten publicInputs + const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs); + + // Concatenate publicInputs and proof + const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); + + return proofWithPublicInputs; +} diff --git a/tooling/noir_js_types/src/types.ts b/tooling/noir_js_types/src/types.ts index 3a62d79a807..456e5a57f40 100644 --- a/tooling/noir_js_types/src/types.ts +++ b/tooling/noir_js_types/src/types.ts @@ -29,7 +29,17 @@ export type Abi = { return_witnesses: number[]; }; -export interface Backend { +export interface VerifierBackend { + /** + * @description Verifies a proof */ + verifyProof(proofData: ProofData): Promise; + + /** + * @description Destroys the backend */ + destroy(): Promise; +} + +export interface Backend extends VerifierBackend { /** * @description Generates a proof */ generateProof(decompressedWitness: Uint8Array): Promise; @@ -49,14 +59,6 @@ export interface Backend { /** @description A Field containing the verification key hash */ vkHash: string; }>; - - /** - * @description Verifies a proof */ - verifyProof(proofData: ProofData): Promise; - - /** - * @description Destroys the backend */ - destroy(): Promise; } /** diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index 4486a214c9c..58724dee02c 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index 
ce15f6d502e..fad5abaebba 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -5,7 +5,7 @@ // See Cargo.toml for explanation. use getrandom as _; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use iter_extended::try_btree_map; use noirc_abi::{ errors::InputParserError, @@ -113,3 +113,12 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result::from_serde(&return_struct) .map_err(|err| err.to_string().into()) } + +#[wasm_bindgen(js_name = serializeWitness)] +pub fn serialise_witness(witness_map: JsWitnessMap) -> Result, JsAbiError> { + console_error_panic_hook::set_once(); + let converted_witness: WitnessMap = witness_map.into(); + let witness_stack: WitnessStack = converted_witness.into(); + let output = witness_stack.try_into(); + output.map_err(|_| JsAbiError::new("Failed to convert to Vec".to_string())) +} diff --git a/yarn.lock b/yarn.lock index 38e13814929..b45678f5d8b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,19 +221,18 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.34.0": - version: 0.34.0 - resolution: "@aztec/bb.js@npm:0.34.0" +"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: dest/node/main.js - checksum: 9d07834d81ed19e4d6fd5c1f3b07c565648df1165c30115f020ece9660b2b8599a5ed894a2090410f14020e73dd290484b30b76c9c71e863b8390fa2b7c1b729 + bb.js: ./dest/node/main.js languageName: node - linkType: hard + linkType: soft "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4396,7 +4395,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.34.0 + "@aztec/bb.js": "portal:../../../../barretenberg/ts" "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From eeb5b8ce13beffa3d7da52f8af879a9d66f6eb02 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Mon, 15 Apr 2024 14:08:17 +0000 Subject: [PATCH 3/7] yarn --- tooling/bb_abstraction_leaks/build.rs | 2 +- tooling/noir_js_backend_barretenberg/package.json | 4 ++-- yarn.lock | 13 +++++++------ 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index e055d7a3a5f..0f9770c805d 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.33.0"; +const VERSION: &str = "0.34.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index fefd2f6f8d9..98bfdf1c3a8 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 
eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "portal:../../../../barretenberg/ts", + "@aztec/bb.js": "0.34.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, @@ -57,4 +57,4 @@ "ts-node": "^10.9.1", "typescript": "5.4.2" } -} \ No newline at end of file +} diff --git a/yarn.lock b/yarn.lock index b45678f5d8b..38e13814929 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,18 +221,19 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": - version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" +"@aztec/bb.js@npm:0.34.0": + version: 0.34.0 + resolution: "@aztec/bb.js@npm:0.34.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: ./dest/node/main.js + bb.js: dest/node/main.js + checksum: 9d07834d81ed19e4d6fd5c1f3b07c565648df1165c30115f020ece9660b2b8599a5ed894a2090410f14020e73dd290484b30b76c9c71e863b8390fa2b7c1b729 languageName: node - linkType: soft + linkType: hard "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4395,7 +4396,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": "portal:../../../../barretenberg/ts" + "@aztec/bb.js": 0.34.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From ec175a1cd740a979328c20671a3ff2388df1f286 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Mon, 15 Apr 2024 14:09:58 +0000 Subject: [PATCH 4/7] bring back arrayget arrayset not pure reversion --- .../src/ssa/function_builder/mod.rs | 6 +++ .../noirc_evaluator/src/ssa/ir/instruction.rs | 8 ++- .../src/ssa/opt/constant_folding.rs | 51 +++++++++++++++++++ 3 files changed, 64 insertions(+), 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index d3e5e506111..75a427397b6 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -326,6 +326,12 @@ impl FunctionBuilder { self.insert_instruction(Instruction::DecrementRc { value }, None); } + /// Insert an enable_side_effects_if instruction. These are normally only automatically + /// inserted during the flattening pass when branching is removed. + pub(crate) fn insert_enable_side_effects_if(&mut self, condition: ValueId) { + self.insert_instruction(Instruction::EnableSideEffects { condition }, None); + } + /// Terminates the current block with the given terminator instruction /// if the current block does not already have a terminator instruction. 
fn terminate_block_with(&mut self, terminator: TerminatorInstruction) { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 2b23cc1c1e8..641d971af3c 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -254,7 +254,7 @@ impl Instruction { // In ACIR, a division with a false predicate outputs (0,0), so it cannot replace another instruction unless they have the same predicate bin.operator != BinaryOp::Div } - Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, + Cast(_, _) | Truncate { .. } | Not(_) => true, // These either have side-effects or interact with memory Constrain(..) @@ -266,6 +266,12 @@ impl Instruction { | DecrementRc { .. } | RangeCheck { .. } => false, + // These can have different behavior depending on the EnableSideEffectsIf context. + // Enabling constant folding for these potentially enables replacing an enabled + // array get with one that was disabled. See + // https://github.com/noir-lang/noir/pull/4716#issuecomment-2047846328. + ArrayGet { .. } | ArraySet { .. } => false, + Call { func, .. } => match dfg[*func] { Value::Intrinsic(intrinsic) => !intrinsic.has_side_effects(), _ => false, diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 6cac8c91bc3..5a7134f3486 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -607,4 +607,55 @@ mod test { assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); } + + // Regression for #4600 + #[test] + fn array_get_regression() { + // fn main f0 { + // b0(v0: u1, v1: u64): + // enable_side_effects_if v0 + // v2 = array_get [Field 0, Field 1], index v1 + // v3 = not v0 + // enable_side_effects_if v3 + // v4 = array_get [Field 0, Field 1], index v1 + // } + // + // We want to make sure after constant folding both array_gets remain since they are + // under different enable_side_effects_if contexts and thus one may be disabled while + // the other is not. If one is removed, it is possible e.g. v4 is replaced with v2 which + // is disabled (only gets from index 0) and thus returns the wrong result. 
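+ //
+ // A concrete illustration (values chosen here for exposition, not taken from the test):
+ // with v0 = false and v1 = 1, v2 runs in a disabled context and so only reads index 0
+ // (Field 0), while v4 runs under the enabled context v3 = true and must read index 1
+ // (Field 1). Replacing v4 with v2 would therefore yield 0 instead of 1.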
+ let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let v0 = builder.add_parameter(Type::bool()); + let v1 = builder.add_parameter(Type::unsigned(64)); + + builder.insert_enable_side_effects_if(v0); + + let zero = builder.field_constant(0u128); + let one = builder.field_constant(1u128); + + let typ = Type::Array(Rc::new(vec![Type::field()]), 2); + let array = builder.array_constant(vec![zero, one].into(), typ); + + let _v2 = builder.insert_array_get(array, v1, Type::field()); + let v3 = builder.insert_not(v0); + + builder.insert_enable_side_effects_if(v3); + let _v4 = builder.insert_array_get(array, v1, Type::field()); + + // Expected output is unchanged + let ssa = builder.finish(); + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + let starting_instruction_count = instructions.len(); + assert_eq!(starting_instruction_count, 5); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + let ending_instruction_count = instructions.len(); + assert_eq!(starting_instruction_count, ending_instruction_count); + } } From 6e4e26a74e2d65b3084045ee22623d9ed45727f2 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Mon, 15 Apr 2024 14:28:42 +0000 Subject: [PATCH 5/7] fix: primary_message typo in errors.rs (https://github.com/AztecProtocol/aztec-packages/pull/5646) --- .aztec-sync-commit | 2 +- acvm-repo/acvm_js/build.sh | 5 +- aztec_macros/src/utils/errors.rs | 2 +- noir_stdlib/src/test.nr | 7 + .../execution_success/mock_oracle/Prover.toml | 2 - .../execution_success/mock_oracle/src/main.nr | 27 ---- .../mock_oracle/Nargo.toml | 3 +- .../noir_test_success/mock_oracle/Prover.toml | 0 .../noir_test_success/mock_oracle/src/main.nr | 130 +++++++++++++++ tooling/acvm_cli/src/cli/execute_cmd.rs | 7 +- tooling/acvm_cli/src/cli/fs/witness.rs | 47 ++++-- tooling/acvm_cli/src/errors.rs | 3 - tooling/nargo/src/ops/foreign_calls.rs | 38 ++++- tooling/nargo_fmt/build.rs | 51 ++++-- tooling/noir_js/test/node/e2e.test.ts | 24 ++- .../noir_js_backend_barretenberg/package.json | 4 +- .../src/backend.ts | 143 +++++++++++++++++ .../noir_js_backend_barretenberg/src/index.ts | 151 +----------------- .../src/verifier.ts | 78 +++++++++ tooling/noir_js_types/src/types.ts | 20 +-- tooling/noirc_abi_wasm/build.sh | 5 +- tooling/noirc_abi_wasm/src/lib.rs | 11 +- yarn.lock | 13 +- 23 files changed, 531 insertions(+), 242 deletions(-) delete mode 100644 test_programs/execution_success/mock_oracle/Prover.toml delete mode 100644 test_programs/execution_success/mock_oracle/src/main.nr rename test_programs/{execution_success => noir_test_success}/mock_oracle/Nargo.toml (57%) create mode 100644 test_programs/noir_test_success/mock_oracle/Prover.toml create mode 100644 test_programs/noir_test_success/mock_oracle/src/main.nr create mode 100644 tooling/noir_js_backend_barretenberg/src/backend.ts create mode 100644 tooling/noir_js_backend_barretenberg/src/verifier.ts diff --git a/.aztec-sync-commit b/.aztec-sync-commit index 9ebd1dcccac..f99c7ec7e49 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -10d9ad99200a5897417ff5669763ead4e38d87fa +1dfbe7bc3bf3c455d8fb6c8b5fe6a96c1edf7af9 diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 4486a214c9c..58724dee02c 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo 
require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs index 5d3a61a51dc..52ee5587559 100644 --- a/aztec_macros/src/utils/errors.rs +++ b/aztec_macros/src/utils/errors.rs @@ -23,7 +23,7 @@ impl From for MacroError { fn from(err: AztecMacroError) -> Self { match err { AztecMacroError::AztecDepNotFound {} => MacroError { - primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml. For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), + primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Nargo.toml. For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), secondary_message: None, span: None, }, diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr index e1c320215de..e6a7e03fefc 100644 --- a/noir_stdlib/src/test.nr +++ b/noir_stdlib/src/test.nr @@ -4,6 +4,9 @@ unconstrained fn create_mock_oracle(name: str) -> Field {} #[oracle(set_mock_params)] unconstrained fn set_mock_params_oracle
<P>
(id: Field, params: P) {} +#[oracle(get_mock_last_params)] +unconstrained fn get_mock_last_params_oracle<P>
(id: Field) -> P {} + +#[oracle(set_mock_returns)] unconstrained fn set_mock_returns_oracle<R>(id: Field, returns: R) {} @@ -27,6 +30,10 @@ impl OracleMock { self } + + unconstrained pub fn get_last_params<P>
(self) -> P { + get_mock_last_params_oracle(self.id) + } + unconstrained pub fn returns(self, returns: R) -> Self { set_mock_returns_oracle(self.id, returns); self diff --git a/test_programs/execution_success/mock_oracle/Prover.toml b/test_programs/execution_success/mock_oracle/Prover.toml deleted file mode 100644 index 2b26a4ce471..00000000000 --- a/test_programs/execution_success/mock_oracle/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "10" - diff --git a/test_programs/execution_success/mock_oracle/src/main.nr b/test_programs/execution_success/mock_oracle/src/main.nr deleted file mode 100644 index 90fca7993cc..00000000000 --- a/test_programs/execution_success/mock_oracle/src/main.nr +++ /dev/null @@ -1,27 +0,0 @@ -use dep::std::test::OracleMock; - -struct Point { - x: Field, - y: Field, -} - -#[oracle(foo)] -unconstrained fn foo_oracle(_point: Point, _array: [Field; 4]) -> Field {} - -unconstrained fn main() { - let array = [1, 2, 3, 4]; - let another_array = [4, 3, 2, 1]; - let point = Point { x: 14, y: 27 }; - - OracleMock::mock("foo").returns(42).times(1); - let mock = OracleMock::mock("foo").returns(0); - assert_eq(42, foo_oracle(point, array)); - assert_eq(0, foo_oracle(point, array)); - mock.clear(); - - OracleMock::mock("foo").with_params((point, array)).returns(10); - OracleMock::mock("foo").with_params((point, another_array)).returns(20); - assert_eq(10, foo_oracle(point, array)); - assert_eq(20, foo_oracle(point, another_array)); -} - diff --git a/test_programs/execution_success/mock_oracle/Nargo.toml b/test_programs/noir_test_success/mock_oracle/Nargo.toml similarity index 57% rename from test_programs/execution_success/mock_oracle/Nargo.toml rename to test_programs/noir_test_success/mock_oracle/Nargo.toml index b2916487e8c..428e965899c 100644 --- a/test_programs/execution_success/mock_oracle/Nargo.toml +++ b/test_programs/noir_test_success/mock_oracle/Nargo.toml @@ -2,5 +2,6 @@ name = "mock_oracle" type = "bin" authors = [""] +compiler_version = ">=0.23.0" -[dependencies] +[dependencies] \ No newline at end of file diff --git a/test_programs/noir_test_success/mock_oracle/Prover.toml b/test_programs/noir_test_success/mock_oracle/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_programs/noir_test_success/mock_oracle/src/main.nr b/test_programs/noir_test_success/mock_oracle/src/main.nr new file mode 100644 index 00000000000..d840ffaef66 --- /dev/null +++ b/test_programs/noir_test_success/mock_oracle/src/main.nr @@ -0,0 +1,130 @@ +use dep::std::test::OracleMock; + +struct Point { + x: Field, + y: Field, +} + +impl Eq for Point { + fn eq(self, other: Point) -> bool { + (self.x == other.x) & (self.y == other.y) + } +} + +#[oracle(void_field)] +unconstrained fn void_field_oracle() -> Field {} + +unconstrained fn void_field() -> Field { + void_field_oracle() +} + +#[oracle(field_field)] +unconstrained fn field_field_oracle(_x: Field) -> Field {} + +unconstrained fn field_field(x: Field) -> Field { + field_field_oracle(x) +} + +#[oracle(struct_field)] +unconstrained fn struct_field_oracle(_point: Point, _array: [Field; 4]) -> Field {} + +unconstrained fn struct_field(point: Point, array: [Field; 4]) -> Field { + struct_field_oracle(point, array) +} + +#[test(should_fail)] +fn test_mock_no_returns() { + OracleMock::mock("void_field"); + void_field(); // Some return value must be set +} + +#[test] +fn test_mock() { + OracleMock::mock("void_field").returns(10); + assert_eq(void_field(), 10); +} + +#[test] +fn test_multiple_mock() { + let first_mock = 
OracleMock::mock("void_field").returns(10); + OracleMock::mock("void_field").returns(42); + + // The mocks are searched for in creation order, so the first one prevents the second from being called. + assert_eq(void_field(), 10); + + first_mock.clear(); + assert_eq(void_field(), 42); +} + +#[test] +fn test_multiple_mock_times() { + OracleMock::mock("void_field").returns(10).times(2); + OracleMock::mock("void_field").returns(42); + + assert_eq(void_field(), 10); + assert_eq(void_field(), 10); + assert_eq(void_field(), 42); +} + +#[test] +fn test_mock_with_params() { + OracleMock::mock("field_field").with_params((5,)).returns(10); + assert_eq(field_field(5), 10); +} + +#[test] +fn test_multiple_mock_with_params() { + OracleMock::mock("field_field").with_params((5,)).returns(10); + OracleMock::mock("field_field").with_params((7,)).returns(14); + + assert_eq(field_field(5), 10); + assert_eq(field_field(7), 14); +} + +#[test] +fn test_mock_last_params() { + let mock = OracleMock::mock("field_field").returns(10); + assert_eq(field_field(5), 10); + + assert_eq(mock.get_last_params(), 5); +} + +#[test] +fn test_mock_last_params_many_calls() { + let mock = OracleMock::mock("field_field").returns(10); + assert_eq(field_field(5), 10); + assert_eq(field_field(7), 10); + + assert_eq(mock.get_last_params(), 7); +} + +#[test] +fn test_mock_struct_field() { + // Combination of simpler test cases + + let array = [1, 2, 3, 4]; + let another_array = [4, 3, 2, 1]; + let point = Point { x: 14, y: 27 }; + + OracleMock::mock("struct_field").returns(42).times(2); + let timeless_mock = OracleMock::mock("struct_field").returns(0); + + assert_eq(42, struct_field(point, array)); + assert_eq(42, struct_field(point, array)); + // The times(2) mock is now cleared + + assert_eq(0, struct_field(point, array)); + + let last_params: (Point, [Field; 4]) = timeless_mock.get_last_params(); + assert_eq(last_params.0, point); + assert_eq(last_params.1, array); + + // We clear the mock with no times() to allow other mocks to be callable + timeless_mock.clear(); + + OracleMock::mock("struct_field").with_params((point, array)).returns(10); + OracleMock::mock("struct_field").with_params((point, another_array)).returns(20); + assert_eq(10, struct_field(point, array)); + assert_eq(20, struct_field(point, another_array)); +} + diff --git a/tooling/acvm_cli/src/cli/execute_cmd.rs b/tooling/acvm_cli/src/cli/execute_cmd.rs index 86e7277451f..4e36dbd1f22 100644 --- a/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -6,11 +6,10 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; -use crate::cli::fs::witness::save_witness_to_dir; use crate::errors::CliError; use nargo::ops::{execute_program, DefaultForeignCallExecutor}; -use super::fs::witness::create_output_witness_string; +use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; /// Executes a circuit to calculate its return value #[derive(Debug, Clone, Args)] @@ -46,9 +45,9 @@ fn run_command(args: ExecuteCommand) -> Result { )?; if args.output_witness.is_some() { save_witness_to_dir( - &output_witness_string, - &args.working_directory, + output_witness, &args.output_witness.unwrap(), + &args.working_directory, )?; } Ok(output_witness_string) diff --git a/tooling/acvm_cli/src/cli/fs/witness.rs b/tooling/acvm_cli/src/cli/fs/witness.rs index 2daaa5a3a58..30ef4278f4b 100644 --- a/tooling/acvm_cli/src/cli/fs/witness.rs +++ 
b/tooling/acvm_cli/src/cli/fs/witness.rs @@ -5,24 +5,29 @@ use std::{ path::{Path, PathBuf}, }; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use crate::errors::{CliError, FilesystemError}; -/// Saves the provided output witnesses to a toml file created at the given location -pub(crate) fn save_witness_to_dir>( - output_witness: &String, - witness_dir: P, - file_name: &String, -) -> Result { - let witness_path = witness_dir.as_ref().join(file_name); +fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { + std::fs::create_dir_all(named_dir) + .unwrap_or_else(|_| panic!("could not create the `{name}` directory")); + + PathBuf::from(named_dir) +} - let mut file = File::create(&witness_path) - .map_err(|_| FilesystemError::OutputWitnessCreationFailed(file_name.clone()))?; - write!(file, "{}", output_witness) - .map_err(|_| FilesystemError::OutputWitnessWriteFailed(file_name.clone()))?; +fn write_to_file(bytes: &[u8], path: &Path) -> String { + let display = path.display(); - Ok(witness_path) + let mut file = match File::create(path) { + Err(why) => panic!("couldn't create {display}: {why}"), + Ok(file) => file, + }; + + match file.write_all(bytes) { + Err(why) => panic!("couldn't write to {display}: {why}"), + Ok(_) => display.to_string(), + } } /// Creates a toml representation of the provided witness map @@ -34,3 +39,19 @@ pub(crate) fn create_output_witness_string(witnesses: &WitnessMap) -> Result>( + witnesses: WitnessStack, + witness_name: &str, + witness_dir: P, +) -> Result { + create_named_dir(witness_dir.as_ref(), "witness"); + let witness_path = witness_dir.as_ref().join(witness_name).with_extension("gz"); + + let buf: Vec = witnesses + .try_into() + .map_err(|_op| FilesystemError::OutputWitnessCreationFailed(witness_name.to_string()))?; + write_to_file(buf.as_slice(), &witness_path); + + Ok(witness_path) +} diff --git a/tooling/acvm_cli/src/errors.rs b/tooling/acvm_cli/src/errors.rs index 923046410ea..8bc79347159 100644 --- a/tooling/acvm_cli/src/errors.rs +++ b/tooling/acvm_cli/src/errors.rs @@ -20,9 +20,6 @@ pub(crate) enum FilesystemError { #[error(" Error: failed to create output witness file {0}.")] OutputWitnessCreationFailed(String), - - #[error(" Error: failed to write output witness file {0}.")] - OutputWitnessWriteFailed(String), } #[derive(Debug, Error)] diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index ea67f17af2a..bc91929e5e7 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -75,6 +75,7 @@ pub enum ForeignCall { AssertMessage, CreateMock, SetMockParams, + GetMockLastParams, SetMockReturns, SetMockTimes, ClearMock, @@ -93,6 +94,7 @@ impl ForeignCall { ForeignCall::AssertMessage => "assert_message", ForeignCall::CreateMock => "create_mock", ForeignCall::SetMockParams => "set_mock_params", + ForeignCall::GetMockLastParams => "get_mock_last_params", ForeignCall::SetMockReturns => "set_mock_returns", ForeignCall::SetMockTimes => "set_mock_times", ForeignCall::ClearMock => "clear_mock", @@ -105,6 +107,7 @@ impl ForeignCall { "assert_message" => Some(ForeignCall::AssertMessage), "create_mock" => Some(ForeignCall::CreateMock), "set_mock_params" => Some(ForeignCall::SetMockParams), + "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), "set_mock_returns" => Some(ForeignCall::SetMockReturns), "set_mock_times" => Some(ForeignCall::SetMockTimes), "clear_mock" => Some(ForeignCall::ClearMock), @@ -122,6 +125,8 
@@ struct MockedCall { name: String, /// Optionally match the parameters params: Option>, + /// The parameters with which the mock was last called + last_called_params: Option>, /// The result to return when this mock is called result: ForeignCallResult, /// How many times should this mock be called before it is removed @@ -134,6 +139,7 @@ impl MockedCall { id, name, params: None, + last_called_params: None, result: ForeignCallResult { values: vec![] }, times_left: None, } @@ -185,7 +191,11 @@ impl DefaultForeignCallExecutor { Ok((id, params)) } - fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { + fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall> { + self.mocked_responses.iter().find(|response| response.id == id) + } + + fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall> { self.mocked_responses.iter_mut().find(|response| response.id == id) } @@ -250,15 +260,27 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { } Some(ForeignCall::SetMockParams) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .params = Some(params.to_vec()); Ok(ForeignCallResult::default().into()) } + Some(ForeignCall::GetMockLastParams) => { + let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; + let mock = + self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id)); + + let last_called_params = mock + .last_called_params + .clone() + .unwrap_or_else(|| panic!("Mock {} was never called", mock.name)); + + Ok(last_called_params.into()) + } Some(ForeignCall::SetMockReturns) => { let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .result = ForeignCallResult { values: params.to_vec() }; @@ -269,7 +291,7 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { let times = params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times"); - self.find_mock_by_id(id) + self.find_mock_by_id_mut(id) .unwrap_or_else(|| panic!("Unknown mock id {}", id)) .times_left = Some(times); @@ -292,6 +314,9 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { .mocked_responses .get_mut(response_position) .expect("Invalid position of mocked response"); + + mock.last_called_params = Some(foreign_call.inputs.clone()); + let result = mock.result.values.clone(); if let Some(times_left) = &mut mock.times_left { @@ -316,7 +341,10 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { Ok(parsed_response.into()) } - (None, None) => panic!("Unknown foreign call {}", foreign_call_name), + (None, None) => panic!( + "No mock for foreign call {}({:?})", + foreign_call_name, &foreign_call.inputs + ), } } } diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs index 6f41768c1dc..7d5f07c43bf 100644 --- a/tooling/nargo_fmt/build.rs +++ b/tooling/nargo_fmt/build.rs @@ -49,28 +49,55 @@ fn generate_formatter_tests(test_file: &mut File, test_data_dir: &Path) { let output_source_path = outputs_dir.join(file_name).display().to_string(); let output_source = std::fs::read_to_string(output_source_path.clone()).unwrap(); + let skip_idempotent_test = + // TODO(https://github.com/noir-lang/noir/issues/4766): spurious trailing space + test_name == "array" || + // TODO(https://github.com/noir-lang/noir/issues/4767): pre-comment space + // TODO(https://github.com/noir-lang/noir/issues/4768): 
spurious newline + test_name == "tuple"; + write!( test_file, r##" -#[test] -fn format_{test_name}() {{ - let input = r#"{input_source}"#; - let expected_output = r#"{output_source}"#; + #[test] + fn format_{test_name}() {{ + let input = r#"{input_source}"#; + let expected_output = r#"{output_source}"#; - let (parsed_module, _errors) = noirc_frontend::parse_program(input); + let (parsed_module, _errors) = noirc_frontend::parse_program(input); - let config = nargo_fmt::Config::of("{config}").unwrap(); - let fmt_text = nargo_fmt::format(input, parsed_module, &config); + let config = nargo_fmt::Config::of("{config}").unwrap(); + let fmt_text = nargo_fmt::format(input, parsed_module, &config); - if std::env::var("UPDATE_EXPECT").is_ok() {{ - std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); - }} + if std::env::var("UPDATE_EXPECT").is_ok() {{ + std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); + }} - similar_asserts::assert_eq!(fmt_text, expected_output); -}} + similar_asserts::assert_eq!(fmt_text, expected_output); + }} "## ) .expect("Could not write templated test file."); + + if !skip_idempotent_test { + write!( + test_file, + r##" + #[test] + fn format_idempotent_{test_name}() {{ + let expected_output = r#"{output_source}"#; + + let (parsed_module, _errors) = noirc_frontend::parse_program(expected_output); + + let config = nargo_fmt::Config::of("{config}").unwrap(); + let fmt_text = nargo_fmt::format(expected_output, parsed_module, &config); + + similar_asserts::assert_eq!(fmt_text, expected_output); + }} + "## + ) + .expect("Could not write templated test file."); + } } } diff --git a/tooling/noir_js/test/node/e2e.test.ts b/tooling/noir_js/test/node/e2e.test.ts index 8921314e8ea..979841c47e6 100644 --- a/tooling/noir_js/test/node/e2e.test.ts +++ b/tooling/noir_js/test/node/e2e.test.ts @@ -1,7 +1,7 @@ import { expect } from 'chai'; import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; import { Noir } from '@noir-lang/noir_js'; -import { BarretenbergBackend as Backend } from '@noir-lang/backend_barretenberg'; +import { BarretenbergBackend as Backend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; import { CompiledCircuit } from '@noir-lang/types'; const assert_lt_program = assert_lt_json as CompiledCircuit; @@ -47,6 +47,28 @@ it('end-to-end proof creation and verification (outer) -- Program API', async () expect(isValid).to.be.true; }); +it('end-to-end proof creation and verification (outer) -- Verifier API', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + // Initialize backend + const backend = new Backend(assert_lt_program); + // Initialize program + const program = new Noir(assert_lt_program, backend); + // Generate proof + const proof = await program.generateProof(inputs); + + const verificationKey = await backend.getVerificationKey(); + + // Proof verification + const verifier = new Verifier(); + const isValid = await verifier.verifyProof(proof, verificationKey); + expect(isValid).to.be.true; +}); + // TODO: maybe switch to using assert_statement_recursive here to test both options it('end-to-end proof creation and verification (inner)', async () => { // Noir.Js part diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 98bfdf1c3a8..fefd2f6f8d9 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 
+42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.34.0", + "@aztec/bb.js": "portal:../../../../barretenberg/ts", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, @@ -57,4 +57,4 @@ "ts-node": "^10.9.1", "typescript": "5.4.2" } -} +} \ No newline at end of file diff --git a/tooling/noir_js_backend_barretenberg/src/backend.ts b/tooling/noir_js_backend_barretenberg/src/backend.ts new file mode 100644 index 00000000000..d07681dd8c1 --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -0,0 +1,143 @@ +import { decompressSync as gunzip } from 'fflate'; +import { acirToUint8Array } from './serialize.js'; +import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types'; +import { BackendOptions } from './types.js'; +import { deflattenPublicInputs } from './public_inputs.js'; +import { reconstructProofWithPublicInputs } from './verifier.js'; +import { type Barretenberg } from '@aztec/bb.js'; + +// This is the number of bytes in a UltraPlonk proof +// minus the public inputs. +const numBytesInProofWithoutPublicInputs: number = 2144; + +export class BarretenbergVerifierBackend implements VerifierBackend { + // These type assertions are used so that we don't + // have to initialize `api` and `acirComposer` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + protected api!: Barretenberg; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + protected acirComposer: any; + protected acirUncompressedBytecode: Uint8Array; + + constructor( + acirCircuit: CompiledCircuit, + protected options: BackendOptions = { threads: 1 }, + ) { + const acirBytecodeBase64 = acirCircuit.bytecode; + this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + } + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. 
Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + const api = await Barretenberg.new(this.options); + + const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); + const crs = await Crs.new(subgroupSize + 1); + await api.commonInitSlabAllocator(subgroupSize); + await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); + + this.acirComposer = await api.acirNewAcirComposer(subgroupSize); + await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); + this.api = api; + } + } + + /** @description Verifies a proof */ + async verifyProof(proofData: ProofData): Promise { + const proof = reconstructProofWithPublicInputs(proofData); + await this.instantiate(); + await this.api.acirInitVerificationKey(this.acirComposer); + return await this.api.acirVerifyProof(this.acirComposer, proof); + } + + async getVerificationKey(): Promise { + await this.instantiate(); + await this.api.acirInitVerificationKey(this.acirComposer); + return await this.api.acirGetVerificationKey(this.acirComposer); + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} + +export class BarretenbergBackend extends BarretenbergVerifierBackend implements Backend { + /** @description Generates a proof */ + async generateProof(compressedWitness: Uint8Array): Promise { + await this.instantiate(); + const proofWithPublicInputs = await this.api.acirCreateProof( + this.acirComposer, + this.acirUncompressedBytecode, + gunzip(compressedWitness), + ); + + const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; + + const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); + const proof = proofWithPublicInputs.slice(splitIndex); + const publicInputs = deflattenPublicInputs(publicInputsConcatenated); + + return { proof, publicInputs }; + } + + /** + * Generates artifacts that will be passed to a circuit that will verify this proof. + * + * Instead of passing the proof and verification key as a byte array, we pass them + * as fields which makes it cheaper to verify in a circuit. + * + * The proof that is passed here will have been created using a circuit + * that has the #[recursive] attribute on its `main` method. + * + * The number of public inputs denotes how many public inputs are in the inner proof. + * + * @example + * ```typescript + * const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); + * ``` + */ + async generateRecursiveProofArtifacts( + proofData: ProofData, + numOfPublicInputs = 0, + ): Promise<{ + proofAsFields: string[]; + vkAsFields: string[]; + vkHash: string; + }> { + await this.instantiate(); + const proof = reconstructProofWithPublicInputs(proofData); + const proofAsFields = ( + await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) + ).slice(numOfPublicInputs); + + // TODO: perhaps we should put this in the init function. Need to benchmark + // TODO how long it takes. 
+ await this.api.acirInitVerificationKey(this.acirComposer); + + // Note: If you don't init verification key, `acirSerializeVerificationKeyIntoFields`` will just hang on serialization + const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer); + + return { + proofAsFields: proofAsFields.map((p) => p.toString()), + vkAsFields: vk[0].map((vk) => vk.toString()), + vkHash: vk[1].toString(), + }; + } +} diff --git a/tooling/noir_js_backend_barretenberg/src/index.ts b/tooling/noir_js_backend_barretenberg/src/index.ts index bfdf1005a93..f28abb9a658 100644 --- a/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/tooling/noir_js_backend_barretenberg/src/index.ts @@ -1,150 +1,7 @@ -import { decompressSync as gunzip } from 'fflate'; -import { acirToUint8Array } from './serialize.js'; -import { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; -import { BackendOptions } from './types.js'; -import { deflattenPublicInputs, flattenPublicInputsAsArray } from './public_inputs.js'; -import { type Barretenberg } from '@aztec/bb.js'; - +export { BarretenbergBackend } from './backend.js'; +export { BarretenbergVerifier } from './verifier.js'; export { publicInputsToWitnessMap } from './public_inputs.js'; -// This is the number of bytes in a UltraPlonk proof -// minus the public inputs. -const numBytesInProofWithoutPublicInputs: number = 2144; - -export class BarretenbergBackend implements Backend { - // These type assertions are used so that we don't - // have to initialize `api` and `acirComposer` in the constructor. - // These are initialized asynchronously in the `init` function, - // constructors cannot be asynchronous which is why we do this. - - private api!: Barretenberg; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - private acirComposer: any; - private acirUncompressedBytecode: Uint8Array; - - constructor( - acirCircuit: CompiledCircuit, - private options: BackendOptions = { threads: 1 }, - ) { - const acirBytecodeBase64 = acirCircuit.bytecode; - this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); - } - - /** @ignore */ - async instantiate(): Promise { - if (!this.api) { - if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { - this.options.threads = navigator.hardwareConcurrency; - } else { - try { - const os = await import('os'); - this.options.threads = os.cpus().length; - } catch (e) { - console.log('Could not detect environment. 
Falling back to one thread.', e); - } - } - const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options); - const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); - const crs = await Crs.new(subgroupSize + 1); - await api.commonInitSlabAllocator(subgroupSize); - await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - - this.acirComposer = await api.acirNewAcirComposer(subgroupSize); - await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); - this.api = api; - } - } - - /** @description Generates a proof */ - async generateProof(compressedWitness: Uint8Array): Promise { - await this.instantiate(); - // TODO: Change once `@aztec/bb.js` version is updated to use methods without isRecursive flag - const proofWithPublicInputs = await this.api.acirCreateProof( - this.acirComposer, - this.acirUncompressedBytecode, - gunzip(compressedWitness), - ); - - const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; - - const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); - const proof = proofWithPublicInputs.slice(splitIndex); - const publicInputs = deflattenPublicInputs(publicInputsConcatenated); - - return { proof, publicInputs }; - } - - /** - * Generates artifacts that will be passed to a circuit that will verify this proof. - * - * Instead of passing the proof and verification key as a byte array, we pass them - * as fields which makes it cheaper to verify in a circuit. - * - * The proof that is passed here will have been created using a circuit - * that has the #[recursive] attribute on its `main` method. - * - * The number of public inputs denotes how many public inputs are in the inner proof. - * - * @example - * ```typescript - * const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs); - * ``` - */ - async generateRecursiveProofArtifacts( - proofData: ProofData, - numOfPublicInputs = 0, - ): Promise<{ - proofAsFields: string[]; - vkAsFields: string[]; - vkHash: string; - }> { - await this.instantiate(); - const proof = reconstructProofWithPublicInputs(proofData); - const proofAsFields = ( - await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) - ).slice(numOfPublicInputs); - - // TODO: perhaps we should put this in the init function. Need to benchmark - // TODO how long it takes. 
- await this.api.acirInitVerificationKey(this.acirComposer); - - // Note: If you don't init verification key, `acirSerializeVerificationKeyIntoFields`` will just hang on serialization - const vk = await this.api.acirSerializeVerificationKeyIntoFields(this.acirComposer); - - return { - proofAsFields: proofAsFields.map((p) => p.toString()), - vkAsFields: vk[0].map((vk) => vk.toString()), - vkHash: vk[1].toString(), - }; - } - - /** @description Verifies a proof */ - async verifyProof(proofData: ProofData): Promise { - const proof = reconstructProofWithPublicInputs(proofData); - await this.instantiate(); - await this.api.acirInitVerificationKey(this.acirComposer); - // TODO: Change once `@aztec/bb.js` version is updated to use methods without isRecursive flag - return await this.api.acirVerifyProof(this.acirComposer, proof); - } - - async destroy(): Promise { - if (!this.api) { - return; - } - await this.api.destroy(); - } -} - -function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { - // Flatten publicInputs - const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs); - - // Concatenate publicInputs and proof - const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); - - return proofWithPublicInputs; -} - // typedoc exports -export { Backend, BackendOptions, CompiledCircuit, ProofData }; +export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; +export { BackendOptions } from './types.js'; diff --git a/tooling/noir_js_backend_barretenberg/src/verifier.ts b/tooling/noir_js_backend_barretenberg/src/verifier.ts new file mode 100644 index 00000000000..fe9fa9cfffd --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/src/verifier.ts @@ -0,0 +1,78 @@ +import { ProofData } from '@noir-lang/types'; +import { BackendOptions } from './types.js'; +import { flattenPublicInputsAsArray } from './public_inputs.js'; +import { type Barretenberg } from '@aztec/bb.js'; + +export class BarretenbergVerifier { + // These type assertions are used so that we don't + // have to initialize `api` and `acirComposer` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + private api!: Barretenberg; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + private acirComposer: any; + + constructor(private options: BackendOptions = { threads: 1 }) {} + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + + // This is the number of CRS points necessary to verify a Barretenberg proof. 
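+ // (As set up below, only the G2 data from the CRS is passed to srsInitSrs and the G1 data
+ // is left empty, so zero G1 points are sufficient for verification.)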
+ const NUM_CRS_POINTS_FOR_VERIFICATION: number = 0; + const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]); + + await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION); + await api.srsInitSrs( + new RawBuffer([] /* crs.getG1Data() */), + NUM_CRS_POINTS_FOR_VERIFICATION, + new RawBuffer(crs.getG2Data()), + ); + + this.acirComposer = await api.acirNewAcirComposer(NUM_CRS_POINTS_FOR_VERIFICATION); + this.api = api; + } + } + + /** @description Verifies a proof */ + async verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise { + const { RawBuffer } = await import('@aztec/bb.js'); + + await this.instantiate(); + // The verifier can be used for a variety of ACIR programs so we should not assume that it + // is preloaded with the correct verification key. + await this.api.acirLoadVerificationKey(this.acirComposer, new RawBuffer(verificationKey)); + + const proof = reconstructProofWithPublicInputs(proofData); + return await this.api.acirVerifyProof(this.acirComposer, proof); + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} + +export function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { + // Flatten publicInputs + const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs); + + // Concatenate publicInputs and proof + const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); + + return proofWithPublicInputs; +} diff --git a/tooling/noir_js_types/src/types.ts b/tooling/noir_js_types/src/types.ts index 3a62d79a807..456e5a57f40 100644 --- a/tooling/noir_js_types/src/types.ts +++ b/tooling/noir_js_types/src/types.ts @@ -29,7 +29,17 @@ export type Abi = { return_witnesses: number[]; }; -export interface Backend { +export interface VerifierBackend { + /** + * @description Verifies a proof */ + verifyProof(proofData: ProofData): Promise; + + /** + * @description Destroys the backend */ + destroy(): Promise; +} + +export interface Backend extends VerifierBackend { /** * @description Generates a proof */ generateProof(decompressedWitness: Uint8Array): Promise; @@ -49,14 +59,6 @@ export interface Backend { /** @description A Field containing the verification key hash */ vkHash: string; }>; - - /** - * @description Verifies a proof */ - verifyProof(proofData: ProofData): Promise; - - /** - * @description Destroys the backend */ - destroy(): Promise; } /** diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index 4486a214c9c..58724dee02c 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index 
ce15f6d502e..fad5abaebba 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -5,7 +5,7 @@ // See Cargo.toml for explanation. use getrandom as _; -use acvm::acir::native_types::WitnessMap; +use acvm::acir::native_types::{WitnessMap, WitnessStack}; use iter_extended::try_btree_map; use noirc_abi::{ errors::InputParserError, @@ -113,3 +113,12 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result::from_serde(&return_struct) .map_err(|err| err.to_string().into()) } + +#[wasm_bindgen(js_name = serializeWitness)] +pub fn serialise_witness(witness_map: JsWitnessMap) -> Result, JsAbiError> { + console_error_panic_hook::set_once(); + let converted_witness: WitnessMap = witness_map.into(); + let witness_stack: WitnessStack = converted_witness.into(); + let output = witness_stack.try_into(); + output.map_err(|_| JsAbiError::new("Failed to convert to Vec".to_string())) +} diff --git a/yarn.lock b/yarn.lock index 38e13814929..b45678f5d8b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,19 +221,18 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.34.0": - version: 0.34.0 - resolution: "@aztec/bb.js@npm:0.34.0" +"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: dest/node/main.js - checksum: 9d07834d81ed19e4d6fd5c1f3b07c565648df1165c30115f020ece9660b2b8599a5ed894a2090410f14020e73dd290484b30b76c9c71e863b8390fa2b7c1b729 + bb.js: ./dest/node/main.js languageName: node - linkType: hard + linkType: soft "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4396,7 +4395,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.34.0 + "@aztec/bb.js": "portal:../../../../barretenberg/ts" "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From 44a0a9ec8d8212d3f8aaa17f1b9a8fd806401658 Mon Sep 17 00:00:00 2001 From: vezenovm Date: Mon, 15 Apr 2024 14:31:19 +0000 Subject: [PATCH 6/7] yarn again --- tooling/noir_js_backend_barretenberg/package.json | 4 ++-- yarn.lock | 13 +++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index fefd2f6f8d9..98bfdf1c3a8 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "portal:../../../../barretenberg/ts", + "@aztec/bb.js": "0.34.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, @@ -57,4 +57,4 @@ "ts-node": "^10.9.1", "typescript": "5.4.2" } -} \ No newline at end of file +} diff --git a/yarn.lock b/yarn.lock index b45678f5d8b..38e13814929 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,18 +221,19 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": - version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" +"@aztec/bb.js@npm:0.34.0": + version: 0.34.0 + resolution: "@aztec/bb.js@npm:0.34.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: ./dest/node/main.js + bb.js: dest/node/main.js + checksum: 9d07834d81ed19e4d6fd5c1f3b07c565648df1165c30115f020ece9660b2b8599a5ed894a2090410f14020e73dd290484b30b76c9c71e863b8390fa2b7c1b729 languageName: node - linkType: soft + linkType: hard "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4395,7 +4396,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": "portal:../../../../barretenberg/ts" + "@aztec/bb.js": 0.34.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 From d9cc3ed1c1cb5ff97069f036f6e65d9a11d7f59d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 15 Apr 2024 15:34:18 +0100 Subject: [PATCH 7/7] Update build.sh --- acvm-repo/acvm_js/build.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 58724dee02c..4486a214c9c 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,6 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -48,5 +49,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O +run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O
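
Taken together, the backend/verifier split above lets a proof be produced by `BarretenbergBackend` and checked later by the standalone `BarretenbergVerifier`. Below is a minimal TypeScript sketch of that flow, mirroring the "Verifier API" e2e test added in this series; the `assert_lt` artifact path and the `{ x, y }` inputs come from the test fixtures, and the `proveThenVerify` helper is purely illustrative rather than part of the changed packages.

import { Noir } from '@noir-lang/noir_js';
import { BarretenbergBackend, BarretenbergVerifier } from '@noir-lang/backend_barretenberg';
import { CompiledCircuit } from '@noir-lang/types';
import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' };

const assert_lt_program = assert_lt_json as CompiledCircuit;

async function proveThenVerify(): Promise<boolean> {
  // Prover side: the backend generates the proof and can export its verification key.
  const backend = new BarretenbergBackend(assert_lt_program);
  const program = new Noir(assert_lt_program, backend);
  const proof = await program.generateProof({ x: '2', y: '3' });
  const verificationKey = await backend.getVerificationKey();
  await backend.destroy();

  // Verifier side: no circuit or proving key needed, only the proof and the exported key.
  const verifier = new BarretenbergVerifier();
  const isValid = await verifier.verifyProof(proof, verificationKey);
  await verifier.destroy();
  return isValid;
}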