diff --git a/.github/workflows/mirror-repos.yml b/.github/workflows/mirror-repos.yml index 5e85593176d..947c8b5080f 100644 --- a/.github/workflows/mirror-repos.yml +++ b/.github/workflows/mirror-repos.yml @@ -51,7 +51,7 @@ jobs: monorepo_url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY" # list all aztec-packages tags, take the "highest" version - monorepo_tag="$(git tag --list aztec-packages-v* | sort --version-sort | tail -1)" + monorepo_tag="$(git tag --list v* | sort --version-sort | tail -1)" monorepo_protocol_circuits_path="noir-projects/noir-protocol-circuits" # take all Nargo.toml files that reference noir-protocol-circuits @@ -60,7 +60,7 @@ jobs: # match lines like this: # protocol_types = { path = "../../noir-protocol-circuits/crates/types" } # and replace with - # protocol_types = { git="https://github.com/aztecprotocol/aztec-packages", tag="aztec-packages-v0.16.9", directory="noir-projects/noir-protocol-circuits/crates/types" } + # protocol_types = { git="https://github.com/aztecprotocol/aztec-packages", tag="v0.16.9", directory="noir-projects/noir-protocol-circuits/crates/types" } for nargo_file in $nargo_files; do sed --regexp-extended --in-place \ "s;path\s*=\s*\".*noir-protocol-circuits(.*)\";git=\"$monorepo_url\", tag=\"$monorepo_tag\", directory=\"$monorepo_protocol_circuits_path\1\";" \ diff --git a/.noir-sync-commit b/.noir-sync-commit index 7040dbf0e9d..2b7ca3cb79d 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -826b18a10630471c19c25ab745f9bfe045813e69 +b3443c124b19a909bf9cb370b4e0ebc151bb6aa3 \ No newline at end of file diff --git a/.release-please-manifest.json b/.release-please-manifest.json index bec5fdbfd16..4275ff5323d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.77.0" + ".": "0.77.1" } diff --git a/.test_patterns.yml b/.test_patterns.yml index 63d4249fb71..7f1c5217173 100644 --- a/.test_patterns.yml +++ b/.test_patterns.yml @@ -8,6 +8,19 @@ # When a failing test matches one or more of the "regex" properties below, # a message is sent to slack channel #aztec3-ci tagging the owners. +names: + - adam: &adam "U04BM8H25NJ" + - alex: &alex "U05QWV669JB" + - charlie: &charlie "UKUMA5J7K" + - lasse: &lasse "U03E5SYLY3Z" + - leila: &leila "UBLTU1NJ3" + - luke: &luke "U03JYU7AQET" + - mitch: &mitch "U06AY5G1NRK" + - palla: &palla "U04TPBU26E8" + - phil: &phil "U02G4KAD57Y" + - sean: &sean "U04DT239VQU" + - tom: &tom "U04LLT331NK" + tests: # barretenberg # @@ -20,18 +33,25 @@ tests: # Expected: true - regex: "join_split_example_tests" owners: - - "U03JYU7AQET" # luke + - *luke + + # Sumcheck is failing for some reason + - regex: "barretenberg/acir_tests/run_test.sh ram_blowup_regression" + skip: true + owners: + - *tom + # noir # Something to do with how I run the tests now. Think these are fine in nextest. - regex: "noir_lsp-.* notifications::notification_tests::test_caches_open_files" skip: true owners: - - "UKUMA5J7K" # charlie + - *charlie - regex: "noir_lsp-.* requests::" skip: true owners: - - "UKUMA5J7K" # charlie + - *charlie # Sometimes see this on ARM. But not when run on it's own... # FAILED 6a60c4e796ac0aef: noir/scripts/run_test.sh debug-21ff1948430ded06 tests::debug_ram_blowup_regression (code: 101) # running 1 test @@ -47,7 +67,7 @@ tests: - regex: "tests::debug_ram_blowup_regression" skip: true owners: - - "UKUMA5J7K" # charlie + - *charlie # Seen this error on all the below. # e.g. 
to grind: seq 1 16 | parallel --bar --tag --halt now,fail=1 ci3/dump_fail "NAME_POSTFIX=_{} yarn-project/end-to-end/scripts/run_test.sh simple e2e_p2p/gossip_network >/dev/null" @@ -76,11 +96,11 @@ tests: # at Object. (e2e_p2p/reqresp.test.ts:66:13) - regex: "simple e2e_p2p/" owners: - - "U04DT239VQU" # sean + - *sean - regex: "simple e2e_fees/private_payments" owners: - - "U02G4KAD57Y" # phil + - *phil # Summary of all failing tests # FAIL ./e2e_cheat_codes.test.ts @@ -109,56 +129,56 @@ tests: # Ran all test suites matching /e2e_cheat_codes/i. - regex: "simple e2e_cheat_codes" owners: - - "U03E5SYLY3Z" # lasse + - *lasse - regex: "prover-client/src/test/bb_prover_parity.test.ts" owners: - - "U04TPBU26E8" # palla + - *palla - regex: "prover-client/src/proving_broker/broker_prover_facade.test.ts" owners: - - "U04TPBU26E8" # palla + - *palla - regex: "prover-client/src/orchestrator/orchestrator_errors.test.ts" owners: - - "U04TPBU26E8" # palla + - *palla # yarn-project tests - regex: "p2p/src/services/reqresp/reqresp.test.ts" owners: - - "U04DT239VQU" # sean + - *sean - regex: "sequencer-client/src/slasher/slasher_client.test.ts" owners: - - "U03E5SYLY3Z" # lasse + - *lasse - regex: "prover-client/src/test/bb_prover_full_rollup.test.ts" owners: - - "U04TPBU26E8" # palla + - *palla - regex: "yarn-project/kv-store" owners: - - "U05QWV669JB" # alex + - *alex # kind tests - regex: "spartan/bootstrap.sh test-kind-4epochs" owners: - - "UBLTU1NJ3" # leila + - *leila - regex: "spartan/bootstrap.sh test-prod-deployment" owners: - - "U06AY5G1NRK" # mitch + - *mitch - regex: "spartan/bootstrap.sh test-kind-upgrade-rollup-version" owners: - - "U06AY5G1NRK" # mitch + - *mitch - - regex: "spartan/bootstrap.sh test-kind" + - regex: "spartan/bootstrap.sh test-kind-transfer" owners: - - "U02G4KAD57Y" # phil - - "U06AY5G1NRK" # mitch + - *phil + - *mitch - regex: "spartan/bootstrap.sh test-local" skip: true owners: - - "U04BM8H25NJ" # adam + - *adam # Slack testing. 
- regex: "nonsense to match" owners: - - "UKUMA5J7K" # charlie + - *charlie diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e0b8c74745..2e99e97d55d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.77.1](https://github.com/AztecProtocol/aztec-packages/compare/v0.77.0...v0.77.1) (2025-03-05) + + +### Features + +* Combine group polynomials in translator by interleaving rather than concatenation ([#12343](https://github.com/AztecProtocol/aztec-packages/issues/12343)) ([c7dc549](https://github.com/AztecProtocol/aztec-packages/commit/c7dc5492c431ad6052a92d7de265f2a2e59af728)) +* Enrich env vars based on network option ([#12489](https://github.com/AztecProtocol/aztec-packages/issues/12489)) ([6921f46](https://github.com/AztecProtocol/aztec-packages/commit/6921f4674864a1ea8f1e61b96f9c3a4014a555b0)) +* tightly pack logs inside blobs ([#11752](https://github.com/AztecProtocol/aztec-packages/issues/11752)) ([b6871ce](https://github.com/AztecProtocol/aztec-packages/commit/b6871ce5487f7ab1cc27cf8777fa238028f2dc10)) +* track if spot and sanitise merge queue name ([#12432](https://github.com/AztecProtocol/aztec-packages/issues/12432)) ([7a307e7](https://github.com/AztecProtocol/aztec-packages/commit/7a307e7348d6a03fc8c25ebd587c924ad370fdeb)) + + +### Bug Fixes + +* read rollup address from registry ([#12496](https://github.com/AztecProtocol/aztec-packages/issues/12496)) ([374a5d5](https://github.com/AztecProtocol/aztec-packages/commit/374a5d5ecaf006b0cdddf69ef581237e9b6add32)), closes [#12492](https://github.com/AztecProtocol/aztec-packages/issues/12492) +* release and add nightly tag flow ([#12493](https://github.com/AztecProtocol/aztec-packages/issues/12493)) ([c1daa11](https://github.com/AztecProtocol/aztec-packages/commit/c1daa11be668d5a85b39a82ce18b81745d2a283e)) +* release flow ([#12501](https://github.com/AztecProtocol/aztec-packages/issues/12501)) ([01c9795](https://github.com/AztecProtocol/aztec-packages/commit/01c9795b96df2f18d9ca60d35f7ec2d2f66396cd)) +* release part 2 ([#12502](https://github.com/AztecProtocol/aztec-packages/issues/12502)) ([ce9c454](https://github.com/AztecProtocol/aztec-packages/commit/ce9c4541d04d6e288ea6bffa18c2621ac23f9079)) +* update bbup to match new release naming ([#12495](https://github.com/AztecProtocol/aztec-packages/issues/12495)) ([80c9b4e](https://github.com/AztecProtocol/aztec-packages/commit/80c9b4e2e34e70d423622ea0391a6d0a1785ddf9)) + + +### Miscellaneous + +* clean env vars ([#12356](https://github.com/AztecProtocol/aztec-packages/issues/12356)) ([ec2ce9a](https://github.com/AztecProtocol/aztec-packages/commit/ec2ce9a37e489aadb8b48d801f6fb70ee6b42003)) +* fix a bunch of trait import issues ([#12431](https://github.com/AztecProtocol/aztec-packages/issues/12431)) ([3b981f9](https://github.com/AztecProtocol/aztec-packages/commit/3b981f9217f9b859bdfbcdba2f5c080392c98da6)) +* Fix MEMORY argument to yarn project test run ([#12488](https://github.com/AztecProtocol/aztec-packages/issues/12488)) ([8932dd6](https://github.com/AztecProtocol/aztec-packages/commit/8932dd69bfd9579a3d350fa1557f5bee54616289)) +* force release-please PR for 0.77.1 ([e22ac0e](https://github.com/AztecProtocol/aztec-packages/commit/e22ac0ebf990381137c659b727e6aac9a1d30df2)) +* parallelise interleaving and remove mentions of concatenation ([#12373](https://github.com/AztecProtocol/aztec-packages/issues/12373)) ([aaef150](https://github.com/AztecProtocol/aztec-packages/commit/aaef150f70197c9c59fafc06bd54eb7415185541)) +* repair release-please PR for 0.77.1 
([b11c211](https://github.com/AztecProtocol/aztec-packages/commit/b11c2117713bd38028975373b6dc8726f5c4214b)) + ## [0.77.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.76.4...aztec-packages-v0.77.0) (2025-02-14) diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 43880befbe3..9dac6786db7 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "acir" @@ -10,7 +10,12 @@ dependencies = [ "base64 0.21.7", "bincode", "brillig", + "color-eyre", "flate2", + "noir_protobuf", + "prost", + "prost-build", + "protoc-prebuilt", "serde", "serde-big-array", "strum", @@ -61,6 +66,15 @@ dependencies = [ "thiserror", ] +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + [[package]] name = "adler" version = "1.0.2" @@ -145,7 +159,7 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -155,9 +169,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.52.0", ] +[[package]] +name = "anyhow" +version = "1.0.97" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" + [[package]] name = "ark-bn254" version = "0.5.0" @@ -217,7 +237,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -230,7 +250,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -269,7 +289,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -316,6 +336,21 @@ dependencies = [ "serde_json", ] +[[package]] +name = "backtrace" +version = "0.3.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + [[package]] name = "base16ct" version = "0.1.1" @@ -349,6 +384,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bitflags" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" + [[package]] name = "blake2" version = "0.10.6" @@ -411,11 +452,20 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + [[package]] name = "cc" -version = "1.1.6" +version = "1.2.16" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2aba8f4e9906c7ce3c73463f62a7f0c65183ada1a2d47e397cc8810827f9694f" +checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" +dependencies = [ + "shlex", +] [[package]] name = "cfg-if" @@ -457,6 +507,33 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "color-eyre" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" +dependencies = [ + "backtrace", + "color-spantrace", + "eyre", + "indenter", + "once_cell", + "owo-colors", + "tracing-error", +] + +[[package]] +name = "color-spantrace" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +dependencies = [ + "once_cell", + "owo-colors", + "tracing-core", + "tracing-error", +] + [[package]] name = "colorchoice" version = "1.0.1" @@ -499,6 +576,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "crypto-bigint" version = "0.4.9" @@ -542,7 +625,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -553,7 +636,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -587,6 +670,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "ecdsa" version = "0.14.8" @@ -608,7 +702,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -654,7 +748,7 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -686,6 +780,32 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + [[package]] name = "ff" version = "0.12.1" @@ -696,6 +816,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "flate2" version = "1.0.30" @@ -721,6 +847,15 @@ version = "1.0.7" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + [[package]] name = "fxhash" version = "0.2.1" @@ -751,6 +886,12 @@ dependencies = [ "wasi", ] +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + [[package]] name = "group" version = "0.12.1" @@ -768,12 +909,6 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" - [[package]] name = "hashbrown" version = "0.15.2" @@ -833,12 +968,157 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indenter" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" + [[package]] name = "indexmap" version = "1.9.3" @@ -852,12 +1132,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "serde", ] @@ -916,6 +1196,12 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + [[package]] name = "libaes" version = "0.7.0" @@ -928,6 +1214,18 @@ version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "litemap" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" + [[package]] name = "log" version = "0.4.22" @@ -949,6 +1247,20 @@ dependencies = [ "adler", ] +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + +[[package]] +name = "noir_protobuf" +version = "1.0.0-beta.3" +dependencies = [ + "color-eyre", + "prost", +] + [[package]] name = "noirc_errors" version = "1.0.0-beta.3" @@ -1008,12 +1320,27 @@ dependencies = [ "autocfg", ] +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" +[[package]] +name = "owo-colors" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" + [[package]] name = "p256" version = "0.11.1" @@ -1031,6 +1358,22 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset", + "indexmap 2.7.1", +] + [[package]] name = "pin-project-lite" version = "0.2.14" @@ -1059,6 +1402,16 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "prettyplease" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" +dependencies = [ + "proc-macro2", + "syn 2.0.87", +] + [[package]] name = "proc-macro2" version = "1.0.86" @@ -1068,6 +1421,68 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" +dependencies = [ + "heck", + "itertools", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 2.0.87", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "prost-types" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" +dependencies = [ + "prost", +] + +[[package]] +name = "protoc-prebuilt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d85d4641fe3b8c6e853dfd09fe35379bc6b6e66bd692ac29ed4f7087de69ed5" +dependencies = [ + "ureq", + "zip", +] + [[package]] name = "quote" version = "1.0.36" @@ -1146,6 +1561,71 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ring" +version = "0.17.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9b823fa29b721a59671b41d6b06e66b29e0628e207e8b1c3ceeda701ec928d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + 
"libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.23.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.18" @@ -1198,7 +1678,7 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -1222,7 +1702,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.6", + "indexmap 2.7.1", "serde", "serde_derive", "serde_json", @@ -1239,7 +1719,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -1253,6 +1733,21 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signature" version = "1.6.4" @@ -1263,6 +1758,12 @@ dependencies = [ "rand_core", ] +[[package]] +name = "smallvec" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" + [[package]] name = "spki" version = "0.6.0" @@ -1273,6 +1774,12 @@ dependencies = [ "der", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "strsim" version = "0.11.1" @@ -1317,15 +1824,39 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.72" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "tempfile" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -1352,7 +1883,17 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", ] [[package]] @@ -1386,6 +1927,16 @@ dependencies = [ "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tracing" version = "0.1.40" @@ -1405,7 +1956,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", ] [[package]] @@ -1415,6 +1966,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-error" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db" +dependencies = [ + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "sharded-slab", + "thread_local", + "tracing-core", ] [[package]] @@ -1435,12 +2008,62 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "ureq" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" +dependencies = [ + "base64 0.22.1", + "log", + "once_cell", + "rustls", + "rustls-pki-types", + "url", + "webpki-roots", +] + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "version_check" version = "0.9.4" @@ -1474,7 +2097,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", "wasm-bindgen-shared", ] @@ -1496,7 +2119,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1507,13 +2130,22 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +[[package]] +name = "webpki-roots" +version = "0.26.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "winapi-util" version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -1534,6 +2166,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -1598,6 +2239,42 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -1615,7 +2292,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] 
+name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", + "synstructure", ] [[package]] @@ -1635,5 +2333,39 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.72", + "syn 2.0.87", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "zip" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", ] diff --git a/avm-transpiler/bootstrap.sh b/avm-transpiler/bootstrap.sh index 89921df52a9..474f97eb35e 100755 --- a/avm-transpiler/bootstrap.sh +++ b/avm-transpiler/bootstrap.sh @@ -15,7 +15,7 @@ function build { echo_header "avm-transpiler build" artifact=avm-transpiler-$hash.tar.gz if ! cache_download $artifact; then - denoise "cargo build --release" + denoise "cargo build --release --locked" denoise "cargo fmt --check" denoise "cargo clippy" cache_upload $artifact target/release/avm-transpiler diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run index 054a6164626..d374e63ded7 100755 --- a/aztec-up/bin/.aztec-run +++ b/aztec-up/bin/.aztec-run @@ -110,9 +110,21 @@ for env in ${ENV_VARS_TO_INJECT:-}; do fi done +arg_port_assignment="" + # Dynamic port assignment. if [ -n "${AZTEC_PORT:-}" ]; then - arg_port_assignment="-p $AZTEC_PORT:$AZTEC_PORT" + arg_port_assignment+=" -p $AZTEC_PORT:$AZTEC_PORT " +fi + +if [ -n "${P2P_TCP_LISTEN_ADDR:-}" ]; then + P2P_TCP_PORT=${P2P_TCP_LISTEN_ADDR#*:} + arg_port_assignment+=" -p $P2P_TCP_PORT:$P2P_TCP_PORT " +fi + +if [ -n "${P2P_UDP_LISTEN_ADDR:-}" ]; then + P2P_UDP_PORT=${P2P_UDP_LISTEN_ADDR#*:} + arg_port_assignment+=" -p $P2P_UDP_PORT:$P2P_UDP_PORT/udp " fi # For debugging the aztec-up scripts, can be useful to mount local code into container. @@ -120,9 +132,14 @@ if [ "${MOUNT_LOCAL_CODE:-0}" -eq 1 ]; then arg_volume_mounts=" -v $PWD/..:/usr/src:ro" fi +# Container name. +if [ -n "$CONTAINER_NAME" ]; then + arg_container_name="--name $CONTAINER_NAME" +fi + function run { docker run \ - --name $CONTAINER_NAME \ + ${arg_container_name:-} \ --rm \ --workdir "$PWD" \ ${arg_interactive:-} \ diff --git a/aztec-up/bin/aztec b/aztec-up/bin/aztec index 1380532fb11..316b83dfdcb 100755 --- a/aztec-up/bin/aztec +++ b/aztec-up/bin/aztec @@ -40,7 +40,7 @@ case ${1:-} in # Should this just be aztec-test? It's like, a new command that doesn't exist on aztec cli. # Or just make this a first class command on aztec cli? # TODO: Need to force ipv4 here with 127.0.0.1 for some reason. TXE's not on ipv6? - exec $(dirname $0)/.aztec-run aztec-sandbox bash -c " + exec $(dirname $0)/.aztec-run "" bash -c " node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --txe --port 8081 & while ! 
nc -z 127.0.0.1 8081 &>/dev/null; do sleep 0.2; done export NARGO_FOREIGN_CALL_TIMEOUT=300000 diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index d618efc7e86..921b5003ffa 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 6f7e924b1885098c196a2d8e1053c01ac54a9d40 - parent = 518e7b9a3a148c7a856c3d5ee24d64aa4e1453af + commit = da6136a0b9081671adca9b03d47b2c1a94459d47 + parent = dada06f323795da751d0617249b86146cad46378 method = merge cmdver = 0.4.6 diff --git a/barretenberg/acir_tests/bootstrap.sh b/barretenberg/acir_tests/bootstrap.sh index f29cd20e58d..df8bfe0f29d 100755 --- a/barretenberg/acir_tests/bootstrap.sh +++ b/barretenberg/acir_tests/bootstrap.sh @@ -25,7 +25,7 @@ function build { denoise "cd ../../noir/noir-repo/test_programs/execution_success && git clean -fdx" cp -R ../../noir/noir-repo/test_programs/execution_success acir_tests # Running these requires extra gluecode so they're skipped. - rm -rf acir_tests/{diamond_deps_0,workspace,workspace_default_member} + rm -rf acir_tests/{diamond_deps_0,workspace,workspace_default_member,regression_7323} # TODO(https://github.com/AztecProtocol/barretenberg/issues/1108): problem regardless the proof system used # TODO: Check if resolved. Move to .test_skip_patterns if not. rm -rf acir_tests/regression_5045 diff --git a/barretenberg/cpp/pil/vm2/instr_fetching.pil b/barretenberg/cpp/pil/vm2/instr_fetching.pil index 41808a89e18..f7844dc34fd 100644 --- a/barretenberg/cpp/pil/vm2/instr_fetching.pil +++ b/barretenberg/cpp/pil/vm2/instr_fetching.pil @@ -30,6 +30,8 @@ pol commit op1, op2, op3, op4, op5, op6, op7; // Wire to execution opcodes translation. pol commit exec_opcode; +pol commit instr_size_in_bytes; + // Bring in the bytes from the bytecode columns. #[BYTES_FROM_BC_DEC] sel { @@ -82,6 +84,7 @@ pol commit sel_op_dc_17; sel { bd0, exec_opcode, + instr_size_in_bytes, sel_op_dc_0, sel_op_dc_1, sel_op_dc_2, sel_op_dc_3, sel_op_dc_4, sel_op_dc_5, sel_op_dc_6, sel_op_dc_7, sel_op_dc_8, sel_op_dc_9, sel_op_dc_10, sel_op_dc_11, @@ -91,6 +94,7 @@ sel { precomputed.sel_range_wire_opcode { precomputed.clk, precomputed.exec_opcode, + precomputed.instr_size_in_bytes, precomputed.sel_op_dc_0, precomputed.sel_op_dc_1, precomputed.sel_op_dc_2, precomputed.sel_op_dc_3, precomputed.sel_op_dc_4, precomputed.sel_op_dc_5, precomputed.sel_op_dc_6, precomputed.sel_op_dc_7, precomputed.sel_op_dc_8, precomputed.sel_op_dc_9, precomputed.sel_op_dc_10, precomputed.sel_op_dc_11, @@ -103,11 +107,19 @@ precomputed.sel_range_wire_opcode { // Remark: Upper-casing the alias needs to be edited manually (not code-generated)! 
pol SEL_OP_DC_18 = sel_op_dc_2 + sel_op_dc_6; +#[INDIRECT_BYTES_DECOMPOSITION] indirect = sel_op_dc_0 * (bd1 * 2**8 + bd2 * 2**0) + SEL_OP_DC_18 * (bd1 * 2**0); +#[OP1_BYTES_DECOMPOSITION] op1 = sel_op_dc_0 * (bd3 * 2**8 + bd4 * 2**0) + sel_op_dc_2 * (bd2 * 2**8 + bd3 * 2**0) + sel_op_dc_6 * (bd2 * 2**0) + sel_op_dc_15 * (bd1 * 2**24 + bd2 * 2**16 + bd3 * 2**8 + bd4 * 2**0); +#[OP2_BYTES_DECOMPOSITION] op2 = sel_op_dc_0 * (bd5 * 2**8 + bd6 * 2**0) + sel_op_dc_3 * (bd4 * 2**8 + bd5 * 2**0) + sel_op_dc_6 * (bd3 * 2**0) + sel_op_dc_8 * (bd4 * 2**0) + sel_op_dc_16 * (bd4 * 2**24 + bd5 * 2**16 + bd6 * 2**8 + bd7 * 2**0); +#[OP3_BYTES_DECOMPOSITION] op3 = sel_op_dc_0 * (bd7 * 2**8 + bd8 * 2**0) + sel_op_dc_4 * (bd6 * 2**8 + bd7 * 2**0) + sel_op_dc_9 * (bd5 * 2**248 + bd6 * 2**240 + bd7 * 2**232 + bd8 * 2**224 + bd9 * 2**216 + bd10 * 2**208 + bd11 * 2**200 + bd12 * 2**192 + bd13 * 2**184 + bd14 * 2**176 + bd15 * 2**168 + bd16 * 2**160 + bd17 * 2**152 + bd18 * 2**144 + bd19 * 2**136 + bd20 * 2**128 + bd21 * 2**120 + bd22 * 2**112 + bd23 * 2**104 + bd24 * 2**96 + bd25 * 2**88 + bd26 * 2**80 + bd27 * 2**72 + bd28 * 2**64 + bd29 * 2**56 + bd30 * 2**48 + bd31 * 2**40 + bd32 * 2**32 + bd33 * 2**24 + bd34 * 2**16 + bd35 * 2**8 + bd36 * 2**0) + sel_op_dc_10 * (bd5 * 2**120 + bd6 * 2**112 + bd7 * 2**104 + bd8 * 2**96 + bd9 * 2**88 + bd10 * 2**80 + bd11 * 2**72 + bd12 * 2**64 + bd13 * 2**56 + bd14 * 2**48 + bd15 * 2**40 + bd16 * 2**32 + bd17 * 2**24 + bd18 * 2**16 + bd19 * 2**8 + bd20 * 2**0) + sel_op_dc_11 * (bd5 * 2**56 + bd6 * 2**48 + bd7 * 2**40 + bd8 * 2**32 + bd9 * 2**24 + bd10 * 2**16 + bd11 * 2**8 + bd12 * 2**0) + sel_op_dc_12 * (bd5 * 2**24 + bd6 * 2**16 + bd7 * 2**8 + bd8 * 2**0) + sel_op_dc_13 * (bd5 * 2**8 + bd6 * 2**0) + sel_op_dc_14 * (bd4 * 2**0) + sel_op_dc_17 * (bd6 * 2**0); +#[OP4_BYTES_DECOMPOSITION] op4 = sel_op_dc_0 * (bd9 * 2**8 + bd10 * 2**0) + sel_op_dc_5 * (bd8 * 2**8 + bd9 * 2**0) + sel_op_dc_7 * (bd8 * 2**0); +#[OP5_BYTES_DECOMPOSITION] op5 = sel_op_dc_0 * (bd11 * 2**8 + bd12 * 2**0); +#[OP6_BYTES_DECOMPOSITION] op6 = sel_op_dc_1 * (bd13 * 2**8 + bd14 * 2**0); +#[OP7_BYTES_DECOMPOSITION] op7 = sel_op_dc_1 * (bd15 * 2**8 + bd16 * 2**0); diff --git a/barretenberg/cpp/pil/vm2/precomputed.pil b/barretenberg/cpp/pil/vm2/precomputed.pil index 7067e53ea0d..9fbc178944d 100644 --- a/barretenberg/cpp/pil/vm2/precomputed.pil +++ b/barretenberg/cpp/pil/vm2/precomputed.pil @@ -75,6 +75,7 @@ pol constant sel_op_dc_16; pol constant sel_op_dc_17; pol constant exec_opcode; +pol constant instr_size_in_bytes; // Toggle the rows which index (clk) is equal to a wire opcode // Is used to lookup into the wire instruction spec table which contains the operand decomposition diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 65067081872..1e6ae14d960 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -10,7 +10,9 @@ using namespace bb; -const char* BB_VERSION_PLACEHOLDER = "00000000.00000000.00000000"; +// This is updated in-place by sed during the release process. This prevents +// the version string from needing to be present at build-time, simplifying e.g. caching. +const char* const BB_VERSION_PLACEHOLDER = "00000000.00000000.00000000"; // TODO(https://github.com/AztecProtocol/barretenberg/issues/1257): Remove unused/seemingly unnecessary flags. // TODO(https://github.com/AztecProtocol/barretenberg/issues/1258): Improve defaults. 
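The comment above only states that BB_VERSION_PLACEHOLDER is rewritten in-place by sed as a pre-release step, so the version string never has to exist at build time. A minimal sketch of what such a stamping step could look like, as a standalone script: the script name, binary path, and padding scheme are assumptions; only the 26-character placeholder string itself comes from this diff.

#!/usr/bin/env bash
# Hypothetical release-time stamping of the version placeholder (sketch only;
# the binary path build/bin/bb is an assumption, not taken from this diff).
set -euo pipefail

placeholder="00000000.00000000.00000000"
version="${1:?usage: stamp-version.sh <version>}"

# A longer replacement cannot fit inside the placeholder's bytes.
if (( ${#version} > ${#placeholder} )); then
  echo "version '$version' is longer than the placeholder" >&2
  exit 1
fi

# Patching a compiled binary in place must not change its size, so pad the
# version out to the placeholder's exact byte length before substituting.
padded=$(printf '%-*s' "${#placeholder}" "$version")
sed -i "s/$placeholder/$padded/" build/bin/bb

Keeping the replacement the same width as the placeholder is what makes in-place patching of an already-built (and cached) binary safe; the trailing padding is typically harmless in --version output.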
@@ -19,7 +21,7 @@ const char* BB_VERSION_PLACEHOLDER = "00000000.00000000.00000000"; void print_active_subcommands(const CLI::App& app, const std::string& prefix = "bb command: ") { // get_subcommands() returns a vector of pointers to subcommands - for (auto subcmd : app.get_subcommands()) { + for (auto* subcmd : app.get_subcommands()) { // Check if this subcommand was activated (nonzero count) if (subcmd->count() > 0) { vinfo(prefix, subcmd->get_name()); @@ -231,9 +233,9 @@ int main(int argc, char* argv[]) add_crs_path_option(&app); /*************************************************************************************************************** - * Subcommand: version + * Builtin flag: --version ***************************************************************************************************************/ - CLI::App* version = app.add_subcommand("version", "Print the version string."); + app.set_version_flag("--version", BB_VERSION_PLACEHOLDER, "Print the version string."); /*************************************************************************************************************** * Subcommand: check @@ -690,14 +692,8 @@ int main(int argc, char* argv[]) }; try { - if (version->parsed()) { - // Placeholder that we replace inside the binary as a pre-release step. - // Compared to the prevs CMake injection strategy, this avoids full rebuilds. - std::cout << BB_VERSION_PLACEHOLDER << std::endl; - return 0; - } // ULTRA PLONK - else if (OLD_API_gates->parsed()) { + if (OLD_API_gates->parsed()) { gate_count(bytecode_path, flags.recursive, flags.honk_recursion, true); } else if (OLD_API_prove->parsed()) { prove_ultra_plonk(bytecode_path, witness_path, plonk_prove_output_path, flags.recursive); @@ -780,4 +776,5 @@ int main(int argc, char* argv[]) std::cerr << err.what() << std::endl; return 1; } + return 0; } diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt index dae39eee2de..3b7f86dca34 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(boomerang_value_detection stdlib_circuit_builders circuit_checker stdlib_primitives numeric stdlib_aes128 stdlib_sha256 stdlib_blake2s stdlib_blake3s) \ No newline at end of file +barretenberg_module(boomerang_value_detection stdlib_circuit_builders circuit_checker stdlib_primitives numeric stdlib_aes128 stdlib_sha256 stdlib_blake2s stdlib_blake3s stdlib_poseidon2 stdlib_primitives) diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp index 03c31e53137..34b6c183782 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp @@ -1,27 +1,63 @@ -#include "graph.hpp" +#include "./graph.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" #include +#include #include using namespace bb::plookup; using namespace bb; +namespace cdg { + /** - * @brief this method removes duplicate variables from a gate, - * converts variables from a gate to real variables, and then - * updates variable gates count for real variable indexes + * @brief this method finds index of the block in circuit builder by comparing pointers to blocks + * @tparam FF field type + * @param ultra_builder circuit 
builder containing the blocks + * @param block block to find + * @return size_t index of the found block */ +template size_t Graph_::find_block_index(UltraCircuitBuilder& ultra_builder, const UltraBlock& block) +{ + auto blocks_data = ultra_builder.blocks.get(); + size_t index = 0; + for (size_t i = 0; i < blocks_data.size(); i++) { + if ((void*)(&blocks_data[i]) == (void*)(&block)) { + index = i; + break; + } + } + return index; +} +/** + * @brief this method processes variables from a gate by removing duplicates and updating tracking structures + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the variables + * @param gate_variables vector of variables to process + * @param gate_index index of the current gate + * @param block_idx index of the current block + * @details The method performs several operations: + * 1) Removes duplicate variables from the input vector + * 2) Converts each variable to its real index using to_real + * 3) Creates key-value pairs of (variable_index, block_index) for tracking + * 4) Updates variable_gates map with gate indices for each variable + * 5) Increments the gate count for each processed variable + */ template inline void Graph_::process_gate_variables(UltraCircuitBuilder& ultra_circuit_builder, - std::vector& gate_variables) + std::vector& gate_variables, + size_t gate_index, + size_t block_idx) { auto unique_variables = std::unique(gate_variables.begin(), gate_variables.end()); gate_variables.erase(unique_variables, gate_variables.end()); if (gate_variables.empty()) { return; } - for (size_t i = 0; i < gate_variables.size(); i++) { - gate_variables[i] = this->to_real(ultra_circuit_builder, gate_variables[i]); + for (auto& var_idx : gate_variables) { + var_idx = this->to_real(ultra_circuit_builder, var_idx); + KeyPair key = std::make_pair(var_idx, block_idx); + variable_gates[key].emplace_back(gate_index); } for (const auto& variable_index : gate_variables) { variables_gate_counts[variable_index] += 1; @@ -29,180 +65,481 @@ inline void Graph_::process_gate_variables(UltraCircuitBuilder& ultra_circui } /** - * @brief this method implements connected components from arithmetic gates - * @tparam FF - * @param ultra_circuit_builder - * @param index - * @return std::vector + * @brief this method creates connected components from arithmetic gates + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the gates + * @param index index of the current gate + * @param block_idx index of the current block + * @param blk block containing the gates + * @return std::vector> vector of connected components from the gate and minigate + * @details Processes both regular arithmetic gates and minigates, handling fixed witness gates + * and different arithmetic operations based on selector values */ - template -inline std::vector Graph_::get_arithmetic_gate_connected_component( - bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) +inline std::vector> Graph_::get_arithmetic_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t block_idx, UltraBlock& blk) { - auto& arithmetic_block = ultra_circuit_builder.blocks.arithmetic; - uint32_t left_idx = arithmetic_block.w_l()[index]; - uint32_t right_idx = arithmetic_block.w_r()[index]; - uint32_t out_idx = arithmetic_block.w_o()[index]; - uint32_t fourth_idx = arithmetic_block.w_4()[index]; - auto q_m = arithmetic_block.q_m()[index]; - auto q_1 = arithmetic_block.q_1()[index]; - auto q_2 = 
arithmetic_block.q_2()[index]; - auto q_3 = arithmetic_block.q_3()[index]; - auto q_4 = arithmetic_block.q_4()[index]; - std::vector gate_variables = {}; - if (q_m != 0 || q_1 != 1 || q_2 != 0 || q_3 != 0 || q_4 != 0) { + auto q_arith = blk.q_arith()[index]; + std::vector gate_variables; + std::vector minigate_variables; + std::vector> all_gates_variables; + if (q_arith.is_zero()) { + return {}; + } + auto q_m = blk.q_m()[index]; + auto q_1 = blk.q_1()[index]; + auto q_2 = blk.q_2()[index]; + auto q_3 = blk.q_3()[index]; + auto q_4 = blk.q_4()[index]; + + uint32_t left_idx = blk.w_l()[index]; + uint32_t right_idx = blk.w_r()[index]; + uint32_t out_idx = blk.w_o()[index]; + uint32_t fourth_idx = blk.w_4()[index]; + if (q_m.is_zero() && q_1 == 1 && q_2.is_zero() && q_3.is_zero() && q_4.is_zero() && q_arith == FF::one()) { + // this is fixed_witness gate. So, variable index contains in left wire. So, we have to take only it. + fixed_variables.insert(this->to_real(ultra_circuit_builder, left_idx)); + } else if (!q_m.is_zero() || q_1 != FF::one() || !q_2.is_zero() || !q_3.is_zero() || !q_4.is_zero()) { // this is not the gate for fix_witness, so we have to process this gate - if (arithmetic_block.q_arith()[index] > 0) { - if (q_m != 0) { - gate_variables.emplace_back(left_idx); - gate_variables.emplace_back(right_idx); - } - if (q_1 != 0) { + if (!q_m.is_zero()) { + gate_variables.emplace_back(left_idx); + gate_variables.emplace_back(right_idx); + } else { + if (!q_1.is_zero()) { gate_variables.emplace_back(left_idx); } - if (q_2 != 0) { + if (!q_2.is_zero()) { gate_variables.emplace_back(right_idx); } - if (q_3 != 0) { - gate_variables.emplace_back(out_idx); - } - if (q_4 != 0) { - gate_variables.emplace_back(fourth_idx); - } - if (arithmetic_block.q_arith()[index] == 2) { - // We have to use w_4_shift from the next gate - // if and only if the current gate isn't last, cause we can't - // look into the next gate - if (index != arithmetic_block.size() - 1) { - uint32_t fourth_shift_idx = arithmetic_block.w_4()[index + 1]; - gate_variables.emplace_back(fourth_shift_idx); - } + } + + if (!q_3.is_zero()) { + gate_variables.emplace_back(out_idx); + } + if (!q_4.is_zero()) { + gate_variables.emplace_back(fourth_idx); + } + if (q_arith == FF(2)) { + // We have to use w_4_shift from the next gate + // if and only if the current gate isn't last, cause we can't + // look into the next gate + if (index != blk.size() - 1) { + gate_variables.emplace_back(blk.w_4()[index + 1]); } - if (arithmetic_block.q_arith()[index] == 3) { - // TODO(daniel): want to process this case later - ASSERT(false); + } + if (q_arith == FF(3)) { + // In this gate mini gate is enabled, we have 2 equations: + // q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + 2 * w_4_omega = 0 + // w_1 + w_4 - w_1_omega + q_m = 0 + minigate_variables.insert(minigate_variables.end(), { left_idx, fourth_idx }); + if (index != blk.size() - 1) { + gate_variables.emplace_back(blk.w_4()[index + 1]); + minigate_variables.emplace_back(blk.w_l()[index + 1]); } } } - this->process_gate_variables(ultra_circuit_builder, gate_variables); - return gate_variables; + this->process_gate_variables(ultra_circuit_builder, gate_variables, index, block_idx); + this->process_gate_variables(ultra_circuit_builder, minigate_variables, index, block_idx); + all_gates_variables.emplace_back(gate_variables); + if (!minigate_variables.empty()) { + all_gates_variables.emplace_back(minigate_variables); + } + + return all_gates_variables; } /** * @brief this method creates 
connected components from elliptic gates - * @tparam FF - * @param ultra_circuit_builder - * @param index - * @return std::vector + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the gates + * @param index index of the current gate + * @param block_idx index of the current block + * @param blk block containing the gates + * @return std::vector vector of connected variables from the gate + * @details Handles both elliptic curve addition and doubling operations, + * collecting variables from current and next gates as needed */ - template inline std::vector Graph_::get_elliptic_gate_connected_component( - bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t block_idx, UltraBlock& blk) { - auto& elliptic_block = ultra_circuit_builder.blocks.elliptic; std::vector gate_variables = {}; - bool is_elliptic_gate = elliptic_block.q_elliptic()[index] == 1; - bool is_elliptic_add_gate = elliptic_block.q_1()[index] != 0 && elliptic_block.q_m()[index] == 0; - bool is_elliptic_dbl_gate = elliptic_block.q_1()[index] == 0 && elliptic_block.q_m()[index] == 1; - if (is_elliptic_gate) { - auto right_idx = elliptic_block.w_r()[index]; - auto out_idx = elliptic_block.w_o()[index]; + if (!blk.q_elliptic()[index].is_zero()) { + bool is_elliptic_add_gate = !blk.q_1()[index].is_zero() && blk.q_m()[index].is_zero(); + bool is_elliptic_dbl_gate = blk.q_1()[index].is_zero() && blk.q_m()[index] == FF::one(); + auto right_idx = blk.w_r()[index]; + auto out_idx = blk.w_o()[index]; gate_variables.emplace_back(right_idx); gate_variables.emplace_back(out_idx); - if (index != elliptic_block.size() - 1) { + if (index != blk.size() - 1) { if (is_elliptic_add_gate) { // if this gate is ecc_add_gate, we have to get indices x2, x3, y3, y2 from the next gate - gate_variables.emplace_back(elliptic_block.w_l()[index + 1]); - gate_variables.emplace_back(elliptic_block.w_r()[index + 1]); - gate_variables.emplace_back(elliptic_block.w_o()[index + 1]); - gate_variables.emplace_back(elliptic_block.w_4()[index + 1]); + gate_variables.emplace_back(blk.w_l()[index + 1]); + gate_variables.emplace_back(blk.w_r()[index + 1]); + gate_variables.emplace_back(blk.w_o()[index + 1]); + gate_variables.emplace_back(blk.w_4()[index + 1]); } if (is_elliptic_dbl_gate) { // if this gate is ecc_dbl_gate, we have to indices x3, y3 from right and output wires - gate_variables.emplace_back(elliptic_block.w_r()[index + 1]); - gate_variables.emplace_back(elliptic_block.w_o()[index + 1]); + gate_variables.emplace_back(blk.w_r()[index + 1]); + gate_variables.emplace_back(blk.w_o()[index + 1]); } } + this->process_gate_variables(ultra_circuit_builder, gate_variables, index, block_idx); } - this->process_gate_variables(ultra_circuit_builder, gate_variables); return gate_variables; } /** * @brief this method creates connected components from sorted constraints - * - * @tparam FF - * @param ultra_circuit_builder - * @param index - * @return std::vector + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the gates + * @param index index of the current gate + * @param block_idx index of the current block + * @param block block containing the gates + * @return std::vector vector of connected variables from the gate + * @details Processes delta range constraints by collecting all wire indices + * from the current gate */ - template inline std::vector Graph_::get_sort_constraint_connected_component( - 
bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t blk_idx, UltraBlock& block) { - auto& delta_range_block = ultra_circuit_builder.blocks.delta_range; std::vector gate_variables = {}; - if (delta_range_block.q_delta_range()[index] == 1) { - auto left_idx = delta_range_block.w_l()[index]; - auto right_idx = delta_range_block.w_r()[index]; - auto out_idx = delta_range_block.w_o()[index]; - auto fourth_idx = delta_range_block.w_4()[index]; + if (!block.q_delta_range()[index].is_zero()) { + auto left_idx = block.w_l()[index]; + auto right_idx = block.w_r()[index]; + auto out_idx = block.w_o()[index]; + auto fourth_idx = block.w_4()[index]; gate_variables.insert(gate_variables.end(), { left_idx, right_idx, out_idx, fourth_idx }); } - this->process_gate_variables(ultra_circuit_builder, gate_variables); + this->process_gate_variables(ultra_circuit_builder, gate_variables, index, blk_idx); return gate_variables; } /** * @brief this method creates connected components from plookup gates - * - * @tparam FF - * @param ultra_circuit_builder - * @param index - * @return std::vector + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the gates + * @param index index of the current gate + * @param block_idx index of the current block + * @param block block containing the gates + * @return std::vector vector of connected variables from the gate + * @details Processes plookup gates by collecting variables based on selector values, + * including variables from the next gate when necessary */ - template inline std::vector Graph_::get_plookup_gate_connected_component( - bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t blk_idx, UltraBlock& block) { std::vector gate_variables; - auto& lookup_block = ultra_circuit_builder.blocks.lookup; - auto q_2 = lookup_block.q_2()[index]; - auto q_m = lookup_block.q_m()[index]; - auto q_c = lookup_block.q_c()[index]; - auto left_idx = lookup_block.w_l()[index]; - auto right_idx = lookup_block.w_r()[index]; - auto out_idx = lookup_block.w_o()[index]; - gate_variables.emplace_back(left_idx); - gate_variables.emplace_back(right_idx); - gate_variables.emplace_back(out_idx); - if (index < lookup_block.size() - 1) { - if (q_2 != 0 || q_m != 0 || q_c != 0) { - if (q_2 != 0) { - gate_variables.emplace_back(lookup_block.w_l()[index + 1]); + auto q_lookup_type = block.q_lookup_type()[index]; + if (!q_lookup_type.is_zero()) { + auto q_2 = block.q_2()[index]; + auto q_m = block.q_m()[index]; + auto q_c = block.q_c()[index]; + auto left_idx = block.w_l()[index]; + auto right_idx = block.w_r()[index]; + auto out_idx = block.w_o()[index]; + gate_variables.emplace_back(left_idx); + gate_variables.emplace_back(right_idx); + gate_variables.emplace_back(out_idx); + if (index < block.size() - 1) { + if (!q_2.is_zero()) { + gate_variables.emplace_back(block.w_l()[index + 1]); } - if (q_m != 0) { - gate_variables.emplace_back(lookup_block.w_r()[index + 1]); + if (!q_m.is_zero()) { + gate_variables.emplace_back(block.w_r()[index + 1]); } - if (q_c != 0) { - gate_variables.emplace_back(lookup_block.w_o()[index + 1]); + if (!q_c.is_zero()) { + gate_variables.emplace_back(block.w_o()[index + 1]); } } + this->process_gate_variables(ultra_circuit_builder, gate_variables, index, blk_idx); } - this->process_gate_variables(ultra_circuit_builder, gate_variables); return gate_variables; } /** - * @brief 
Construct a new Graph from Ultra Circuit Builder - * @tparam FF - * @param ultra_circuit_constructor + * @brief this method creates connected components from poseidon2 gates + * @tparam FF field type + * @param ultra_circuit_builder circuit builder containing the gates + * @param index index of the current gate + * @param blk_idx index of the current block + * @param block block containing the gates + * @return std::vector vector of connected variables from the gate + */ +template +inline std::vector Graph_::get_poseido2s_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t blk_idx, UltraBlock& block) +{ + std::vector gate_variables; + auto internal_selector = block.q_poseidon2_internal()[index]; + auto external_selector = block.q_poseidon2_external()[index]; + if (!internal_selector.is_zero() || !external_selector.is_zero()) { + gate_variables.insert(gate_variables.end(), + { block.w_l()[index], block.w_r()[index], block.w_o()[index], block.w_4()[index] }); + if (index != block.size() - 1) { + gate_variables.insert( + gate_variables.end(), + { block.w_l()[index + 1], block.w_r()[index + 1], block.w_o()[index + 1], block.w_4()[index + 1] }); + } + this->process_gate_variables(ultra_circuit_builder, gate_variables, index, blk_idx); + } + return gate_variables; +} + +/** + * @brief this method creates connected components from auxiliary gates, including bigfield operations, + * RAM and ROM consistency checks + * @tparam FF field type + * @param ultra_builder circuit builder containing the gates + * @param index index of the current gate + * @param blk_idx index of the current block + * @param block block containing the gates + * @return std::vector vector of connected variables from the gate */ +template +inline std::vector Graph_::get_auxiliary_gate_connected_component(bb::UltraCircuitBuilder& ultra_builder, + size_t index, + size_t blk_idx, + UltraBlock& block) +{ + std::vector gate_variables; + if (!block.q_aux()[index].is_zero()) { + auto q_1 = block.q_1()[index]; + auto q_2 = block.q_2()[index]; + auto q_3 = block.q_3()[index]; + auto q_4 = block.q_4()[index]; + auto q_m = block.q_m()[index]; + auto q_arith = block.q_arith()[index]; + [[maybe_unused]] auto q_c = block.q_c()[index]; + auto w_l = block.w_l()[index]; + auto w_r = block.w_r()[index]; + auto w_o = block.w_o()[index]; + auto w_4 = block.w_4()[index]; + if (q_3 == FF::one() && q_4 == FF::one()) { + // bigfield limb accumulation 1 + ASSERT(q_arith.is_zero()); + if (index < block.size() - 1) { + gate_variables.insert(gate_variables.end(), + { w_l, w_r, w_o, w_4, block.w_l()[index + 1], block.w_r()[index + 1] }); + } + } else if (q_3 == FF::one() && q_m == FF::one()) { + ASSERT(q_arith.is_zero()); + // bigfield limb accumulation 2 + if (index < block.size() - 1) { + gate_variables.insert(gate_variables.end(), + { w_o, + w_4, + block.w_l()[index + 1], + block.w_r()[index + 1], + block.w_o()[index + 1], + block.w_4()[index + 1] }); + } + } else if (q_2 == FF::one() && (q_3 == FF::one() || q_4 == FF::one() || q_m == FF::one())) { + ASSERT(q_arith.is_zero()); + // bigfield product cases + if (index < block.size() - 1) { + std::vector limb_subproduct_vars = { + w_l, w_r, block.w_l()[index + 1], block.w_r()[index + 1] + }; + if (q_3 == FF::one()) { + // bigfield product 1 + ASSERT(q_4.is_zero() && q_m.is_zero()); + gate_variables.insert( + gate_variables.end(), limb_subproduct_vars.begin(), limb_subproduct_vars.end()); + gate_variables.insert(gate_variables.end(), { w_o, w_4 }); + } + if (q_4 
== FF::one()) {
+                    // bigfield product 2
+                    ASSERT(q_3.is_zero() && q_m.is_zero());
+                    std::vector non_native_field_gate_2 = { w_l, w_4, w_r, w_o, block.w_o()[index + 1] };
+                    gate_variables.insert(
+                        gate_variables.end(), non_native_field_gate_2.begin(), non_native_field_gate_2.end());
+                    gate_variables.emplace_back(block.w_4()[index + 1]);
+                    gate_variables.insert(
+                        gate_variables.end(), limb_subproduct_vars.begin(), limb_subproduct_vars.end());
+                }
+                if (q_m == FF::one()) {
+                    // bigfield product 3
+                    ASSERT(q_4.is_zero() && q_3.is_zero());
+                    gate_variables.insert(
+                        gate_variables.end(), limb_subproduct_vars.begin(), limb_subproduct_vars.end());
+                    gate_variables.insert(gate_variables.end(),
+                                          { w_4, block.w_o()[index + 1], block.w_4()[index + 1] });
+                }
+            }
+        } else if (q_1 == FF::one() && q_4 == FF::one()) {
+            ASSERT(q_arith.is_zero());
+            // ram timestamp check
+            if (index < block.size() - 1) {
+                gate_variables.insert(gate_variables.end(),
+                                      { block.w_r()[index + 1],
+                                        block.w_r()[index],
+                                        block.w_l()[index],
+                                        block.w_l()[index + 1],
+                                        block.w_o()[index] });
+            }
+        } else if (q_1 == FF::one() && q_2 == FF::one()) {
+            ASSERT(q_arith.is_zero());
+            // rom consistency check
+            if (index < block.size() - 1) {
+                gate_variables.insert(
+                    gate_variables.end(),
+                    { block.w_l()[index], block.w_l()[index + 1], block.w_4()[index], block.w_4()[index + 1] });
+            }
+        } else {
+            // ram consistency check
+            if (!q_arith.is_zero()) {
+                if (index < block.size() - 1) {
+                    gate_variables.insert(gate_variables.end(),
+                                          { block.w_o()[index],
+                                            block.w_4()[index],
+                                            block.w_l()[index + 1],
+                                            block.w_r()[index + 1],
+                                            block.w_o()[index + 1],
+                                            block.w_4()[index + 1] });
+                }
+            }
+        }
+    }
+    this->process_gate_variables(ultra_builder, gate_variables, index, blk_idx);
+    return gate_variables;
+}
+
+/**
+ * @brief this method gets the ROM table connected component by processing ROM transcript records
+ * @tparam FF field type
+ * @param ultra_builder circuit builder containing the gates
+ * @param rom_array ROM transcript containing records with witness indices and gate information
+ * @return std::vector vector of connected variables from ROM table gates
+ */
+template
+inline std::vector Graph_::get_rom_table_connected_component(
+    bb::UltraCircuitBuilder& ultra_builder, const UltraCircuitBuilder::RomTranscript& rom_array)
+{
+    size_t block_index = find_block_index(ultra_builder, ultra_builder.blocks.aux);
+    ASSERT(block_index == 5);
+
+    // Every RomTranscript data structure has 2 main components that are of interest to the static analyzer:
+    // 1) records contain the values that were put in the gates, so we can use them to create connections between
+    //    variables
+    // 2) states contain witness indices that we can also find in the ROM records of the RomTranscript, so we can
+    //    ignore the state of the ROM transcript, because we can still connect all variables using the variables
+    //    from the records.
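+    // As an illustration (hypothetical values): a ROM read record such as
+    //   { index_witness = 5, value_column1_witness = 9, value_column2_witness = zero_idx, record_witness = 12 }
+    // contributes the variables {5, 9, 12} in the loop below; columns equal to zero_idx are skipped so that the
+    // shared zero variable doesn't spuriously connect otherwise unrelated components.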
+    std::vector rom_table_variables;
+
+    for (const auto& record : rom_array.records) {
+        std::vector gate_variables;
+        size_t gate_index = record.gate_index;
+
+        auto q_1 = ultra_builder.blocks.aux.q_1()[gate_index];
+        auto q_2 = ultra_builder.blocks.aux.q_2()[gate_index];
+        auto q_3 = ultra_builder.blocks.aux.q_3()[gate_index];
+        auto q_4 = ultra_builder.blocks.aux.q_4()[gate_index];
+        auto q_m = ultra_builder.blocks.aux.q_m()[gate_index];
+        auto q_arith = ultra_builder.blocks.aux.q_arith()[gate_index];
+        auto q_c = ultra_builder.blocks.aux.q_c()[gate_index];
+
+        auto index_witness = record.index_witness;
+        auto vc1_witness = record.value_column1_witness; // state[0] from RomTranscript
+        auto vc2_witness = record.value_column2_witness; // state[1] from RomTranscript
+        auto record_witness = record.record_witness;
+
+        if (q_1 == FF::one() && q_m == FF::one() && q_2.is_zero() && q_3.is_zero() && q_4.is_zero() && q_c.is_zero() &&
+            q_arith.is_zero()) {
+            // By default a ROM read gate uses variables (w_1, w_2, w_3, w_4) = (index_witness, vc1_witness,
+            // vc2_witness, record_witness), so we can update all of them
+            gate_variables.emplace_back(index_witness);
+            if (vc1_witness != ultra_builder.zero_idx) {
+                gate_variables.emplace_back(vc1_witness);
+            }
+            if (vc2_witness != ultra_builder.zero_idx) {
+                gate_variables.emplace_back(vc2_witness);
+            }
+            gate_variables.emplace_back(record_witness);
+        }
+        this->process_gate_variables(ultra_builder, gate_variables, gate_index, block_index);
+        // after the process_gate_variables call, gate_variables consists of real variable indices, so we can add all
+        // of these variables to the final vector to connect all of them
+        if (!gate_variables.empty()) {
+            rom_table_variables.insert(rom_table_variables.end(), gate_variables.begin(), gate_variables.end());
+        }
+    }
+    return rom_table_variables;
+}
+
+/**
+ * @brief this method gets the RAM table connected component by processing RAM transcript records
+ * @tparam FF field type
+ * @param ultra_builder circuit builder containing the gates
+ * @param ram_array RAM transcript containing records with witness indices and gate information
+ * @return std::vector vector of connected variables from RAM table gates
+ */
+template
+inline std::vector Graph_::get_ram_table_connected_component(
+    bb::UltraCircuitBuilder& ultra_builder, const UltraCircuitBuilder::RamTranscript& ram_array)
+{
+    size_t block_index = find_block_index(ultra_builder, ultra_builder.blocks.aux);
+    ASSERT(block_index == 5);
+    std::vector ram_table_variables;
+    for (const auto& record : ram_array.records) {
+        std::vector gate_variables;
+        size_t gate_index = record.gate_index;
+
+        auto q_1 = ultra_builder.blocks.aux.q_1()[gate_index];
+        auto q_2 = ultra_builder.blocks.aux.q_2()[gate_index];
+        auto q_3 = ultra_builder.blocks.aux.q_3()[gate_index];
+        auto q_4 = ultra_builder.blocks.aux.q_4()[gate_index];
+        auto q_m = ultra_builder.blocks.aux.q_m()[gate_index];
+        auto q_arith = ultra_builder.blocks.aux.q_arith()[gate_index];
+        auto q_c = ultra_builder.blocks.aux.q_c()[gate_index];
+
+        auto index_witness = record.index_witness;
+        auto timestamp_witness = record.timestamp_witness;
+        auto value_witness = record.value_witness;
+        auto record_witness = record.record_witness;
+
+        if (q_1 == FF::one() && q_m == FF::one() && q_2.is_zero() && q_3.is_zero() && q_4.is_zero() &&
+            q_arith.is_zero() && (q_c.is_zero() || q_c == FF::one())) {
+            // By default a RAM read/write gate uses variables (w_1, w_2, w_3, w_4) = (index_witness,
+            // timestamp_witness, value_witness,
record_witness), so we can update all of them
+            gate_variables.emplace_back(index_witness);
+            if (timestamp_witness != ultra_builder.zero_idx) {
+                gate_variables.emplace_back(timestamp_witness);
+            }
+            if (value_witness != ultra_builder.zero_idx) {
+                gate_variables.emplace_back(value_witness);
+            }
+            gate_variables.emplace_back(record_witness);
+        }
+        this->process_gate_variables(ultra_builder, gate_variables, gate_index, block_index);
+        // after the process_gate_variables call, gate_variables consists of real variable indices, so we can add all
+        // of these variables to the final vector to connect all of them
+        ram_table_variables.insert(ram_table_variables.end(), gate_variables.begin(), gate_variables.end());
+    }
+    return ram_table_variables;
+}
+
+/**
+ * @brief Construct a new Graph from Ultra Circuit Builder
+ * @tparam FF field type used in the circuit
+ * @param ultra_circuit_constructor circuit builder containing all gates and variables
+ * @details This constructor initializes the graph structure by:
+ * 1) Creating data structures for tracking:
+ *    - Number of gates each variable appears in (variables_gate_counts)
+ *    - Adjacency lists for each variable (variable_adjacency_lists)
+ *    - Degree of each variable (variables_degree)
+ * 2) Processing different types of gates:
+ *    - Arithmetic gates
+ *    - Elliptic curve gates
+ *    - Plookup gates
+ *    - Poseidon2 gates
+ *    - Auxiliary gates
+ *    - Delta range gates
+ * 3) Creating connections between variables that appear in the same gate
+ * 4) Special handling for sorted constraints in delta range blocks
+ */
 template Graph_::Graph_(bb::UltraCircuitBuilder& ultra_circuit_constructor)
 {
     this->variables_gate_counts =
@@ -217,49 +554,73 @@ template Graph_::Graph_(bb::UltraCircuitBuilder& ultra_circuit
     }
     std::map constant_variable_indices = ultra_circuit_constructor.constant_variable_indices;
-    const auto& arithmetic_block = ultra_circuit_constructor.blocks.arithmetic;
-    auto arithmetic_gates_numbers = arithmetic_block.size();
-    bool arithmetic_gates_exist = arithmetic_gates_numbers > 0;
-    if (arithmetic_gates_exist) {
-        for (size_t i = 0; i < arithmetic_gates_numbers; i++) {
-            auto gate_variables = this->get_arithmetic_gate_connected_component(ultra_circuit_constructor, i);
-            this->connect_all_variables_in_vector(ultra_circuit_constructor, gate_variables, false);
-        }
-    }
-    const auto& elliptic_block = ultra_circuit_constructor.blocks.elliptic;
-    auto elliptic_gates_numbers = elliptic_block.size();
-    bool elliptic_gates_exist = elliptic_gates_numbers > 0;
-    if (elliptic_gates_exist) {
-        for (size_t i = 0; i < elliptic_gates_numbers; i++) {
-            std::vector gate_variables =
-                this->get_elliptic_gate_connected_component(ultra_circuit_constructor, i);
-            this->connect_all_variables_in_vector(ultra_circuit_constructor, gate_variables, false);
-        }
-    }
-    const auto& range_block = ultra_circuit_constructor.blocks.delta_range;
-    auto range_gates = range_block.size();
-    bool range_gates_exists = range_gates > 0;
-    if (range_gates_exists) {
-        std::vector sorted_variables;
-        for (size_t i = 0; i < range_gates; i++) {
-            auto current_gate = this->get_sort_constraint_connected_component(ultra_circuit_constructor, i);
-            if (current_gate.empty()) {
-                this->connect_all_variables_in_vector(ultra_circuit_constructor, sorted_variables, true);
-                sorted_variables.clear();
-            } else {
-                sorted_variables.insert(sorted_variables.end(), current_gate.begin(), current_gate.end());
-            }
-        }
-    }
-
-    const auto& lookup_block = ultra_circuit_constructor.blocks.lookup;
-    auto
lookup_gates = lookup_block.size(); - bool lookup_gates_exists = lookup_gates > 0; - if (lookup_gates_exists) { - for (size_t i = 0; i < lookup_gates; i++) { + auto block_data = ultra_circuit_constructor.blocks.get(); + for (size_t blk_idx = 1; blk_idx < block_data.size() - 1; blk_idx++) { + if (block_data[blk_idx].size() > 0) { + std::vector sorted_variables; + for (size_t gate_idx = 0; gate_idx < block_data[blk_idx].size(); gate_idx++) { + auto arithmetic_gates_variables = get_arithmetic_gate_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + if (!arithmetic_gates_variables.empty()) { + for (const auto& gate_variables : arithmetic_gates_variables) { + // info("size of arithmetic_gate == ", gate_variables.size()); + connect_all_variables_in_vector( + ultra_circuit_constructor, gate_variables, /*is_sorted_variables=*/false); + } + } + auto elliptic_gate_variables = get_elliptic_gate_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + connect_all_variables_in_vector( + ultra_circuit_constructor, elliptic_gate_variables, /*is_sorted_variables=*/false); + auto lookup_gate_variables = get_plookup_gate_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + connect_all_variables_in_vector( + ultra_circuit_constructor, lookup_gate_variables, /*is_sorted_variables=*/false); + auto poseidon2_gate_variables = get_poseido2s_gate_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + connect_all_variables_in_vector( + ultra_circuit_constructor, poseidon2_gate_variables, /*is_sorted_variables=*/false); + auto aux_gate_variables = get_auxiliary_gate_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + connect_all_variables_in_vector( + ultra_circuit_constructor, aux_gate_variables, /*is_sorted_variables=*/false); + if (arithmetic_gates_variables.empty() && elliptic_gate_variables.empty() && + lookup_gate_variables.empty() && poseidon2_gate_variables.empty() && aux_gate_variables.empty()) { + // if all vectors are empty it means that current block is delta range, and it needs another + // processing method + auto delta_range_gate_variables = get_sort_constraint_connected_component( + ultra_circuit_constructor, gate_idx, blk_idx, block_data[blk_idx]); + if (delta_range_gate_variables.empty()) { + connect_all_variables_in_vector( + ultra_circuit_constructor, sorted_variables, /*is_sorted_variables=*/true); + sorted_variables.clear(); + } else { + sorted_variables.insert(sorted_variables.end(), + delta_range_gate_variables.begin(), + delta_range_gate_variables.end()); + } + } + } + } + } + + const auto& rom_arrays = ultra_circuit_constructor.rom_arrays; + if (!rom_arrays.empty()) { + for (const auto& rom_array : rom_arrays) { std::vector variable_indices = - this->get_plookup_gate_connected_component(ultra_circuit_constructor, i); - this->connect_all_variables_in_vector(ultra_circuit_constructor, variable_indices, false); + this->get_rom_table_connected_component(ultra_circuit_constructor, rom_array); + this->connect_all_variables_in_vector( + ultra_circuit_constructor, variable_indices, /*is_sorted_variables=*/false); + } + } + + const auto& ram_arrays = ultra_circuit_constructor.ram_arrays; + if (!ram_arrays.empty()) { + for (const auto& ram_array : ram_arrays) { + std::vector variable_indices = + this->get_ram_table_connected_component(ultra_circuit_constructor, ram_array); + 
this->connect_all_variables_in_vector( + ultra_circuit_constructor, variable_indices, /*is_sorted_variables=*/false); } } } @@ -388,7 +749,8 @@ void Graph_::depth_first_search(const uint32_t& variable_index, /** * @brief this methond finds all connected components in the graph described by adjacency lists * @tparam FF - * @return std::vector> + * @return std::vector> list of connected components where each component is a vector of variable + * indices */ template std::vector> Graph_::find_connected_components() @@ -472,8 +834,7 @@ inline size_t Graph_::process_current_decompose_chain(bb::UltraCircuitBuilde } /** - * @brief this method gets the endpoints of the decompose chains. For that it has to clean variable_index - from unnecessary variables for example, left, right, output wires and go through all decompose chain + * @brief this method removes unnecessary variables from decompose chains * @tparam FF * @param ultra_circuit_builder * @param variables_in_one_gate @@ -522,8 +883,45 @@ inline void Graph_::remove_unnecessary_decompose_variables(bb::UltraCircuitB } } } + /** - * @brief this method removes false positive cass variables from aes plookup tables. + * @brief this method removes variables from range constraints that are not security critical + * @tparam FF field type + * @param ultra_builder circuit builder containing the range lists + * @details Right now static analyzer removes two types of variables: + * 1) Variables from delta_range_constraints created by finalize_circuit() + * 2) Variables from range_constraints created by range_constraint_into_two_limbs + */ +template +void Graph_::remove_unnecessary_range_constrains_variables(bb::UltraCircuitBuilder& ultra_builder) +{ + std::map range_lists = ultra_builder.range_lists; + std::unordered_set range_lists_tau_tags; + std::unordered_set range_lists_range_tags; + std::vector real_variable_tags = ultra_builder.real_variable_tags; + for (const auto& pair : range_lists) { + UltraCircuitBuilder::RangeList list = pair.second; + range_lists_tau_tags.insert(list.tau_tag); + range_lists_range_tags.insert(list.range_tag); + } + for (uint32_t real_index = 0; real_index < real_variable_tags.size(); real_index++) { + if (variables_in_one_gate.contains(real_index)) { + // this if helps us to remove variables from delta_range_constraints when finalize_circuit() function was + // called + if (range_lists_tau_tags.contains(real_variable_tags[real_index])) { + variables_in_one_gate.erase(real_index); + } + // this if helps us to remove variables from range_constraints when range_constraint_into_two_limbs function + // was called + if (range_lists_range_tags.contains(real_variable_tags[real_index])) { + variables_in_one_gate.erase(real_index); + } + } + } +} + +/** + * @brief this method removes false positive cases variables from aes plookup tables. * AES_SBOX_MAP, AES_SPARSE_MAP, AES_SPARSE_NORMALIZE tables are used in read_from_1_to_2_table function which * return values C2[0], so C3[0] isn't used anymore in these cases, but this situation isn't dangerous. * So, we have to remove these variables. 
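For orientation, the filtering passes above feed the analyzer's public entry points. A minimal usage sketch
(illustrative only; it mirrors the test files further down in this diff and assumes a fully built circuit):

    #include "barretenberg/boomerang_value_detection/graph.hpp"

    void analyze_circuit(bb::UltraCircuitBuilder& builder)
    {
        cdg::Graph graph(builder);
        // more than one component suggests missing connections between sub-circuits
        auto connected_components = graph.find_connected_components();
        // variables left after all removal passes appear in exactly one gate
        auto suspicious_variables = graph.show_variables_in_one_gate(builder);
        graph.print_connected_components();
    }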
@@ -553,7 +951,7 @@ inline void Graph_::remove_unnecessary_aes_plookup_variables(std::unordered_
         if (variables_gate_counts[real_out_idx] != 1 || variables_gate_counts[real_right_idx] != 1) {
             bool find_out = find_position(real_out_idx);
             auto q_c = lookup_block.q_c()[gate_index];
-            if (q_c == 0) {
+            if (q_c.is_zero()) {
                if (find_out) {
                    variables_in_one_gate.erase(real_out_idx);
                }
@@ -605,7 +1003,7 @@ inline void Graph_::remove_unnecessary_sha256_plookup_variables(std::unorder
        auto q_c = lookup_block.q_c()[gate_index];
        bool find_out = find_position(real_out_idx);
        // bool find_right = find_position(real_right_idx);
-        if (q_c == 0) {
+        if (q_c.is_zero()) {
            if (find_out) {
                variables_in_one_gate.erase(real_out_idx);
            }
@@ -699,21 +1097,62 @@ inline void Graph_::remove_unnecessary_plookup_variables(bb::UltraCircuitBui
    }
 }

+/**
+ * @brief this method removes record witness variables from the variables-in-one-gate set.
+ * Initially the record witness is added to the circuit as ctx->add_variable(0), where ctx is the circuit builder.
+ * After that it isn't used anywhere else, so we can remove it from the static analyzer's results.
+ * @tparam FF
+ * @param ultra_builder
+ */
+
+template inline void Graph_::remove_record_witness_variables(bb::UltraCircuitBuilder& ultra_builder)
+{
+    auto block_data = ultra_builder.blocks.get();
+    size_t blk_idx = find_block_index(ultra_builder, ultra_builder.blocks.aux);
+    std::vector to_remove;
+    ASSERT(blk_idx == 5);
+    for (const auto& var_idx : variables_in_one_gate) {
+        KeyPair key = { var_idx, blk_idx };
+        if (auto search = variable_gates.find(key); search != variable_gates.end()) {
+            std::vector gate_indexes = variable_gates[key];
+            ASSERT(gate_indexes.size() == 1);
+            size_t gate_idx = gate_indexes[0];
+            auto q_1 = block_data[blk_idx].q_1()[gate_idx];
+            auto q_2 = block_data[blk_idx].q_2()[gate_idx];
+            auto q_3 = block_data[blk_idx].q_3()[gate_idx];
+            auto q_4 = block_data[blk_idx].q_4()[gate_idx];
+            auto q_m = block_data[blk_idx].q_m()[gate_idx];
+            auto q_arith = block_data[blk_idx].q_arith()[gate_idx];
+            if (q_1 == FF::one() && q_m == FF::one() && q_2.is_zero() && q_3.is_zero() && q_4.is_zero() &&
+                q_arith.is_zero()) {
+                // the record witness can appear in both ROM and RAM gates, so we can ignore q_c
+                // the record witness is written as the 4th variable in a RAM/ROM read/write gate, so we can take
+                // the 4th wire value and compare it with our variable
+                if (this->to_real(ultra_builder, block_data[blk_idx].w_4()[gate_idx]) == var_idx) {
+                    to_remove.emplace_back(var_idx);
+                }
+            }
+        }
+    }
+    for (const auto& elem : to_remove) {
+        variables_in_one_gate.erase(elem);
+    }
+}
+
 /**
 * @brief this method returns a final set of variables that were in one gate
 * @tparam FF
- * @param ultra_circuit_builder
- * @return std::unordered_set
+ * @param ultra_circuit_builder circuit builder containing the variables
+ * @return std::unordered_set set of variable indices
 */
 template
 std::unordered_set Graph_::show_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_circuit_builder)
 {
-    std::unordered_set variables_in_one_gate;
    for (const auto& pair : variables_gate_counts) {
        bool is_not_constant_variable = this->check_is_not_constant_variable(ultra_circuit_builder, pair.first);
        if (pair.second == 1 && pair.first != 0 && is_not_constant_variable) {
-            variables_in_one_gate.insert(pair.first);
+            this->variables_in_one_gate.insert(pair.first);
        }
    }
    auto range_lists = ultra_circuit_builder.range_lists;
@@ -727,18 +1166,22 @@ std::unordered_set Graph_::show_variables_in_one_gate(bb::UltraCir
            }
        }
    }
-
this->remove_unnecessary_decompose_variables(ultra_circuit_builder, variables_in_one_gate, decompose_varialbes); - this->remove_unnecessary_plookup_variables(ultra_circuit_builder, variables_in_one_gate); + this->remove_unnecessary_decompose_variables( + ultra_circuit_builder, this->variables_in_one_gate, decompose_varialbes); + this->remove_unnecessary_plookup_variables(ultra_circuit_builder, this->variables_in_one_gate); + this->remove_unnecessary_range_constrains_variables(ultra_circuit_builder); + for (const auto& elem : this->fixed_variables) { + this->variables_in_one_gate.erase(elem); + } + this->remove_record_witness_variables(ultra_circuit_builder); return variables_in_one_gate; } /** - * @brief this method returns connected component with a given index and size of this component - * sometimes for debugging we want to check the size one of the connected component, so it would be - * useful to know its size - * @param connected_components - * @param index - * @return std::pair, size_t> + * @brief this method returns connected component with a given index and its size + * @param connected_components vector of all connected components + * @param index index of required component + * @return std::pair, size_t> pair of component and its size */ std::pair, size_t> get_connected_component_with_index( @@ -763,7 +1206,7 @@ std::pair, size_t> get_connected_component_with_index( template void Graph_::print_graph() { for (const auto& elem : variable_adjacency_lists) { - info("variable with index", elem.first); + info("variable with index ", elem.first); if (variable_adjacency_lists[elem.first].empty()) { info("is isolated"); } else { @@ -791,9 +1234,7 @@ template void Graph_::print_connected_components() } /** - * @brief this method prints a number of gates for each variable. - * while processing the arithmetic circuit, we count for each variable the number of gates it has participated in. - * sometimes for debugging purposes it is useful to see how many gates each variable has participated in. + * @brief this method prints a number of gates for each variable * @tparam FF */ @@ -805,20 +1246,38 @@ template void Graph_::print_variables_gate_counts() } /** - * @brief this method prints a number of edges for each variable. - * while processing the arithmetic circuit, we conut for each variable the number of edges, i.e. connections with other - * variables though the gates. perhaps in the future counting the number of edges for each vertex can be useful for - * analysis, and this function will be used for debugging. 
+ * @brief this method prints a number of edges for each variable * @tparam FF + * @param ultra_builder */ -template void Graph_::print_variables_edge_counts() +template void Graph_::print_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_builder) { - for (const auto& it : variables_degree) { - if (it.first != 0) { - info("variable index = ", it.first, "number of edges for this variable = ", it.second); + const auto& block_data = ultra_builder.blocks.get(); + for (const auto& [key, gates] : variable_gates) { + if (variables_in_one_gate.contains(key.first)) { + ASSERT(gates.size() == 1); + size_t gate_index = gates[0]; + UltraBlock block = block_data[key.second]; + info("---- printing gate selectors where variable with index ", key.first, " was found ----"); + info("q_m == ", block.q_m()[gate_index]); + info("q_c == ", block.q_c()[gate_index]); + info("q_1 == ", block.q_1()[gate_index]); + info("q_2 == ", block.q_2()[gate_index]); + info("q_3 == ", block.q_3()[gate_index]); + info("q_4 == ", block.q_4()[gate_index]); + info("q_arith == ", block.q_arith()[gate_index]); + info("q_delta_range == ", block.q_delta_range()[gate_index]); + info("q_elliptic == ", block.q_elliptic()[gate_index]); + info("q_aux == ", block.q_aux()[gate_index]); + info("q_lookup_type == ", block.q_lookup_type()[gate_index]); + info("q_poseidon2_external == ", block.q_poseidon2_external()[gate_index]); + info("q_poseidon2_internal == ", block.q_poseidon2_internal()[gate_index]); + info("---- finished printing ----"); } } } template class Graph_; + +} // namespace cdg diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp index c4c88e1e159..a15bc0566f0 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp @@ -3,18 +3,60 @@ #include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" #include #include +#include #include #include #include #include +namespace cdg { + +using UltraBlock = bb::UltraTraceBlock; +/** + * We've added a new feature to the static analyzer that tracks which gates contain each variable. + * This is helpful for removing false-positive variables from the analyzer by using gate selectors + * combined with additional knowledge about variables (e.g., tau or range tags). + * + * This information is stored in an unordered map with keys of type std::pair, where: + * - uint32_t represents the real variable index + * - size_t represents the index of the UltraTraceBlock in the reference array of TraceBlocks + * contained within the Ultra Circuit Builder + * + * Since std::unordered_map doesn't provide default hash and equality functions for std::pair keys, + * we've implemented these ourselves. Our approach is based on the hash_combine function from the + * Boost library, which efficiently combines hashes of the two elements in the pair. 
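+ *
+ * For example (illustrative), hashing a pair (var_idx, blk_idx) proceeds as:
+ *   h = 0
+ *   h = h ^ (std::hash()(var_idx) + 0x9e3779b9 + (h << 6) + (h >> 2))
+ *   h = h ^ (std::hash()(blk_idx) + 0x9e3779b9 + (h << 6) + (h >> 2))
+ * which mirrors boost::hash_combine, as implemented by KeyHasher below.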
+ */ +using KeyPair = std::pair; + +struct KeyHasher { + size_t operator()(const KeyPair& pair) const + { + size_t combined_hash = 0; + // Golden ratio constant (2^32 / phi) used in hash combining for better distribution + constexpr size_t HASH_COMBINE_CONSTANT = 0x9e3779b9; + auto hash_combiner = [](size_t lhs, size_t rhs) { + return lhs ^ (rhs + HASH_COMBINE_CONSTANT + (lhs << 6) + (lhs >> 2)); + }; + combined_hash = hash_combiner(combined_hash, std::hash()(pair.first)); + combined_hash = hash_combiner(combined_hash, std::hash()(pair.second)); + return combined_hash; + } +}; + +struct KeyEquals { + bool operator()(const KeyPair& p1, const KeyPair& p2) const + { + return (p1.first == p2.first && p1.second == p2.second); + } +}; + /* - * this class describes arithmetic circuit as an undirected graph, where vertices are variables from circuit. - * edges describe connections between variables through gates. We want to find variables that weren't properly - * constrainted/some connections were missed using additional metrics, like in how much gate variable was and number of - * connected components in the graph. if variable was in one connected component, it means that this variable wasn't - * constrained properly. if number of connected components > 1, it means that there were missed some connections between - * variables. + * This class describes an arithmetic circuit as an undirected graph, where vertices are variables from the circuit. + * Edges describe connections between variables through gates. We want to find variables that weren't properly + * constrained or where some connections were missed using additional metrics, such as how many gates a variable appears + * in and the number of connected components in the graph. If a variable appears in only one gate, it means that this + * variable wasn't constrained properly. If the number of connected components > 1, it means that there were some missed + * connections between variables. 
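+ *
+ * Both signals are exposed by the public API below: find_connected_components() returns the components, and
+ * show_variables_in_one_gate() returns the set of variables that remain after the known-safe cases are filtered out.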
*/ template class Graph_ { public: @@ -30,19 +72,39 @@ template class Graph_ { { return ultra_circuit_constructor.real_variable_index[variable_index]; }; + size_t find_block_index(bb::UltraCircuitBuilder& ultra_builder, const UltraBlock& block); void process_gate_variables(bb::UltraCircuitBuilder& ultra_circuit_constructor, - std::vector& gate_variables); - + std::vector& gate_variables, + size_t gate_index, + size_t blk_idx); std::unordered_map get_variables_gate_counts() { return this->variables_gate_counts; }; - std::vector get_arithmetic_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, - size_t index); + std::vector> get_arithmetic_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index, size_t block_idx, UltraBlock& blk); std::vector get_elliptic_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, - size_t index); + size_t index, + size_t block_idx, + UltraBlock& blk); std::vector get_plookup_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, - size_t index); + size_t index, + size_t block_idx, + UltraBlock& blk); std::vector get_sort_constraint_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, - size_t index); + size_t index, + size_t block_idx, + UltraBlock& blk); + std::vector get_poseido2s_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index, + size_t block_idx, + UltraBlock& blk); + std::vector get_auxiliary_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index, + size_t block_idx, + UltraBlock& blk); + std::vector get_rom_table_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + const bb::UltraCircuitBuilder::RomTranscript& rom_array); + std::vector get_ram_table_connected_component(bb::UltraCircuitBuilder& ultra_builder, + const bb::UltraCircuitBuilder::RamTranscript& ram_array); void add_new_edge(const uint32_t& first_variable_index, const uint32_t& second_variable_index); std::vector get_variable_adjacency_list(const uint32_t& variable_index) @@ -89,6 +151,7 @@ template class Graph_ { const std::unordered_set& decompose_variables); void remove_unnecessary_plookup_variables(bb::UltraCircuitBuilder& ultra_circuit_builder, std::unordered_set& variables_in_on_gate); + void remove_unnecessary_range_constrains_variables(bb::UltraCircuitBuilder& ultra_builder); std::unordered_set show_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_circuit_builder); void remove_unnecessary_aes_plookup_variables(std::unordered_set& variables_in_one_gate, @@ -99,11 +162,13 @@ template class Graph_ { bb::UltraCircuitBuilder& ultra_circuit_builder, bb::plookup::BasicTableId& table_id, size_t gate_index); + void remove_record_witness_variables(bb::UltraCircuitBuilder& ultra_builder); void print_graph(); void print_connected_components(); void print_variables_gate_counts(); void print_variables_edge_counts(); + void print_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_builder); ~Graph_() = default; private: @@ -114,6 +179,13 @@ template class Graph_ { variables_gate_counts; // we use this data structure to count, how many gates use every variable std::unordered_map variables_degree; // we use this data structure to count, how many every variable have edges + std::unordered_map, KeyHasher, KeyEquals> + variable_gates; // we use this data structure to store gates and TraceBlocks for every variables, where static + // analyzer found them in the circuit. 
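+    // candidates that appear in exactly one gate, plus variables set by fixed-witness gates; both sets are used
+    // when filtering the final report of show_variables_in_one_gate():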
+ std::unordered_set variables_in_one_gate; + std::unordered_set fixed_variables; }; -using Graph = Graph_; \ No newline at end of file +using Graph = Graph_; + +} // namespace cdg diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp index 26f50cc8db1..e2234cae86d 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp @@ -11,12 +11,15 @@ #include using namespace bb; +using namespace cdg; /** - * @brief this test checks graph description of the circuit with arithmetic gates - the number of connected components = the number of pair (i, j), 0<=i, j <16, i.e 256 + * @brief Test graph description of circuit with arithmetic gates + * + * @details This test verifies that: + * - The number of connected components equals the number of pairs (i,j), where 0<=i,j<16 + * - Each pair creates an isolated component, resulting in 256 total components */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -38,17 +41,18 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates) Graph graph = Graph(circuit_constructor); auto connected_components = graph.find_connected_components(); - auto num_connected_components = connected_components.size(); auto variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); - bool result = num_connected_components == 256; - EXPECT_EQ(result, true); + EXPECT_EQ(variables_in_one_gate.size(), 1024); + EXPECT_EQ(connected_components.size(), 256); } /** - * @brief This test checks graph description of Ultra Circuit Builder with arithmetic gates with shifts - * It must be one connected component, cause all gates have shifts + * @brief Test graph description of Ultra Circuit Builder with arithmetic gates with shifts + * + * @details This test verifies that: + * - When all gates have shifts, they form a single connected component + * - The shift operation connects all variables in the circuit */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates_with_shifts) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -75,10 +79,13 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates_with_s } /** - * @brief this test checks graph description of the circuit with boolean gates. - all variables must be isolated and the number of connected components = 0, all variables in one gate + * @brief Test graph description of circuit with boolean gates + * + * @details This test verifies that: + * - All variables are isolated with boolean gates + * - The number of connected components is 0 + * - All variables are in one gate */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_boolean_gates) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -99,11 +106,13 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_boolean_gates) } /** - * @brief this test checks graph decription for the circuit with one elliptic addition gate. 
- * The result is one connected component for 6 variables: - * x1, y1, x2, y2, x3, y3 + * @brief Test graph description for circuit with one elliptic addition gate + * + * @details This test verifies that: + * - The circuit forms one connected component containing 6 variables + * - The variables represent the coordinates of three points: (x1,y1), (x2,y2), (x3,y3) + * - Where (x3,y3) is the result of adding (x1,y1) and (x2,y2) */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_add_gate) { typedef grumpkin::g1::affine_element affine_element; @@ -132,11 +141,13 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_add_gate) } /** - * @brief this test checks graph description of the circuit with one elliptic double gate. - The result is one connected component for 4 variables: - x1, y1, x3, y3 + * @brief Test graph description for circuit with one elliptic double gate + * + * @details This test verifies that: + * - The circuit forms one connected component containing 4 variables + * - The variables represent the coordinates of two points: (x1,y1) and (x3,y3) + * - Where (x3,y3) is the result of doubling (x1,y1) */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_double_gate) { typedef grumpkin::g1::affine_element affine_element; @@ -161,12 +172,14 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_double_gate) } /** - * @brief this test checks the graph description of the circuit has elliptic addition and multiplication - gates together. The result is 2 connected components: - x1, y1, x2, y2, x3, y3, x4, y4 - x5, y5, x6, y6, x7, y7, x8, y8 + * @brief Test graph description for circuit with elliptic addition and multiplication gates + * + * @details This test verifies that: + * - The circuit forms 2 connected components + * - First component contains: x1, y1, x2, y2, x3, y3, x4, y4 + * - Second component contains: x5, y5, x6, y6, x7, y7, x8, y8 + * - Each component represents a separate elliptic curve operation sequence */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_together) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -216,11 +229,14 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_together) } /** - * @brief this test check graph description of the circuit with 2 sort_constraint. The result is 2 connected components: - a_idx, b_idx, c_idx, d_idx - e_idx, f_idx, g_idx, h_idx + * @brief Test graph description for circuit with 2 sort constraints + * + * @details This test verifies that: + * - The circuit forms 2 connected components + * - First component contains: a_idx, b_idx, c_idx, d_idx + * - Second component contains: e_idx, f_idx, g_idx, h_idx + * - Each sort constraint creates its own connected component */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -253,12 +269,14 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints) } /** - * @brief this test checks graph description of the circuit with 2 sorted_constraints with edges. - The result is 2 connected components: - a_idx, b_idx, ... 
, h_idx - a1_idx, b1_idx, ..., h1_idx + * @brief Test graph description for circuit with 2 sorted constraints with edges + * + * @details This test verifies that: + * - The circuit forms 2 connected components + * - First component contains: a_idx through h_idx + * - Second component contains: a1_idx through h1_idx + * - Each sort constraint with edges creates its own connected component */ - TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints_with_edges) { fr a = fr::one(); @@ -310,10 +328,12 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints_with_e } /** - * @brief this test checks graph decription for circuit with gates that were created from plookup accumulators - the result is one connected component + * @brief Test graph description for circuit with gates created from plookup accumulators + * + * @details This test verifies that: + * - The circuit forms one connected component + * - Plookup accumulator gates connect all variables in the circuit */ - TEST(boomerang_ultra_circuit_constructor, test_graph_with_plookup_accumulators) { UltraCircuitBuilder circuit_builder = UltraCircuitBuilder(); @@ -339,10 +359,12 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_with_plookup_accumulators) } /** - * @brief this test checks variable gates counts for variable from arithmetic gates without shifts - in circuit + * @brief Test variable gate counts for variables from arithmetic gates without shifts + * + * @details This test verifies that: + * - Each variable (except index 0) appears in exactly one gate + * - Variables with index 0 appear in no gates */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithmetic_gate) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -372,11 +394,13 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithm } /** - * @brief this test checks variables gates count for variable in circuit with gates with shifts. - * All variables except for zero index, which index == 0 mod 4 and index != 4 have gates count == 2. - * Other variables have gates count = 1. + * @brief Test variable gate counts for variables in circuit with gates with shifts + * + * @details This test verifies that: + * - Variables with index == 0 mod 4 and index != 4 have gate count == 2 + * - All other variables (except index 0) have gate count == 1 + * - Variables with index 0 have gate count == 0 */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithmetic_gate_with_shifts) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -410,10 +434,12 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithm } /** - * @brief this test checks variables gates count for variables in circuit with boolean gates - * all variables except for zero index must have gates count = 1. + * @brief Test variable gate counts for variables in circuit with boolean gates + * + * @details This test verifies that: + * - All variables (except index 0) have gate count == 1 + * - Variables with index 0 have gate count == 0 */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_boolean_gates) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -434,10 +460,12 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_boolea } /** - * @brief this test checks variables gate counts in circuit with sorted constraints. 
- * all variables in 2 connected components must have gates count = 1 + * @brief Test variable gate counts in circuit with sorted constraints + * + * @details This test verifies that: + * - All variables in both connected components have gate count == 1 + * - Each sort constraint creates a separate component with consistent gate counts */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted_constraints) { UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); @@ -478,10 +506,12 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted } /** - * @brief this test checks variable gates count for variables in circuit with sorted constraints with edges - * all variables in 2 connected components must have gates count = 1 + * @brief Test variable gate counts for variables in circuit with sorted constraints with edges + * + * @details This test verifies that: + * - All variables in both connected components have gate count == 1 + * - Each sort constraint with edges creates a separate component with consistent gate counts */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted_constraints_with_edges) { fr a = fr::one(); @@ -541,10 +571,12 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted } /** - * @brief this test checks variables gates count for variables in circuit with 1 elliptic addition gates - * all variables in connected components must have gates count = 1 + * @brief Test variable gate counts for variables in circuit with elliptic addition gates + * + * @details This test verifies that: + * - All variables in the connected component have gate count == 1 + * - The component contains the 6 variables representing the coordinates of the points */ - TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_ecc_add_gates) { typedef grumpkin::g1::affine_element affine_element; @@ -579,8 +611,11 @@ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_ecc_ad } /** - * @brief this test checks variables gates count for variables in circuit with 1 elliptic double gates - * all variables in connected components must have gates count = 1. + * @brief Test variable gate counts for variables in circuit with elliptic double gates + * + * @details This test verifies that: + * - All variables in the connected component have gate count == 1 + * - The component contains the 4 variables representing the coordinates of the point */ TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_ecc_dbl_gate) @@ -622,8 +657,10 @@ std::vector add_variables(UltraCircuitBuilder& circuit_constructor, st } /** - * @brief this test checks graph description of circuit with range constraints. - * all variables must be in one connected component. + * @brief Test graph description of circuit with range constraints + * + * @details This test verifies that: + * - All variables must be in one connected component */ TEST(boomerang_ultra_circuit_constructor, test_graph_for_range_constraints) @@ -640,8 +677,10 @@ TEST(boomerang_ultra_circuit_constructor, test_graph_for_range_constraints) } /** - * @brief this checks graph description of circuit with decompose function. 
- * all variables must be in one connected component + * @brief Test graph description of circuit with decompose function + * + * @details This test verifies that: + * - All variables must be in one connected component */ TEST(boomerang_ultra_circuit_constructor, composed_range_constraint) diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp index 9db54f96429..de25c3fe690 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp @@ -12,26 +12,35 @@ using namespace bb; using namespace bb::stdlib; +using namespace cdg; using Builder = UltraCircuitBuilder; -typedef stdlib::field_t field_pt; -typedef stdlib::witness_t witness_pt; +using field_pt = stdlib::field_t; +using witness_pt = stdlib::witness_t; -bool check_in_vector(const std::vector& input_vector, const uint32_t& real_var_index) +/** + * @brief Fix witness values in a vector to ensure they appear in multiple gates + * + * Static analyzer typically identifies variables in only one gate. For test input/output variables, + * we can filter them by fixing their witness values, which adds them to a second gate + * and prevents them from being flagged as potentially dangerous. + * + * @param input_vector Vector of field elements to fix + */ +void fix_vector_witness(std::vector& input_vector) { - for (const auto& elem : input_vector) { - if (elem.witness_index == real_var_index) { - return true; - } + for (auto& elem : input_vector) { + elem.fix_witness(); } - return false; } /** - * @brief this test checks graph description of circuit for AES128CBC - * graph must be consist from one connected component + * @brief Test graph description of AES128CBC circuit with 64 bytes of data + * + * @details This test verifies that: + * - The graph consists of one connected component + * - No variables are in only one gate */ - TEST(boomerang_stdlib_aes, test_graph_for_aes_64_bytes) { uint8_t key[16]{ 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }; @@ -41,7 +50,7 @@ TEST(boomerang_stdlib_aes, test_graph_for_aes_64_bytes) 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }; - const auto convert_bytes = [](uint8_t* data) { + auto convert_bytes = [](uint8_t* data) { uint256_t converted(0); for (uint64_t i = 0; i < 16; ++i) { uint256_t to_add = uint256_t((uint64_t)(data[i])) << uint256_t((15 - i) * 8); @@ -59,28 +68,35 @@ TEST(boomerang_stdlib_aes, test_graph_for_aes_64_bytes) witness_pt(&builder, fr(convert_bytes(in + 48))), }; + fix_vector_witness(in_field); + field_pt key_field(witness_pt(&builder, fr(convert_bytes(key)))); field_pt iv_field(witness_pt(&builder, fr(convert_bytes(iv)))); + key_field.fix_witness(); + iv_field.fix_witness(); - const auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + fix_vector_witness(result); Graph graph = Graph(builder); auto connected_components = graph.find_connected_components(); - auto num_connected_components = connected_components.size(); - bool graph_result = num_connected_components == 1; - - EXPECT_EQ(graph_result, true); 
+ EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); } /** - * @brief this test checks variables gate counts for variables in circuit for AES128CBC - * Some variables can be from input/output vectors, or they are key and iv, and they have variable - * gates count = 1, because it's the circuit for test. So, we can ignore these variables + * @brief Test variable gate counts for AES128CBC circuit + * + * @details This test verifies that: + * - The graph consists of one connected component + * - No variables appear in only one gate + * + * Note: Input/output vectors, key, and IV variables might normally appear in only one gate, + * but we fix their witness values to ensure they appear in multiple gates. */ - TEST(boomerang_stdlib_aes, test_variable_gates_count_for_aes128cbc) { - uint8_t key[16]{ 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }; uint8_t iv[16]{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f }; uint8_t in[64]{ 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, @@ -88,7 +104,7 @@ TEST(boomerang_stdlib_aes, test_variable_gates_count_for_aes128cbc) 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }; - const auto convert_bytes = [](uint8_t* data) { + auto convert_bytes = [](uint8_t* data) { uint256_t converted(0); for (uint64_t i = 0; i < 16; ++i) { uint256_t to_add = uint256_t((uint64_t)(data[i])) << uint256_t((15 - i) * 8); @@ -106,18 +122,19 @@ TEST(boomerang_stdlib_aes, test_variable_gates_count_for_aes128cbc) witness_pt(&builder, fr(convert_bytes(in + 48))), }; + fix_vector_witness(in_field); + field_pt key_field(witness_pt(&builder, fr(convert_bytes(key)))); field_pt iv_field(witness_pt(&builder, fr(convert_bytes(iv)))); + key_field.fix_witness(); + iv_field.fix_witness(); - const auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + fix_vector_witness(result); Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_one_gate) { - bool result1 = check_in_vector(in_field, elem); - bool result2 = check_in_vector(result, elem); - bool check = - (result1 == 1) || (result2 == 1) || (elem == key_field.witness_index) || (elem == iv_field.witness_index); - EXPECT_EQ(check, true); - } -} \ No newline at end of file + EXPECT_EQ(variables_in_one_gate.size(), 0); +} diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_bigfield.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_bigfield.test.cpp new file mode 100644 index 00000000000..2d7b0fca267 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_bigfield.test.cpp @@ -0,0 +1,423 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/numeric/random/engine.hpp" +#include "barretenberg/stdlib/primitives/bigfield/bigfield.hpp" + +#include "barretenberg/ecc/curves/bn254/fq.hpp" +#include 
"barretenberg/ecc/curves/bn254/fr.hpp" + +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/common/test.hpp" +#include "barretenberg/stdlib/primitives/bool/bool.hpp" +#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" +#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders.hpp" +#include "barretenberg/stdlib/primitives/curves/bn254.hpp" +#include "barretenberg/stdlib/primitives/field/field.hpp" +#include "barretenberg/transcript/origin_tag.hpp" +#include +#include + +using namespace bb; +using namespace cdg; + +namespace { +auto& engine = numeric::get_debug_randomness(); +} + +using Builder = UltraCircuitBuilder; +using bn254 = stdlib::bn254; +using fr_ct = bn254::ScalarField; +using fq_ct = bn254::BaseField; +using public_witness_ct = bn254::public_witness_ct; +using witness_ct = bn254::witness_ct; + +/** + * @brief Fix a bigfield element to prevent it from being identified as a variable in one gate. + * + * Static analyzer usually prints input and output variables as variables in one gate. In tests these variables + * are not dangerous and usually we can filter them by fixing the witness which adds a gate. Then these variables will + * be in 2 gates, and static analyzer won't print them. + * + * @param element The bigfield element to fix + */ +void fix_bigfield_element(const fq_ct& element) +{ + for (int i = 0; i < 4; i++) { + element.binary_basis_limbs[i].element.fix_witness(); + } + element.prime_basis_limb.fix_witness(); +} + +/** + * @brief Test graph description for bigfield constructors. + * + * @details Tests construction of: + * - Constant value + * - Witness from u512 + * - Small field witness + * - Mixed construction with lower limb addition + * + * The result is one connected component with one variable in one gate. + */ +TEST(boomerang_bigfield, test_graph_description_bigfield_constructors) +{ + Builder builder; + [[maybe_unused]] fq_ct constant = fq_ct(1); + [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1); + [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1)); + [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1); + [[maybe_unused]] fq_ct r; + + auto graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 1); +} + +/** + * @brief Test graph description for bigfield addition operations. + * + * @details Tests various addition combinations with fix_bigfield_element. + * + * The result is one connected component with no variables in one gate. 
+/**
+ * @brief Test graph description for bigfield constructors.
+ *
+ * @details Tests construction of:
+ * - Constant value
+ * - Witness from u512
+ * - Small field witness
+ * - Mixed construction with lower limb addition
+ *
+ * The result is one connected component with one variable in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_constructors)
+{
+    Builder builder;
+    [[maybe_unused]] fq_ct constant = fq_ct(1);
+    [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1));
+    [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    [[maybe_unused]] fq_ct r;
+
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 1);
+}
+
+/**
+ * @brief Test graph description for bigfield addition operations.
+ *
+ * @details Tests various addition combinations with fix_bigfield_element.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_addition)
+{
+    Builder builder;
+    [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1));
+    [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    [[maybe_unused]] fq_ct r;
+    [[maybe_unused]] fq_ct r1;
+    [[maybe_unused]] fq_ct r2;
+
+    r = mixed + var;
+    fix_bigfield_element(r);
+    r1 = r + mixed;
+    fix_bigfield_element(r1);
+    r2 = r + var;
+    fix_bigfield_element(r2);
+
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for bigfield subtraction operations.
+ *
+ * @details Tests all possible subtraction combinations between mixed, constant, and variable values.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_subtraction)
+{
+    Builder builder;
+    [[maybe_unused]] fq_ct constant = fq_ct(1);
+    [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1));
+    [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    [[maybe_unused]] fq_ct r;
+
+    r = mixed - mixed;
+    fix_bigfield_element(r);
+    r = mixed - constant;
+    fix_bigfield_element(r);
+    r = mixed - var;
+    fix_bigfield_element(r);
+    r = var - mixed;
+    fix_bigfield_element(r);
+
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for bigfield multiplication operations.
+ *
+ * @details Tests all possible multiplication combinations.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_multiplication)
+{
+    Builder builder;
+    [[maybe_unused]] fq_ct constant = fq_ct(1);
+    [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1));
+    [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    [[maybe_unused]] fq_ct r;
+
+    r = var * constant;
+    r = constant * constant;
+    r = mixed * var;
+    r = mixed * constant;
+    r = mixed * mixed;
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
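Unlike the tests above, the division tests that follow expect flagged variables. The reason, per the inline comment below, is that each operator/ introduces an inverse witness constrained by a single polynomial gate. A hedged sketch of that per-division behavior (the one-flag-per-division count is inferred from the test below, which expects three flags for three divisions):

```cpp
// Sketch: a single division should leave exactly one legitimately-flagged variable,
// the internal inverse checked by a * a_inv - 1 = 0 (assumption inferred from the tests).
Builder builder;
fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
fq_ct q = fq_ct(1) / var; // creates one internal inverse witness
fix_bigfield_element(q);  // fixes q's limbs, but not the internal inverse
auto graph = Graph(builder);
EXPECT_EQ(graph.show_variables_in_one_gate(builder).size(), 1);
```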
+/**
+ * @brief Test graph description for bigfield division operations.
+ *
+ * @details Tests division operations with circuit checking. Each division operator creates
+ * one inverse variable for the polynomial gate check (a * a_inv - 1 = 0).
+ *
+ * The result is one connected component with three variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_division)
+{
+    Builder builder;
+    [[maybe_unused]] fq_ct constant = fq_ct(1);
+    [[maybe_unused]] fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    [[maybe_unused]] fr_ct small_var = witness_ct(&builder, fr(1));
+    [[maybe_unused]] fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    [[maybe_unused]] fq_ct r;
+
+    r = constant / var;
+    fix_bigfield_element(r);
+    r = constant / constant;
+    r = mixed / mixed;
+    fix_bigfield_element(r);
+    r = mixed / var;
+    fix_bigfield_element(r);
+
+    CircuitChecker::check(builder);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    // Every bigfield operator/ creates one inverse variable, constrained by a single poly gate
+    // checking a * a_inv - 1 = 0. These are known false positives, but it is safer to assert
+    // that nothing beyond these is flagged; otherwise genuinely dangerous variables from other
+    // functions could be masked.
+    EXPECT_EQ(variables_in_one_gate.size(), 3);
+}
+
+/**
+ * @brief Test graph description for mixed bigfield operations.
+ *
+ * @details Tests combinations of addition, subtraction, multiplication and division operations.
+ *
+ * The result is one connected component with two variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_bigfield_mix_operations)
+{
+    auto builder = Builder();
+    fq_ct constant = fq_ct(1);
+    fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1);
+    fr_ct small_var = witness_ct(&builder, fr(1));
+    fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1);
+    fq_ct r;
+
+    r = mixed + mixed;
+    fix_bigfield_element(r);
+    r = mixed - mixed;
+    fix_bigfield_element(r);
+    r = mixed + var;
+    fix_bigfield_element(r);
+    r = mixed + constant;
+    fix_bigfield_element(r);
+    r = mixed - var;
+    fix_bigfield_element(r);
+    r = mixed - constant;
+    fix_bigfield_element(r);
+    r = var - mixed;
+    fix_bigfield_element(r);
+
+    r = var * constant;
+    fix_bigfield_element(r);
+    r = constant / var;
+    fix_bigfield_element(r);
+    r = constant * constant;
+    r = constant / constant;
+
+    r = mixed * var;
+    fix_bigfield_element(r);
+    r = mixed / var;
+    fix_bigfield_element(r);
+    r = mixed * mixed;
+    fix_bigfield_element(r);
+    r = mixed * constant;
+    fix_bigfield_element(r);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 2);
+}
+
+/**
+ * @brief Test graph description for high/low bits constructor and operations.
+ *
+ * @details Tests bit-sliced construction followed by a multiplication and an addition.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_constructor_high_low_bits_and_operations)
+{
+    auto builder = Builder();
+    fq inputs[2]{ fq::random_element(), fq::random_element() };
+    fq_ct a(witness_ct(&builder, fr(uint256_t(inputs[0]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[0]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct b(witness_ct(&builder, fr(uint256_t(inputs[1]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[1]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct c = a * b;
+    fq d = fq::random_element();
+    fq_ct d1(witness_ct(&builder, fr(uint256_t(d).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+             witness_ct(&builder, fr(uint256_t(d).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    c = c + d1;
+    fix_bigfield_element(c);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for the bigfield multiplication function.
+ *
+ * @details Tests a single multiplication of two witness bigfield elements.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_mul_function)
+{
+    auto builder = Builder();
+    fq inputs[2]{ fq::random_element(), fq::random_element() };
+    fq_ct a(witness_ct(&builder, fr(uint256_t(inputs[0]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[0]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct b(witness_ct(&builder, fr(uint256_t(inputs[1]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[1]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct c = a * b;
+    fix_bigfield_element(c);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for the bigfield squaring function.
+ *
+ * @details Tests a single squaring of a random witness element.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_sqr_function)
+{
+    auto builder = Builder();
+    fq input = fq::random_element();
+    fq_ct a(witness_ct(&builder, fr(uint256_t(input).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(input).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct c = a.sqr();
+    fix_bigfield_element(c);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
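The madd and mult_madd tests below reuse the two-witness constructor seen above. Spelled out with the slice bounds made explicit (this assumes uint256_t::slice(lo, hi) returns bits [lo, hi), consistent with its use throughout this file):

```cpp
// Sketch of the low/high witness split behind `fq_ct a(witness_ct(...), witness_ct(...))`.
Builder builder;
fq value = fq::random_element();
uint256_t v(value);
// Low half:  bits [0, 2 * NUM_LIMB_BITS); high half: bits [2 * NUM_LIMB_BITS, 4 * NUM_LIMB_BITS).
fq_ct element(witness_ct(&builder, fr(v.slice(0, fq_ct::NUM_LIMB_BITS * 2))),
              witness_ct(&builder, fr(v.slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
```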
+/**
+ * @brief Test graph description for the multiply-add function.
+ *
+ * @details Tests a multiply-add operation with three witness inputs.
+ *
+ * The result is one connected component with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_madd_function)
+{
+    auto builder = Builder();
+    fq inputs[3]{ fq::random_element(), fq::random_element(), fq::random_element() };
+    fq_ct a(witness_ct(&builder, fr(uint256_t(inputs[0]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[0]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct b(witness_ct(&builder, fr(uint256_t(inputs[1]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[1]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct c(witness_ct(&builder, fr(uint256_t(inputs[2]).slice(0, fq_ct::NUM_LIMB_BITS * 2))),
+            witness_ct(&builder, fr(uint256_t(inputs[2]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4))));
+    fq_ct d = a.madd(b, { c });
+    fix_bigfield_element(d);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for batch multiply-add operations.
+ *
+ * @details Tests mult_madd with arrays of size number_of_madds = 16 for the left multiplicands,
+ * right multiplicands, and addends.
+ *
+ * The result is a circuit with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_mult_madd_function)
+{
+    auto builder = Builder();
+    const size_t number_of_madds = 16;
+    fq mul_left_values[number_of_madds];
+    fq mul_right_values[number_of_madds];
+    fq to_add_values[number_of_madds];
+
+    std::vector mul_left;
+    std::vector mul_right;
+    std::vector to_add;
+    mul_left.reserve(number_of_madds);
+    mul_right.reserve(number_of_madds);
+    to_add.reserve(number_of_madds);
+    for (size_t j = 0; j < number_of_madds; j++) {
+        mul_left_values[j] = fq::random_element();
+        mul_right_values[j] = fq::random_element();
+        mul_left.emplace_back(fq_ct::create_from_u512_as_witness(&builder, uint512_t(uint256_t(mul_left_values[j]))));
+        mul_right.emplace_back(fq_ct::create_from_u512_as_witness(&builder, uint512_t(uint256_t(mul_right_values[j]))));
+        to_add_values[j] = fq::random_element();
+        to_add.emplace_back(fq_ct::create_from_u512_as_witness(&builder, uint512_t(uint256_t(to_add_values[j]))));
+    }
+    fq_ct f = fq_ct::mult_madd(mul_left, mul_right, to_add);
+    fix_bigfield_element(f);
+    builder.finalize_circuit(false);
+    auto graph = Graph(builder);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
+
+/**
+ * @brief Test graph description for high/low bits constructor.
+ *
+ * @details Tests basic multiplication with bit-sliced construction.
+ *
+ * The result is a circuit with no variables in one gate.
+ */
+TEST(boomerang_bigfield, test_graph_description_constructor_high_low_bits)
+{
+    auto builder = Builder();
+    fq mul_left_value = fq::random_element();
+    fq mul_right_value = fq::random_element();
+    [[maybe_unused]] fq_ct mul_left =
+        fq_ct::create_from_u512_as_witness(&builder, uint512_t(uint256_t(mul_left_value)));
+    [[maybe_unused]] fq_ct mul_right =
+        fq_ct::create_from_u512_as_witness(&builder, uint512_t(uint256_t(mul_right_value)));
+    fq_ct product = mul_left * mul_right;
+    fix_bigfield_element(product);
+    builder.finalize_circuit(false);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+}
diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp
index 934e92b568a..b25381f4ab7 100644
--- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp
@@ -1,51 +1,63 @@
+#include "barretenberg/boomerang_value_detection/graph.hpp"
 #include "barretenberg/circuit_checker/circuit_checker.hpp"
+#include "barretenberg/common/test.hpp"
 #include "barretenberg/crypto/blake2s/blake2s.hpp"
+#include "barretenberg/numeric/random/engine.hpp"
 #include "barretenberg/stdlib/hash/blake2s/blake2s.hpp"
 #include "barretenberg/stdlib/hash/blake2s/blake2s_plookup.hpp"
 #include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp"
+#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders.hpp"
 #include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp"
-#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp"
-#include "graph.hpp"
-#include

 using namespace bb;
-using namespace bb::stdlib;
-
+using namespace cdg;
 using Builder = UltraCircuitBuilder;
-
-using field_ct = field_t;
-using witness_ct = witness_t;
-using byte_array_ct = byte_array;
-using byte_array_plookup = byte_array;
-using public_witness_t = public_witness_t;
+using field_ct = stdlib::field_t;
+using witness_ct = stdlib::witness_t;
+using byte_array_ct = stdlib::byte_array;
+using public_witness_t = stdlib::public_witness_t;

 /**
- * @brief this tests check graph description of circuit for blake2s for one and two blocks.
- * all graphs must have one connected component.
+ * @brief Test graph description for Blake2s hash with single block input + * + * @details This test verifies that: + * - The graph has one connected component + * - No variables are in one gate + * - The plookup implementation correctly processes a single block input */ - -TEST(boomerang_stdlib_blake2s, test_graph_for_blake2s_single_block_plookup) +TEST(boomerang_stdlib_blake2s, graph_description_single_block_plookup) { Builder builder; std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz01"; std::vector input_v(input.begin(), input.end()); - byte_array_plookup input_arr(&builder, input_v); - byte_array_plookup output = blake2s(input_arr); + byte_array_ct input_arr(&builder, input_v); + byte_array_ct output = stdlib::blake2s(input_arr); Graph graph = Graph(builder); auto connected_components = graph.find_connected_components(); EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); } -TEST(boomerang_stdlib_blake2s, test_graph_for_blake2s_double_block_plookup) +/** + * @brief Test graph description for Blake2s hash with double block input + * + * @details This test verifies that: + * - The graph has one connected component + * - No variables are in one gate + * - The plookup implementation correctly processes a multi-block input + * - The output matches the expected cryptographic hash + */ +TEST(boomerang_stdlib_blake2s, graph_description_double_block_plookup) { Builder builder; std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789"; std::vector input_v(input.begin(), input.end()); - byte_array_plookup input_arr(&builder, input_v); - byte_array_plookup output = blake2s(input_arr); + byte_array_ct input_arr(&builder, input_v); + byte_array_ct output = stdlib::blake2s(input_arr); auto expected = crypto::blake2s(input_v); @@ -54,4 +66,6 @@ TEST(boomerang_stdlib_blake2s, test_graph_for_blake2s_double_block_plookup) Graph graph = Graph(builder); auto connected_components = graph.find_connected_components(); EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); } diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp index d826f1e080b..940a33951a2 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp @@ -9,6 +9,7 @@ #include using namespace bb; +using namespace cdg; using byte_array = stdlib::byte_array; using public_witness_t = stdlib::public_witness_t; @@ -17,28 +18,39 @@ using public_witness_t_plookup = stdlib::public_witness_t input_v(input.begin(), input.end()); - byte_array_plookup input_arr(&builder, input_v); byte_array_plookup output = stdlib::blake3s(input_arr); - std::vector expected = blake3::blake3s(input_v); - - EXPECT_EQ(output.get_value(), expected); - Graph graph = Graph(builder); auto connected_components = graph.find_connected_components(); EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); } +/** + * @brief Test graph description for blake3s with a double block input + * + * The 
result should be one connected component with no variables in one gate,
+ * verifying that multi-block processing maintains proper connectivity
+ */
 TEST(boomerang_stdlib_blake3s, test_double_block_plookup)
 {
     auto builder = UltraBuilder();
@@ -50,9 +62,9 @@ TEST(boomerang_stdlib_blake3s, test_double_block_plookup)

     std::vector expected = blake3::blake3s(input_v);

-    EXPECT_EQ(output.get_value(), expected);
-
     Graph graph = Graph(builder);
     auto connected_components = graph.find_connected_components();
     EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
 }
\ No newline at end of file
diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_dynamic_array.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_dynamic_array.test.cpp
new file mode 100644
index 00000000000..a8767b90397
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_dynamic_array.test.cpp
@@ -0,0 +1,88 @@
+#include "barretenberg/boomerang_value_detection/graph.hpp"
+#include "barretenberg/common/test.hpp"
+#include "barretenberg/numeric/random/engine.hpp"
+#include "barretenberg/stdlib/primitives/bool/bool.hpp"
+#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders.hpp"
+#include "barretenberg/stdlib/primitives/memory/dynamic_array.hpp"
+
+using namespace bb;
+using namespace cdg;
+namespace {
+auto& engine = bb::numeric::get_debug_randomness();
+}
+
+// Defining ultra-specific types for local testing.
+using Builder = UltraCircuitBuilder;
+using bool_ct = stdlib::bool_t;
+using field_ct = stdlib::field_t;
+using witness_ct = stdlib::witness_t;
+using DynamicArray_ct = stdlib::DynamicArray;
+
+/**
+ * @brief This test checks the graph description for the dynamic array resize operation.
+ * The result is one connected component with max_size variables in one gate,
+ * testing array initialization, pushing elements, and resizing operations.
+ * @details Test includes:
+ * - Array initialization with max size
+ * - Sequential push of witness elements
+ * - Resize operation with witness size
+ */
+TEST(boomerang_stdlib_dynamic_array, graph_description_dynamic_array_method_resize_test)
+{
+    Builder builder;
+    const size_t max_size = 10;
+
+    DynamicArray_ct array(&builder, max_size);
+
+    field_ct next_size = field_ct(witness_ct(&builder, (uint256_t)(max_size - 1)));
+    for (size_t i = 0; i < max_size; ++i) {
+        array.push(field_ct::from_witness(&builder, i));
+    }
+
+    array.resize(next_size, 7);
+    Graph graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(connected_components.size(), 1);
+    EXPECT_EQ(variables_in_one_gate.size(), max_size);
+}
+
+/**
+ * @brief This test checks the graph description for dynamic array consistency methods.
+ * The result is one connected component with max_size variables in one gate,
+ * testing all array manipulation operations.
+ * @details Test includes sequence of operations:
+ * - Sequential push of witness elements
+ * - Sequential pop of all elements
+ * - Array resize
+ * - Conditional push operations (true and false cases)
+ * - Conditional pop operations (true and false cases)
+ */
+TEST(boomerang_stdlib_dynamic_array, graph_description_dynamic_array_consistency_methods)
+{
+    Builder builder;
+    const size_t max_size = 10;
+
+    DynamicArray_ct array(&builder,
max_size); + + for (size_t i = 0; i < max_size; ++i) { + array.push(field_ct::from_witness(&builder, i)); + } + + for (size_t i = 0; i < max_size; ++i) { + array.pop(); + } + + array.resize(max_size - 1, 7); + + array.conditional_push(false, 100); + array.conditional_push(true, 100); + array.conditional_pop(false); + array.conditional_pop(true); + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), max_size); +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_poseidon2s_permutation.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_poseidon2s_permutation.test.cpp new file mode 100644 index 00000000000..0df624b3fca --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_poseidon2s_permutation.test.cpp @@ -0,0 +1,253 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" + +#include "barretenberg/crypto/poseidon2/poseidon2.hpp" +#include "barretenberg/crypto/poseidon2/poseidon2_params.hpp" +#include "barretenberg/stdlib/hash/poseidon2/poseidon2.hpp" +#include "barretenberg/stdlib/hash/poseidon2/poseidon2_permutation.hpp" + +#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders.hpp" +#include "barretenberg/stdlib/primitives/curves/bn254.hpp" + +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/common/test.hpp" +#include "barretenberg/numeric/random/engine.hpp" +using namespace bb; +using namespace cdg; + +namespace { +auto& engine = numeric::get_debug_randomness(); +} + +using Params = crypto::Poseidon2Bn254ScalarFieldParams; +using Builder = UltraCircuitBuilder; +using Permutation = stdlib::Poseidon2Permutation; +using field_t = stdlib::field_t; +using witness_t = stdlib::witness_t; +using _curve = stdlib::bn254; +using byte_array_ct = _curve::byte_array_ct; +using fr_ct = typename _curve::ScalarField; +using witness_ct = typename _curve::witness_ct; + +/** + * @brief Check if a variable index is present in the input vector + * + * Static analyzer usually prints input and output variables as variables in one gate. In these tests output variables + * are not dangerous. 
We can filter them by checking that the difference between their witness indices and the witness
+ * index of the result is at most 3.
+ *
+ * @param input_vector Vector of field elements to check against
+ * @param real_var_index Variable index to find
+ * @return true if the variable index is found in the input vector
+ * @return false if the variable index is not found
+ */
+bool check_in_input_vector(const std::vector& input_vector, const uint32_t& real_var_index)
+{
+    for (const auto& elem : input_vector) {
+        if (elem.witness_index == real_var_index) {
+            return true;
+        }
+    }
+    return false;
+}
+
+/**
+ * @brief Test graph description for poseidon2 hash with random inputs
+ *
+ * The result should be one connected component, and only output variables may appear in one gate
+ *
+ * @param num_inputs Number of random inputs to generate
+ */
+void test_poseidon2s_circuit(size_t num_inputs = 5)
+{
+    auto builder = Builder();
+    std::vector inputs;
+
+    for (size_t i = 0; i < num_inputs; ++i) {
+        auto element = fr::random_element(&engine);
+        inputs.emplace_back(field_t(witness_t(&builder, element)));
+    }
+
+    for (auto& elem : inputs) {
+        elem.fix_witness();
+    }
+    auto result = stdlib::poseidon2::hash(builder, inputs);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    std::unordered_set outputs{
+        result.witness_index, result.witness_index + 1, result.witness_index + 2, result.witness_index + 3
+    };
+    for (const auto& elem : variables_in_one_gate) {
+        EXPECT_EQ(outputs.contains(elem), true);
+    }
+}
+
+/**
+ * @brief Test graph description for poseidon2 hash with byte array input
+ *
+ * The result should be one connected component, and only output variables may appear in one gate
+ *
+ * @param num_inputs Number of random bytes to generate
+ */
+void test_poseidon2s_hash_byte_array(size_t num_inputs = 5)
+{
+    Builder builder;
+
+    std::vector input;
+    input.reserve(num_inputs);
+    for (size_t i = 0; i < num_inputs; ++i) {
+        input.push_back(engine.get_random_uint8());
+    }
+
+    byte_array_ct circuit_input(&builder, input);
+    auto result = stdlib::poseidon2::hash_buffer(builder, circuit_input);
+    auto graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    std::unordered_set outputs{
+        result.witness_index, result.witness_index + 1, result.witness_index + 2, result.witness_index + 3
+    };
+    for (const auto& elem : variables_in_one_gate) {
+        EXPECT_EQ(outputs.contains(elem), true);
+    }
+}
+
+/**
+ * @brief Test graph description for repeated poseidon2 hash operations
+ *
+ * The result should be one connected component with repeated hashing of pairs;
+ * only output variables from each hash operation may appear in one gate
+ *
+ * @param num_inputs Number of hash iterations to perform
+ */
+void test_poseidon2s_hash_repeated_pairs(size_t num_inputs = 5)
+{
+    Builder builder;
+
+    fr left_in = fr::random_element();
+    fr right_in = fr::random_element();
+
+    fr_ct left = witness_ct(&builder, left_in);
+    fr_ct right = witness_ct(&builder, right_in);
+    right.fix_witness();
+    std::unordered_set outputs{ left.witness_index };
+    // num_inputs - 1 iterations since the first hash hashes two elements
+    for (size_t i = 0; i < num_inputs - 1; ++i) {
+        left = stdlib::poseidon2::hash(builder, { left, right });
outputs.insert(left.witness_index + 1); + outputs.insert(left.witness_index + 2); + outputs.insert(left.witness_index + 3); + } + left.fix_witness(); + + auto graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + EXPECT_EQ(outputs.contains(elem), true); + } +} + +/** + * @brief Test graph description for a single poseidon2 permutation + * + * The result should be one connected component with no variables in one gate, + * as permutation connects all variables through its internal structure + */ +TEST(boomerang_poseidon2s, test_graph_for_poseidon2s_one_permutation) +{ + std::array inputs; + auto builder = Builder(); + + for (size_t i = 0; i < Params::t; ++i) { + const auto element = fr::random_element(&engine); + inputs[i] = field_t(witness_t(&builder, element)); + } + + auto poseidon2permutation = Permutation(); + [[maybe_unused]] auto new_state = poseidon2permutation.permutation(&builder, inputs); + for (auto& elem : new_state) { + elem.fix_witness(); + } + + auto graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} + +/** + * @brief Test graph description for two separate poseidon2 permutations + * + * The result should be two connected components (one for each permutation) with no variables in one gate, + * verifying that different input sets create separate components + */ +TEST(boomerang_poseidon2s, test_graph_for_poseidon2s_two_permutations) +{ + // we want to check that 2 permutations for different inputs give different connected components + std::array input1; + std::array input2; + auto builder = Builder(); + + for (size_t i = 0; i < Params::t; ++i) { + const auto el1 = fr::random_element(&engine); + input1[i] = field_t(witness_t(&builder, el1)); + const auto el2 = fr::random_element(&engine); + input2[i] = field_t(witness_t(&builder, el2)); + } + + auto poseidon2permutation = Permutation(); + [[maybe_unused]] auto state1 = poseidon2permutation.permutation(&builder, input1); + [[maybe_unused]] auto state2 = poseidon2permutation.permutation(&builder, input2); + for (auto& elem : state1) { + elem.fix_witness(); + } + for (auto& elem : state2) { + elem.fix_witness(); + } + auto graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 2); + auto variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} + +/** + * @brief Test graph for poseidon2s with varying input sizes + */ +TEST(boomerang_poseidon2s, test_graph_for_poseidon2s) +{ + for (size_t num_inputs = 6; num_inputs < 100; num_inputs++) { + test_poseidon2s_circuit(num_inputs); + } +} + +/** + * @brief Test graph for poseidon2s with default input size + */ +TEST(boomerang_poseidon2s, test_graph_for_poseidon2s_for_one_input_size) +{ + test_poseidon2s_circuit(); +} + +/** + * @brief Test graph for poseidon2s hash with byte arrays of varying sizes + */ +TEST(boomerang_poseidon2s, test_graph_for_poseidon2s_hash_byte_array) +{ + for (size_t num_inputs = 6; num_inputs < 100; num_inputs++) { + test_poseidon2s_hash_byte_array(num_inputs); + } +} + +/** + * @brief Test graph for poseidon2s with repeated hash 
operations
+ */
+TEST(boomerang_poseidon2s, test_graph_for_poseidon2s_hash_repeated_pairs)
+{
+    test_poseidon2s_hash_repeated_pairs();
+}
diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_ram_rom.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_ram_rom.test.cpp
new file mode 100644
index 00000000000..1abdb9cc74b
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_ram_rom.test.cpp
@@ -0,0 +1,174 @@
+#include "barretenberg/boomerang_value_detection/graph.hpp"
+#include "barretenberg/common/test.hpp"
+#include "barretenberg/stdlib/primitives/circuit_builders/circuit_builders.hpp"
+#include "barretenberg/stdlib/primitives/memory/ram_table.hpp"
+#include "barretenberg/stdlib/primitives/memory/rom_table.hpp"
+
+using namespace bb;
+using namespace cdg;
+namespace {
+auto& engine = numeric::get_debug_randomness();
+}
+
+using Builder = UltraCircuitBuilder;
+using field_ct = stdlib::field_t;
+using witness_ct = stdlib::witness_t;
+using rom_table_ct = stdlib::rom_table;
+using ram_table_ct = stdlib::ram_table;
+
+/**
+ * @brief Test graph description for ROM table operations
+ *
+ * @details This test verifies that:
+ * - Reading random values at sequential indices creates one connected component
+ * - Any variable flagged as being in one gate is one of the tracked accumulator variables
+ */
+TEST(boomerang_rom_ram_table, graph_description_rom_table)
+{
+    Builder builder;
+
+    std::vector table_values;
+    const size_t table_size = 10;
+    for (size_t i = 0; i < table_size; ++i) {
+        table_values.emplace_back(witness_ct(&builder, bb::fr::random_element()));
+    }
+    for (auto& elem : table_values) {
+        elem.fix_witness();
+    }
+
+    rom_table_ct table(table_values);
+    std::unordered_set safety_variables;
+
+    field_ct result = field_ct(witness_ct(&builder, (uint64_t)0));
+
+    for (size_t i = 0; i < 10; ++i) {
+        safety_variables.insert(result.witness_index);
+        field_ct index(witness_ct(&builder, (uint64_t)i));
+        index.fix_witness();
+        result += table[index];
+    }
+
+    result.fix_witness();
+    Graph graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    for (const auto& elem : variables_in_one_gate) {
+        EXPECT_EQ(safety_variables.contains(elem), true);
+    }
+}
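The ROM test above and the two RAM tests below all end with the same subset assertion. If the pattern keeps spreading, it could be factored into a helper along these lines (a hypothetical sketch, not part of the diff):

```cpp
// Hypothetical helper: every variable the analyzer flags must be one we pre-registered as safe.
void expect_flagged_subset_of(const std::unordered_set<uint32_t>& flagged,
                              const std::unordered_set<uint32_t>& safe)
{
    for (const auto& idx : flagged) {
        EXPECT_EQ(safe.contains(idx), true) << "unexpected variable in one gate: " << idx;
    }
}
```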
+/**
+ * @brief Test graph description for RAM table read operations
+ *
+ * @details This test verifies that:
+ * - Reading random values at sequential indices creates one connected component
+ * - Any variable flagged as being in one gate is one of the tracked accumulator variables
+ */
+TEST(boomerang_rom_ram_table, graph_description_ram_table_read)
+{
+    Builder builder;
+
+    std::vector table_values;
+    const size_t table_size = 10;
+    for (size_t i = 0; i < table_size; ++i) {
+        table_values.emplace_back(witness_ct(&builder, bb::fr::random_element()));
+    }
+
+    for (auto& elem : table_values) {
+        elem.fix_witness();
+    }
+
+    ram_table_ct table(table_values);
+    field_ct result = field_ct(witness_ct(&builder, (uint64_t)0));
+    std::unordered_set safety_variables;
+
+    for (size_t i = 0; i < 10; ++i) {
+        safety_variables.insert(result.witness_index);
+        field_ct index(witness_ct(&builder, (uint64_t)i));
+        index.fix_witness();
+        result += table.read(index);
+    }
+
+    result.fix_witness();
+    Graph graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    for (const auto& elem : variables_in_one_gate) {
+        EXPECT_EQ(safety_variables.contains(elem), true);
+    }
+}
+
+/**
+ * @brief Test graph description for RAM table write and read operations
+ *
+ * @details This test verifies that:
+ * - Alternating write and read operations create one connected component
+ * - Non-sequential access patterns work correctly
+ * - Any variable flagged as being in one gate is one of the tracked accumulator variables
+ *
+ * The test includes:
+ * - Initial zero initialization
+ * - Multiple update-read cycles
+ * - Non-sequential read access pattern
+ */
+TEST(boomerang_rom_ram_table, graph_description_ram_table_write)
+{
+    Builder builder;
+    const size_t table_size = 10;
+
+    std::vector table_values(table_size);
+    ram_table_ct table(&builder, table_size);
+
+    for (size_t i = 0; i < table_size; ++i) {
+        table.write(i, 0);
+    }
+    std::unordered_set safety_variables;
+    field_ct result(0);
+    safety_variables.insert(result.witness_index);
+
+    const auto update = [&]() {
+        for (size_t i = 0; i < table_size / 2; ++i) {
+            table_values[2 * i] = fr::random_element();
+            table_values[2 * i + 1] = fr::random_element();
+
+            // init with both constant and variable values
+            field_ct value1(witness_ct(&builder, table_values[2 * i]));
+            field_ct value2(witness_ct(&builder, table_values[2 * i + 1]));
+            value1.fix_witness();
+            value2.fix_witness();
+            table.write(2 * i, value1);
+            table.write(2 * i + 1, value2);
+        }
+    };
+
+    const auto read = [&]() {
+        for (size_t i = 0; i < table_size / 2; ++i) {
+            const size_t index = table_size - 2 - (i * 2); // access in something other than basic incremental order
+            field_ct index1(witness_ct(&builder, index));
+            field_ct index2(witness_ct(&builder, index + 1));
+            index1.fix_witness();
+            index2.fix_witness();
+            result += table.read(index1);
+            safety_variables.insert(result.witness_index);
+            result += table.read(index2);
+            safety_variables.insert(result.witness_index);
+        }
+    };
+
+    update();
+    read();
+    update();
+    read();
+    update();
+
+    result.fix_witness();
+    Graph graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    for (const auto& elem : variables_in_one_gate) {
+        EXPECT_EQ(safety_variables.contains(elem), true);
+    }
+}
\ No newline at end of file
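For reference, the non-sequential read order used by graph_description_ram_table_write above visits index pairs from the top of the table downwards. A standalone enumeration of that access pattern:

```cpp
#include <cstddef>
#include <cstdio>

int main()
{
    const std::size_t table_size = 10;
    for (std::size_t i = 0; i < table_size / 2; ++i) {
        const std::size_t index = table_size - 2 - (i * 2);
        std::printf("reads: %zu, %zu\n", index, index + 1); // 8,9  6,7  4,5  2,3  0,1
    }
    return 0;
}
```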
diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp
index 0370398d81d..2d8a2f264ef 100644
--- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp
@@ -14,15 +14,40 @@
 using namespace bb;
 using namespace bb::stdlib;
+using namespace cdg;

 using Builder = UltraCircuitBuilder;
+using byte_array_pt = byte_array;
+using packed_byte_array_pt = packed_byte_array;
+using field_pt = field_t;

-using byte_array_ct = byte_array;
-using packed_byte_array_ct = packed_byte_array;
-using field_ct = field_t;
+/**
+ * The static analyzer usually reports input and output variables as variables in one gate. In tests
+ * these variables are not dangerous, and we can usually filter them out by adding a gate that fixes
+ * the witness; the variables then appear in two gates and are no longer reported. The functions
+ * fix_vector and fix_byte_array do this for a vector of field elements and for a packed_byte_array,
+ * respectively.
+ */
+
+void fix_vector(std::vector& vector)
+{
+    for (auto& elem : vector) {
+        elem.fix_witness();
+    }
+}
+
+void fix_byte_array(packed_byte_array_pt& input)
+{
+    std::vector limbs = input.get_limbs();
+    fix_vector(limbs);
+}

 /**
- all these tests check graph description for sha256 circuits. All circuits have to consist from 1 connected component
+ * @brief Tests for SHA256 circuit graph analysis
+ *
+ * These tests verify that SHA256 circuits have the expected graph structure:
+ * - Each circuit should consist of exactly 1 connected component
+ * - Each variable should appear in multiple gates after witness fixing
+ * These tests mirror the corresponding tests in stdlib.
  */

 TEST(boomerang_stdlib_sha256, test_graph_for_sha256_55_bytes)
@@ -30,23 +55,37 @@
 {
     // 55 bytes is the largest number of bytes that can be hashed in a single block,
     // accounting for the single padding bit, and the 64 size bits required by the SHA-256 standard.
     auto builder = Builder();
-    packed_byte_array_ct input(&builder, "An 8 character password? Snow White and the 7 Dwarves..");
+    packed_byte_array_pt input(&builder, "An 8 character password? Snow White and the 7 Dwarves..");
+    fix_byte_array(input);

-    packed_byte_array_ct output_bits = stdlib::sha256(input);
+    packed_byte_array_pt output_bits = stdlib::sha256(input);

-    std::vector output = output_bits.to_unverified_byte_slices(4);
+    std::vector output = output_bits.to_unverified_byte_slices(4);
+    fix_vector(output);

     Graph graph = Graph(builder);
     auto connected_components = graph.find_connected_components();
     EXPECT_EQ(connected_components.size(), 1);
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
 }

+/**
+ * @brief Test SHA256 circuit graph analysis with NIST test vector 5
+ *
+ * This test verifies the graph structure of a SHA256 circuit when processing
+ * a large input of 1000 repeated 'A' characters (NIST test vector 5).
+ *
+ * The test checks that:
+ * - The circuit consists of exactly 1 connected component
+ * - No variables appear in only one gate after witness fixing
+ *
+ * This is marked as a HEAVY_TEST due to the large input size requiring
+ * significant computation.
+ */
 HEAVY_TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_five)
 {
-    typedef stdlib::field_t field_pt;
-    typedef stdlib::packed_byte_array packed_byte_array_pt;
-
-    auto builder = UltraCircuitBuilder();
+    auto builder = Builder();

     packed_byte_array_pt input(
         &builder,
@@ -61,11 +100,103 @@ HEAVY_TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_five)
         "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
         "AAAAAAAAAA");

+    fix_byte_array(input);
     packed_byte_array_pt output_bits = stdlib::sha256(input);

     std::vector output = output_bits.to_unverified_byte_slices(4);
+    fix_vector(output);
+
+    Graph graph = Graph(builder);
+    auto connected_components = graph.find_connected_components();
+    auto variables_in_one_gate = graph.show_variables_in_one_gate(builder);
+    EXPECT_EQ(variables_in_one_gate.size(), 0);
+    EXPECT_EQ(connected_components.size(), 1);
+}
+/**
+ * @brief Test SHA256 circuit graph analysis with NIST test vector 1
+ *
+ * This test verifies the graph structure of a SHA256 circuit when processing
+ * the input string "abc" (NIST test vector 1).
+ * + * The test checks that: + * - The circuit consists of exactly 1 connected component + * - No variables appear in only one gate after witness fixing + */ +TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_one) +{ + auto builder = Builder(); + packed_byte_array_pt input(&builder, "abc"); + fix_byte_array(input); + packed_byte_array_pt output_bits = stdlib::sha256(input); + fix_byte_array(output_bits); Graph graph = Graph(builder); auto connected_components = graph.find_connected_components(); EXPECT_EQ(connected_components.size(), 1); -} \ No newline at end of file + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} + +/** + * @brief Test SHA256 circuit graph analysis with NIST test vector 2 + * + * This test verifies the graph structure of a SHA256 circuit when processing + * the input string "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" (NIST test vector 2). + */ +TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_two) +{ + auto builder = Builder(); + packed_byte_array_pt input(&builder, "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"); + fix_byte_array(input); + packed_byte_array_pt output_bits = stdlib::sha256(input); + fix_byte_array(output_bits); + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} + +/** + * @brief Test SHA256 circuit graph analysis with NIST test vector 3 + * + * This test verifies the graph structure of a SHA256 circuit when processing + * the input byte 0xbd + */ +TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_three) +{ + auto builder = Builder(); + + // one byte, 0xbd + packed_byte_array_pt input(&builder, std::vector{ 0xbd }); + fix_byte_array(input); + packed_byte_array_pt output_bits = stdlib::sha256(input); + fix_byte_array(output_bits); + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} + +/** + * @brief Test SHA256 circuit graph analysis with NIST test vector 4 + * + * This test verifies the graph structure of a SHA256 circuit when processing + * 4 bytes "c98c8e55" (NIST test vector 4). 
+ */ +TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_four) +{ + auto builder = Builder(); + + // 4 bytes, 0xc98c8e55 + packed_byte_array_pt input(&builder, std::vector{ 0xc9, 0x8c, 0x8e, 0x55 }); + fix_byte_array(input); + packed_byte_array_pt output_bits = stdlib::sha256(input); + fix_byte_array(output_bits); + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + EXPECT_EQ(variables_in_one_gate.size(), 0); +} diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp index d07ebeeac4f..066b8f03e97 100644 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp @@ -10,6 +10,7 @@ #include using namespace bb; +using namespace cdg; TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompose) { @@ -24,7 +25,7 @@ TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompos Graph graph = Graph(circuit_constructor); std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); - EXPECT_EQ(variables_in_on_gate.size(), 1); + EXPECT_EQ(variables_in_on_gate.size(), 0); } TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompose2) @@ -40,7 +41,7 @@ TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompos Graph graph = Graph(circuit_constructor); auto variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); - EXPECT_EQ(variables_in_on_gate.size(), 1); + EXPECT_EQ(variables_in_on_gate.size(), 0); } TEST(boomerang_utils, test_selectors_for_decompose) @@ -85,7 +86,7 @@ TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_two_deco Graph graph = Graph(circuit_constructor); std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); - EXPECT_EQ(variables_in_one_gate.size(), 2); + EXPECT_EQ(variables_in_one_gate.size(), 0); } TEST(boomerang_ultra_circuit_constructor, test_decompose_with_boolean_gates) @@ -126,5 +127,5 @@ TEST(boomerang_ultra_circuit_constructor, test_decompose_for_6_bit_number) Graph graph = Graph(circuit_constructor); std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); - EXPECT_EQ(variables_in_on_gate.size(), 1); + EXPECT_EQ(variables_in_on_gate.size(), 0); } diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp deleted file mode 100644 index c85b25a5217..00000000000 --- a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp +++ /dev/null @@ -1,180 +0,0 @@ -#include "barretenberg/boomerang_value_detection/graph.hpp" -#include "barretenberg/circuit_checker/circuit_checker.hpp" -#include "barretenberg/common/test.hpp" -#include "barretenberg/crypto/sha256/sha256.hpp" -#include "barretenberg/stdlib/hash/sha256/sha256.hpp" -#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" -#include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp" -#include 
"barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.hpp" -#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" - -#include "barretenberg/numeric/bitop/rotate.hpp" -#include "barretenberg/numeric/bitop/sparse_form.hpp" -#include "barretenberg/numeric/random/engine.hpp" - -#include - -#include - -using namespace bb; -using namespace bb::stdlib; - -using Builder = UltraCircuitBuilder; - -using byte_array_ct = byte_array; -using packed_byte_array_ct = packed_byte_array; -using witness_ct = stdlib::witness_t; -using field_ct = field_t; - -bool check_in_byte_array(const uint32_t& real_var_index, const packed_byte_array_ct& byte_array) -{ - std::vector> limbs = byte_array.get_limbs(); - for (const auto& elem : limbs) { - if (elem.witness_index == real_var_index) { - return true; - } - } - return false; -} - -bool check_in_range_lists(const uint32_t& real_var_index, const uint64_t& target_range, const Builder& builder) -{ - auto range_lists = builder.range_lists; - auto target_list = range_lists[target_range]; - for (const auto elem : target_list.variable_indices) { - if (elem == real_var_index) { - return true; - } - } - return false; -} - -/** - * @brief all these tests check circuits for sha256 NIST VECTORS to find variables that won't properly constrained, - * i.e. have variable gates count = 1. Some variables can be from input/output vectors or from range_constraints, - * and they are not dangerous. - */ - -TEST(boomerang_stdlib_sha256, test_variables_gate_counts_for_sha256_55_bytes) -{ - // 55 bytes is the largest number of bytes that can be hashed in a single block, - // accounting for the single padding bit, and the 64 size bits required by the SHA-256 standard. - auto builder = Builder(); - packed_byte_array_ct input(&builder, "An 8 character password? 
Snow White and the 7 Dwarves.."); - - packed_byte_array_ct output_bits = stdlib::sha256(input); - - // std::vector output = output_bits.to_unverified_byte_slices(4); - - Graph graph = Graph(builder); - std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(builder); - std::vector vector_variables_in_on_gate(variables_in_on_gate.begin(), variables_in_on_gate.end()); - std::sort(vector_variables_in_on_gate.begin(), vector_variables_in_on_gate.end()); - for (const auto& elem : vector_variables_in_on_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); - EXPECT_EQ(check, true); - } -} - -TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_one) -{ - auto builder = Builder(); - packed_byte_array_ct input(&builder, "abc"); - packed_byte_array_ct output_bits = stdlib::sha256(input); - - Graph graph = Graph(builder); - std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_one_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); - EXPECT_EQ(check, true); - } -} - -TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_two) -{ - auto builder = Builder(); - - packed_byte_array_ct input(&builder, "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"); - - packed_byte_array_ct output_bits = stdlib::sha256(input); - Graph graph = Graph(builder); - std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_one_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); - EXPECT_EQ(check, true); - } -} - -TEST(boomerang_stdlib_sha256, test_variable_gates_count_sha256_NIST_vector_three) -{ - auto builder = Builder(); - - // one byte, 0xbd - packed_byte_array_ct input(&builder, std::vector{ 0xbd }); - packed_byte_array_ct output_bits = stdlib::sha256(input); - Graph graph = Graph(builder); - std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_one_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); - EXPECT_EQ(check, true); - } -} - -TEST(boomerang_stdlib_sha256, test_variable_gates_count_sha256_NIST_vector_four) -{ - auto builder = Builder(); - - // 4 bytes, 0xc98c8e55 - packed_byte_array_ct input(&builder, std::vector{ 0xc9, 0x8c, 0x8e, 0x55 }); - packed_byte_array_ct output_bits = stdlib::sha256(input); - Graph graph = Graph(builder); - std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_one_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); 
- EXPECT_EQ(check, true); - } -} - -HEAVY_TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_five) -{ - auto builder = Builder(); - - packed_byte_array_ct input( - &builder, - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" - "AAAAAAAAAA"); - - packed_byte_array_ct output_bits = stdlib::sha256(input); - Graph graph = Graph(builder); - std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(builder); - for (const auto& elem : variables_in_on_gate) { - bool result1 = check_in_byte_array(elem, input); - bool result2 = check_in_byte_array(elem, output_bits); - bool result3 = check_in_range_lists(elem, 3, builder); - bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); - EXPECT_EQ(check, true); - } -} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp index e533f05bd28..22536c1762b 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/shplonk/shplemini.hpp @@ -605,8 +605,8 @@ template class ShpleminiVerifier_ { denominators[2] = denominators[0]; denominators[3] = denominators[0]; - // compute the scalars to be multiplied against the commitments [libra_concatenated], [big_sum], [big_sum], and - // [libra_quotient] + // compute the scalars to be multiplied against the commitments [libra_concatenated], [grand_sum], [grand_sum], + // and [libra_quotient] for (size_t idx = 0; idx < NUM_SMALL_IPA_EVALUATIONS; idx++) { Fr scaling_factor = denominators[idx] * shplonk_challenge_power; batching_scalars[idx] = -scaling_factor; diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.cpp index c0c6434faa6..42bd380dc58 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.cpp @@ -85,7 +85,6 @@ template SmallSubgroupIPAProver::SmallSubgroupIPAProver(TranslationData& translation_data, const FF evaluation_challenge_x, const FF batching_challenge_v, - const FF claimed_inner_product, const std::shared_ptr& transcript, std::shared_ptr& commitment_key) : SmallSubgroupIPAProver(transcript, commitment_key) @@ -93,7 +92,6 @@ 
SmallSubgroupIPAProver::SmallSubgroupIPAProver(TranslationData) { - this->claimed_inner_product = claimed_inner_product; label_prefix = "Translation:"; interpolation_domain = translation_data.interpolation_domain; concatenated_polynomial = translation_data.masked_concatenated_polynomial; @@ -101,6 +99,12 @@ SmallSubgroupIPAProver::SmallSubgroupIPAProver(TranslationDatasend_to_verifier(label_prefix + "masking_term_eval", claimed_inner_product); } } @@ -396,17 +400,10 @@ typename Flavor::Curve::ScalarField SmallSubgroupIPAProver::compute_clai */ template typename Flavor::Curve::ScalarField SmallSubgroupIPAProver::compute_claimed_translation_inner_product( - TranslationData& translation_data, - const FF& evaluation_challenge_x, - const FF& batching_challenge_v) + TranslationData& translation_data) { FF claimed_inner_product{ 0 }; if constexpr (IsAnyOf) { - const std::vector coeffs_lagrange_basis = - compute_eccvm_challenge_coeffs(evaluation_challenge_x, batching_challenge_v); - - Polynomial challenge_polynomial_lagrange(coeffs_lagrange_basis); - for (size_t idx = 0; idx < SUBGROUP_SIZE; idx++) { claimed_inner_product += translation_data.concatenated_polynomial_lagrange.at(idx) * challenge_polynomial_lagrange.at(idx); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp index 714e484b39c..98a560e0473 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp @@ -8,6 +8,7 @@ #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/stdlib/primitives/curves/grumpkin.hpp" #include "barretenberg/sumcheck/zk_sumcheck_data.hpp" +#include "small_subgroup_ipa_utils.hpp" #include #include @@ -87,9 +88,6 @@ template class SmallSubgroupIPAProver { // Fixed generator of H static constexpr FF subgroup_generator = Curve::subgroup_generator; - // The SmallSubgroupIPA claim - FF claimed_inner_product; - // Interpolation domain {1, g, \ldots, g^{SUBGROUP_SIZE - 1}} used by ECCVM std::array interpolation_domain; // We use IFFT over BN254 scalar field @@ -123,6 +121,9 @@ template class SmallSubgroupIPAProver { std::shared_ptr commitment_key; public: + // The SmallSubgroupIPA claim + FF claimed_inner_product{ 0 }; + // Default constructor to initialize all polynomials, transcript, and commitment key. 
SmallSubgroupIPAProver(const std::shared_ptr& transcript, std::shared_ptr& commitment_key); @@ -138,7 +139,6 @@ template class SmallSubgroupIPAProver { SmallSubgroupIPAProver(TranslationData& translation_data, const FF evaluation_challenge_x, const FF batching_challenge_v, - const FF claimed_inner_product, const std::shared_ptr& transcript, std::shared_ptr& commitment_key); @@ -161,9 +161,7 @@ template class SmallSubgroupIPAProver { const std::vector& multivariate_challenge, const size_t& log_circuit_size); - static FF compute_claimed_translation_inner_product(TranslationData& translation_data, - const FF& evaluation_challenge_x, - const FF& batching_challenge_v); + FF compute_claimed_translation_inner_product(TranslationData& translation_data); Polynomial static compute_monomial_coefficients(std::span lagrange_coeffs, const std::array& interpolation_domain, @@ -178,6 +176,16 @@ template class SmallSubgroupIPAProver { // Getters for test purposes const Polynomial& get_batched_polynomial() const { return grand_sum_identity_polynomial; } const Polynomial& get_challenge_polynomial() const { return challenge_polynomial; } + + std::array evaluation_points(const FF& small_ipa_evaluation_challenge) + { + return compute_evaluation_points(small_ipa_evaluation_challenge, Curve::subgroup_generator); + } + + std::array evaluation_labels() + { + return get_evaluation_labels(label_prefix); + }; }; /*! @@ -390,6 +398,14 @@ template class SmallSubgroupIPAVerifier { result.begin(), result.end(), result.begin(), [&](FF& denominator) { return denominator * numerator; }); return result; } + static std::array evaluation_points(const FF& small_ipa_evaluation_challenge) + { + return compute_evaluation_points(small_ipa_evaluation_challenge, Curve::subgroup_generator); + } + static std::array evaluation_labels(const std::string& label_prefix) + { + return get_evaluation_labels(label_prefix); + }; }; /** diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.test.cpp index 061d6cb2e0f..be9e821af91 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.test.cpp @@ -34,7 +34,7 @@ template class SmallSubgroupIPATest : public ::testing::Test { // A helper to evaluate the four IPA witness polynomials at x, x*g, x, x std::array evaluate_small_ipa_witnesses( - const std::array, 4>& witness_polynomials) + const std::array, NUM_SMALL_IPA_EVALUATIONS>& witness_polynomials) { // Hard-coded pattern of evaluation: (x, x*g, x, x) return { witness_polynomials[0].evaluate(evaluation_challenge), @@ -289,25 +289,19 @@ TYPED_TEST(SmallSubgroupIPATest, TranslationMaskingTermConsistency) const FF evaluation_challenge_x = FF::random_element(); const FF batching_challenge_v = FF::random_element(); - const FF claimed_inner_product = Prover::compute_claimed_translation_inner_product( - translation_data, evaluation_challenge_x, batching_challenge_v); - - Prover small_subgroup_ipa_prover(translation_data, - evaluation_challenge_x, - batching_challenge_v, - claimed_inner_product, - prover_transcript, - ck); + Prover small_subgroup_ipa_prover( + translation_data, evaluation_challenge_x, batching_challenge_v, prover_transcript, ck); small_subgroup_ipa_prover.prove(); const std::array small_ipa_evaluations = 
this->evaluate_small_ipa_witnesses(small_subgroup_ipa_prover.get_witness_polynomials()); - bool consistency_checked = Verifier::check_eccvm_evaluations_consistency(small_ipa_evaluations, - this->evaluation_challenge, - evaluation_challenge_x, - batching_challenge_v, - claimed_inner_product); + bool consistency_checked = + Verifier::check_eccvm_evaluations_consistency(small_ipa_evaluations, + this->evaluation_challenge, + evaluation_challenge_x, + batching_challenge_v, + small_subgroup_ipa_prover.claimed_inner_product); EXPECT_TRUE(consistency_checked); } @@ -348,24 +342,19 @@ TYPED_TEST(SmallSubgroupIPATest, TranslationMaskingTermConsistencyFailure) const FF evaluation_challenge_x = FF::random_element(); const FF batching_challenge_v = FF::random_element(); - const FF claimed_inner_product = FF::random_element(); - - Prover small_subgroup_ipa_prover(translation_data, - evaluation_challenge_x, - batching_challenge_v, - claimed_inner_product, - prover_transcript, - ck); + Prover small_subgroup_ipa_prover( + translation_data, evaluation_challenge_x, batching_challenge_v, prover_transcript, ck); small_subgroup_ipa_prover.prove(); const std::array small_ipa_evaluations = this->evaluate_small_ipa_witnesses(small_subgroup_ipa_prover.get_witness_polynomials()); - bool consistency_checked = Verifier::check_eccvm_evaluations_consistency(small_ipa_evaluations, - this->evaluation_challenge, - evaluation_challenge_x, - batching_challenge_v, - claimed_inner_product); + bool consistency_checked = + Verifier::check_eccvm_evaluations_consistency(small_ipa_evaluations, + this->evaluation_challenge, + evaluation_challenge_x, + batching_challenge_v, + /*tampered claimed inner product = */ FF::random_element()); EXPECT_TRUE(!consistency_checked); } diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa_utils.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa_utils.hpp new file mode 100644 index 00000000000..cebd939d1b4 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa_utils.hpp @@ -0,0 +1,47 @@ +#pragma once + +#include "barretenberg/common/ref_array.hpp" +#include "barretenberg/constants.hpp" + +#include +#include + +namespace bb { + +/** + * @brief Shared by Prover and Verifier. `label_prefix` is either `Libra:` or `Translation:`. + */ +inline std::array get_evaluation_labels(const std::string& label_prefix) +{ + return { label_prefix + "concatenation_eval", + label_prefix + "grand_sum_shift_eval", + label_prefix + "grand_sum_eval", + label_prefix + "quotient_eval" }; +}; + +/** + * @brief The verification of Grand Sum Identity requires the evaluations G(r), A(g * r), A(r), Q(r). Shared by Prover + * and Verifier. + */ +template +inline std::array compute_evaluation_points(const FF& small_ipa_evaluation_challenge, + const FF& subgroup_generator) +{ + return { small_ipa_evaluation_challenge, + small_ipa_evaluation_challenge * subgroup_generator, + small_ipa_evaluation_challenge, + small_ipa_evaluation_challenge }; +} + +/** + * @brief Contains commitments to polynomials [G], [A], and [Q]. See \ref SmallSubgroupIPAProver docs. + */ +template struct SmallSubgroupIPACommitments { + Commitment concatenated, grand_sum, quotient; + // The grand sum commitment is returned twice since we are opening the corresponding polynomial at 2 points. 
+ RefArray get_all() + { + return { concatenated, grand_sum, grand_sum, quotient }; // {[G], [A], [A], [Q]} + }; +}; +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index ba0272d69cc..f5900be3d18 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -99,6 +99,10 @@ class ECCVMFlavor { // define the containers for storing the contributions from each relation in Sumcheck using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); + // The sub-protocol `compute_translation_opening_claims` outputs an opening claim for the batched univariate + // evaluation of `op`, `Px`, `Py`, `z1`, and `z2`, and an array of opening claims for the evaluations of the + // SmallSubgroupIPA witness polynomials. + static constexpr size_t NUM_TRANSLATION_OPENING_CLAIMS = NUM_SMALL_IPA_EVALUATIONS + 1; using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); // TODO(https://github.com/AztecProtocol/barretenberg/issues/989): refine access specifiers in flavors, this is @@ -1011,11 +1015,19 @@ class ECCVMFlavor { std::vector gemini_fold_comms; std::vector gemini_fold_evals; Commitment shplonk_q_comm; + Commitment translation_concatenated_masking_term_commitment; + FF translation_masking_term_eval; FF translation_eval_op; FF translation_eval_px; FF translation_eval_py; FF translation_eval_z1; FF translation_eval_z2; + Commitment translation_grand_sum_commitment; + Commitment translation_quotient_commitment; + FF translation_concatenation_eval; + FF translation_grand_sum_shift_eval; + FF translation_grand_sum_eval; + FF translation_quotient_eval; Commitment shplonk_q2_comm; Transcript() = default; @@ -1240,6 +1252,8 @@ class ECCVMFlavor { libra_quotient_eval = deserialize_from_buffer(proof_data, num_frs_read); shplonk_q_comm = deserialize_from_buffer(proof_data, num_frs_read); + translation_concatenated_masking_term_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); translation_eval_op = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); translation_eval_px = @@ -1251,6 +1265,20 @@ class ECCVMFlavor { translation_eval_z2 = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); + translation_masking_term_eval = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_grand_sum_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_quotient_commitment = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_concatenation_eval = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_grand_sum_shift_eval = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_grand_sum_eval = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); + translation_quotient_eval = + NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); shplonk_q2_comm = NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read); } @@ -1391,12 +1419,21 @@ class ECCVMFlavor { NativeTranscript::template serialize_to_buffer(libra_quotient_eval, proof_data); NativeTranscript::template serialize_to_buffer(shplonk_q_comm, proof_data); 
+ NativeTranscript::template serialize_to_buffer(translation_concatenated_masking_term_commitment, + proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_op, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_px, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_py, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_z1, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(translation_eval_z2, NativeTranscript::proof_data); + NativeTranscript::template serialize_to_buffer(translation_masking_term_eval, proof_data); + NativeTranscript::template serialize_to_buffer(translation_grand_sum_commitment, proof_data); + NativeTranscript::template serialize_to_buffer(translation_quotient_commitment, proof_data); + NativeTranscript::template serialize_to_buffer(translation_concatenation_eval, proof_data); + NativeTranscript::template serialize_to_buffer(translation_grand_sum_shift_eval, proof_data); + NativeTranscript::template serialize_to_buffer(translation_grand_sum_eval, proof_data); + NativeTranscript::template serialize_to_buffer(translation_quotient_eval, proof_data); NativeTranscript::template serialize_to_buffer(shplonk_q2_comm, NativeTranscript::proof_data); ASSERT(NativeTranscript::proof_data.size() == old_proof_length); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index a25631c6a3e..549d8a9370e 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -149,7 +149,7 @@ void ECCVMProver::execute_pcs_rounds() polynomial_batcher.set_unshifted(key->polynomials.get_unshifted()); polynomial_batcher.set_to_be_shifted_by_one(key->polynomials.get_to_be_shifted()); - const OpeningClaim multivariate_to_univariate_opening_claim = + OpeningClaim multivariate_to_univariate_opening_claim = Shplemini::prove(key->circuit_size, polynomial_batcher, sumcheck_output.challenge, @@ -159,10 +159,9 @@ void ECCVMProver::execute_pcs_rounds() sumcheck_output.round_univariates, sumcheck_output.round_univariate_evaluations); - const OpeningClaim translation_opening_claim = ECCVMProver::compute_translation_opening_claim(); + ECCVMProver::compute_translation_opening_claims(); - const std::array opening_claims = { multivariate_to_univariate_opening_claim, - translation_opening_claim }; + opening_claims.back() = std::move(multivariate_to_univariate_opening_claim); // Reduce the opening claims to a single opening claim via Shplonk const OpeningClaim batch_opening_claim = Shplonk::prove(key->commitment_key, opening_claims, transcript); @@ -196,45 +195,116 @@ ECCVMProof ECCVMProver::construct_proof() } /** - * @brief The evaluations of the wires `op`, `Px`, `Py`, `z_1`, and `z_2` as univariate polynomials have to be proved as - * they are used in the 'TranslatorVerifier::verify_translation' sub-protocol and its recursive counterpart. To increase - * the efficiency, we produce an OpeningClaim that is fed to Shplonk along with the OpeningClaim produced by Shplemini. + * @brief To link the ECCVM Transcript wires `op`, `Px`, `Py`, `z1`, and `z2` to the accumulator computed by the + * translator, we verify their evaluations as univariates. For efficiency reasons, we batch these evaluations. 
+ * + * @details As a sub-protocol of ECCVM, we are batch opening the `op`, `Px`, `Py`, `z1`, and `z2` wires as univariates + * (as opposed to their openings as multilinears performed after Sumcheck). We often refer to these polynomials as + * `translation_polynomials` \f$ T_i \f$ for \f$ i=0, \ldots, 4\f$. + * Below, the `evaluation_challenge_x` is denoted by \f$ x \f$ and `batching_challenge_v` is denoted by \f$v\f$. + * + * The batched translation evaluation + * \f{align}{ + * \sum_{i=0}^4 T_i(x) \cdot v^i + * \f} + * is used by the \ref TranslatorVerifier to bind the ECCOpQueues over BN254 and Grumpkin. Namely, we + * check that the field element \f$ A = \text{accumulated_result} \f$ accumulated from the Ultra ECCOpQueue by + * TranslatorProver satisfies + * \f{align}{ x\cdot A = \sum_{i=0}^4 T_i(x) \cdot v^i, \f} + * where \f$ x \f$ is an artifact of our implementation of shiftable polynomials. + * + * This check gets trickier when the witness wires in ECCVM are masked. Namely, we randomize the last \f$ + * \text{MASKING_OFFSET} \f$ coefficients of \f$ T_i \f$. Let \f$ N = \text{circuit_size} - + * \text{MASKING_OFFSET}\f$. Denote + * \f{align}{ \widetilde{T}_i(X) = T_i(X) + X^N \cdot m_i(X). \f} + * + * Informally speaking, to preserve ZK, the \ref ECCVMVerifier must never obtain the commitments to \f$ T_i \f$ or + * the evaluations \f$ T_i(x) \f$ of the unmasked wires. + * + * With masking, the identity above becomes + * \f{align}{ x\cdot A = \sum_i (\widetilde{T}_i - X^N \cdot m_i(X)) v^i = \sum_i \widetilde{T}_i v^i - X^N \cdot \sum_i + * m_i(X) v^i \f} + * + * The prover can therefore send the evaluations of \f$ \widetilde{T}_i \f$ without revealing witness information, and + * can prove the evaluation \f$ x^N \cdot \sum m_i(x) v^i \f$ using the SmallSubgroupIPA argument. Namely, before + * obtaining \f$ x \f$ and \f$ v \f$, the prover sends a commitment to the polynomial \f$ \widetilde{M} = M + Z_H \cdot + * R\f$, where the coefficients of \f$ M \f$ are given by the concatenation \f{align}{ M = (m_0||m_1||m_2||m_3||m_4 || + * \vec{0}) \f} in the Lagrange basis over the small multiplicative subgroup \f$ H \f$, where \f$ Z_H \f$ is the + * vanishing polynomial \f$ X^{|H|} - 1 \f$ and \f$ R(X) \f$ is a random polynomial of degree \f$ 2 \f$. \ref + * SmallSubgroupIPAProver allows us to prove the inner product of \f$ M \f$ against the `challenge_polynomial` + * \f{align}{ ( 1, x, x^2, x^3, v, v\cdot x, \ldots, v^4, v^4 x, v^4 x^2, v^4 x^3, \vec{0} )\f} + * without revealing any other witness information apart from the claimed inner product. + * + * @return Populates `opening_claims`. * - * @return ProverOpeningClaim */
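To make the layout of the `challenge_polynomial` concrete, the following standalone sketch builds its Lagrange coefficients over \f$ H \f$. The toy `Fr` type, the block length of 4 (matching the powers \f$ x^0,\ldots,x^3 \f$ listed above), and the wire count of 5 are illustrative assumptions rather than the library's actual types and constants.

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy stand-in for a prime-field element (illustration only, not bb::fr).
struct Fr {
    static constexpr uint64_t P = 0xffffffff00000001ULL; // a 64-bit prime
    uint64_t v = 0;
    Fr operator*(const Fr& o) const { return { static_cast<uint64_t>(static_cast<__uint128_t>(v) * o.v % P) }; }
    Fr& operator*=(const Fr& o) { return *this = *this * o; }
};

// Lagrange coefficients of the challenge polynomial over H:
// (1, x, x^2, x^3, v, v*x, ..., v^4, v^4 x, v^4 x^2, v^4 x^3, 0, ..., 0),
// i.e. one block of powers of x per translation wire, scaled by v^i.
// Requires subgroup_size >= num_wires * block_length.
std::vector<Fr> challenge_coeffs(const Fr& x, const Fr& v, size_t subgroup_size)
{
    constexpr size_t num_wires = 5;    // op, Px, Py, z1, z2
    constexpr size_t block_length = 4; // assumed length of each masking term m_i
    std::vector<Fr> coeffs(subgroup_size, Fr{ 0 });
    Fr v_pow{ 1 };
    for (size_t i = 0; i < num_wires; i++) {
        Fr x_pow{ 1 };
        for (size_t j = 0; j < block_length; j++) {
            coeffs[i * block_length + j] = v_pow * x_pow; // v^i * x^j
            x_pow *= x;
        }
        v_pow *= v;
    }
    return coeffs;
}
```

The inner product of this vector with \f$ M = (m_0\|m_1\|m_2\|m_3\|m_4\|\vec{0}) \f$ is then \f$ \sum_i m_i(x) \cdot v^i \f$, the value sent as `Translation:masking_term_eval`.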
-ProverOpeningClaim ECCVMProver::compute_translation_opening_claim() +void ECCVMProver::compute_translation_opening_claims() { - // Collect the polynomials and evaluations to be batched + // Used to capture the batched evaluation of unmasked `translation_polynomials` while preserving ZK + using SmallIPA = SmallSubgroupIPAProver; + + // Initialize SmallSubgroupIPA structures + std::array evaluation_labels; + std::array evaluation_points; + + // Collect the polynomials to be batched RefArray translation_polynomials{ key->polynomials.transcript_op, key->polynomials.transcript_Px, key->polynomials.transcript_Py, key->polynomials.transcript_z1, key->polynomials.transcript_z2 }; - // Get the challenge at which we evaluate all transcript polynomials as univariates + // Extract the masking terms of `translation_polynomials`, concatenate them in the Lagrange basis over SmallSubgroup + // H, mask the resulting polynomial, and commit to it + TranslationData translation_data(translation_polynomials, transcript, key->commitment_key); + + // Get a challenge to evaluate the `translation_polynomials` as univariates evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); - // Evaluate the transcript polynomials as univariates and add their evaluations at x to the transcript + // Evaluate `translation_polynomials` as univariates and add their evaluations at x to the transcript for (auto [eval, poly, label] : - zip_view(translation_evaluations.get_all(), translation_polynomials, translation_labels)) { + zip_view(translation_evaluations.get_all(), translation_polynomials, translation_evaluations.labels)) { eval = poly.evaluate(evaluation_challenge_x); transcript->template send_to_verifier(label, eval); } // Get another challenge to batch the evaluations of the transcript polynomials - translation_batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge_v"); + batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge_v"); + + SmallIPA translation_masking_term_prover( + translation_data, evaluation_challenge_x, batching_challenge_v, transcript, key->commitment_key); + translation_masking_term_prover.prove(); + + // Get the challenge to check evaluations of the SmallSubgroupIPA witness polynomials + FF small_ipa_evaluation_challenge = + transcript->template get_challenge("Translation:small_ipa_evaluation_challenge"); + + // Populate SmallSubgroupIPA opening claims: + // 1. Get the evaluation points and labels + evaluation_points = translation_masking_term_prover.evaluation_points(small_ipa_evaluation_challenge); + evaluation_labels = translation_masking_term_prover.evaluation_labels(); + // 2.
Compute the evaluations of witness polynomials at corresponding points, send them to the verifier, and create + // the opening claims + for (size_t idx = 0; idx < NUM_SMALL_IPA_EVALUATIONS; idx++) { + auto witness_poly = translation_masking_term_prover.get_witness_polynomials()[idx]; + const FF evaluation = witness_poly.evaluate(evaluation_points[idx]); + transcript->send_to_verifier(evaluation_labels[idx], evaluation); + opening_claims[idx] = { .polynomial = witness_poly, .opening_pair = { evaluation_points[idx], evaluation } }; + } - // Construct the batched polynomial and batched evaluation to produce the batched opening claim + // Compute the opening claim for the masked evaluations of `op`, `Px`, `Py`, `z1`, and `z2` at + // `evaluation_challenge_x` batched by the powers of `batching_challenge_v`. Polynomial batched_translation_univariate{ key->circuit_size }; FF batched_translation_evaluation{ 0 }; FF batching_scalar = FF(1); for (auto [polynomial, eval] : zip_view(translation_polynomials, translation_evaluations.get_all())) { batched_translation_univariate.add_scaled(polynomial, batching_scalar); batched_translation_evaluation += eval * batching_scalar; - batching_scalar *= translation_batching_challenge_v; + batching_scalar *= batching_challenge_v; } - return { .polynomial = batched_translation_univariate, - .opening_pair = { evaluation_challenge_x, batched_translation_evaluation } }; + // Add the batched claim to the array of SmallSubgroupIPA opening claims. + opening_claims[NUM_SMALL_IPA_EVALUATIONS] = { batched_translation_univariate, + { evaluation_challenge_x, batched_translation_evaluation } }; } } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp index e930f00d9c2..83b68df9dd6 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp @@ -45,18 +45,19 @@ class ECCVMProver { ECCVMProof export_proof(); ECCVMProof construct_proof(); - OpeningClaim compute_translation_opening_claim(); + void compute_translation_opening_claims(); std::shared_ptr transcript; std::shared_ptr ipa_transcript; bool fixed_size; - TranslationEvaluations translation_evaluations; + // Final ShplonkProver consumes an array consisting of Translation Opening Claims and a + // `multivariate_to_univariate_opening_claim` + static constexpr size_t NUM_OPENING_CLAIMS = ECCVMFlavor::NUM_TRANSLATION_OPENING_CLAIMS + 1; + std::array opening_claims; - std::array translation_labels = { - "Translation:op", "Translation:Px", "Translation:Py", "Translation:z1", "Translation:z2" - }; + TranslationEvaluations translation_evaluations; std::vector public_inputs; @@ -68,7 +69,7 @@ class ECCVMProver { ZKData zk_sumcheck_data; FF evaluation_challenge_x; - FF translation_batching_challenge_v; + FF batching_challenge_v; SumcheckOutput sumcheck_output; }; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp index 6ae4ae9ba0c..4e961de3dbd 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp @@ -183,6 +183,7 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "Shplonk:z"); round++; + manifest_expected.add_entry(round, "Translation:concatenated_masking_term_commitment", frs_per_G); manifest_expected.add_challenge(round, 
"Translation:evaluation_challenge_x"); round++; @@ -194,6 +195,16 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "Translation:batching_challenge_v"); round++; + manifest_expected.add_entry(round, "Translation:masking_term_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Translation:grand_sum_commitment", frs_per_G); + manifest_expected.add_entry(round, "Translation:quotient_commitment", frs_per_G); + manifest_expected.add_challenge(round, "Translation:small_ipa_evaluation_challenge"); + + round++; + manifest_expected.add_entry(round, "Translation:concatenation_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Translation:grand_sum_shift_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Translation:grand_sum_eval", frs_per_Fr); + manifest_expected.add_entry(round, "Translation:quotient_eval", frs_per_Fr); manifest_expected.add_challenge(round, "Shplonk:nu"); round++; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_translation_data.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_translation_data.hpp index f5e0e577f49..0fc13e574c2 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_translation_data.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_translation_data.hpp @@ -72,7 +72,7 @@ template class TranslationData { compute_concatenated_polynomials(transcript_polynomials); // Commit to M(X) + Z_H(X)*R(X), where R is a random polynomial of WITNESS_MASKING_TERM_LENGTH. - transcript->template send_to_verifier("Translation:masking_term_commitment", + transcript->template send_to_verifier("Translation:concatenated_masking_term_commitment", commitment_key->commit(masked_concatenated_polynomial)); } /** diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index 580b1353fe4..4b89bf68ac3 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -1,6 +1,7 @@ #include "./eccvm_verifier.hpp" #include "barretenberg/commitment_schemes/shplonk/shplemini.hpp" #include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" +#include "barretenberg/commitment_schemes/small_subgroup_ipa/small_subgroup_ipa.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" namespace bb { @@ -26,7 +27,7 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) VerifierCommitments commitments{ key }; CommitmentLabels commitment_labels; - const auto circuit_size = transcript->template receive_from_prover("circuit_size"); + circuit_size = transcript->template receive_from_prover("circuit_size"); ASSERT(circuit_size == key->circuit_size); for (auto [comm, label] : zip_view(commitments.get_wires(), commitment_labels.get_wires())) { @@ -96,20 +97,19 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) sumcheck_output.round_univariate_evaluations); // Reduce the accumulator to a single opening claim - const OpeningClaim multivariate_to_univariate_opening_claim = + OpeningClaim multivariate_to_univariate_opening_claim = PCS::reduce_batch_opening_claim(sumcheck_batch_opening_claims); - // Produce the opening claim for batch opening of 'op', 'Px', 'Py', 'z1', and 'z2' wires as univariate polynomials - translation_commitments = { commitments.transcript_op, - commitments.transcript_Px, - commitments.transcript_Py, - commitments.transcript_z1, - commitments.transcript_z2 }; + // Produce the opening claim for batch opening of `op`, `Px`, `Py`, `z1`, and `z2` wires as 
univariate polynomials + std::array translation_commitments = { commitments.transcript_op, + commitments.transcript_Px, + commitments.transcript_Py, + commitments.transcript_z1, + commitments.transcript_z2 }; - const OpeningClaim translation_opening_claim = compute_translation_opening_claim(translation_commitments); + compute_translation_opening_claims(translation_commitments); - const std::array opening_claims = { multivariate_to_univariate_opening_claim, - translation_opening_claim }; + opening_claims.back() = multivariate_to_univariate_opening_claim; // Construct and verify the combined opening claim const OpeningClaim batch_opening_claim = @@ -119,43 +119,97 @@ bool ECCVMVerifier::verify_proof(const ECCVMProof& proof) PCS::reduce_verify(key->pcs_verification_key, batch_opening_claim, ipa_transcript); vinfo("eccvm sumcheck verified?: ", sumcheck_output.verified); vinfo("batch opening verified?: ", batched_opening_verified); - return sumcheck_output.verified && batched_opening_verified && consistency_checked; + return sumcheck_output.verified && batched_opening_verified && consistency_checked && + translation_masking_consistency_checked; } /** - * @brief To link the ECCVM Transcript wires 'op', 'Px', 'Py', 'z1', and 'z2' to the accumulator computed by the + * @brief To link the ECCVM Transcript wires `op`, `Px`, `Py`, `z1`, and `z2` to the accumulator computed by the * translator, we verify their evaluations as univariates. For efficiency reasons, we batch these evaluations. * - * @param translation_commitments Commitments to 'op', 'Px', 'Py', 'z1', and 'z2' - * @return OpeningClaim + * @details For details, see the docs of the \ref ECCVMProver::compute_translation_opening_claims() method. + * + * @param translation_commitments Commitments to `op`, `Px`, `Py`, `z1`, and `z2` + * @return Populates `opening_claims`.
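Stated in one line, the check that these opening claims ultimately support (carried out by `verify_translation` further below, after the masking-term evaluation has been shifted by \f$ x^N \f$) is the following restatement of the identities documented on the prover side; nothing here is new protocol logic:

```latex
% Final translation consistency check: \widetilde{T}_i are the masked wires,
% A is the Translator's accumulated_result, N = circuit_size - MASKING_OFFSET.
x \cdot A \stackrel{?}{=} \sum_{i=0}^{4} \widetilde{T}_i(x) \cdot v^i - x^{N} \cdot \sum_{i=0}^{4} m_i(x) \cdot v^i
```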
*/ -OpeningClaim ECCVMVerifier::compute_translation_opening_claim( +void ECCVMVerifier::compute_translation_opening_claims( const std::array& translation_commitments) { + TranslationEvaluations_ translation_evaluations; + + // Used to capture the batched evaluation of unmasked `translation_polynomials` while preserving ZK + using SmallIPA = SmallSubgroupIPAVerifier; + + // Initialize SmallSubgroupIPA structures + SmallSubgroupIPACommitments small_ipa_commitments; + std::array small_ipa_evaluations; + const std::array labels = SmallIPA::evaluation_labels("Translation:"); + + // Get a commitment to M + Z_H * R, where M is a concatenation of the masking terms of `translation_polynomials`, + // Z_H = X^{|H|} - 1, and R is a random degree 2 polynomial + small_ipa_commitments.concatenated = + transcript->template receive_from_prover("Translation:concatenated_masking_term_commitment"); + + // Get a challenge to evaluate `translation_polynomials` as univariates evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); - // Construct arrays of commitments and evaluations to be batched, the evaluations being received from the prover - std::array translation_evaluations = { - transcript->template receive_from_prover("Translation:op"), - transcript->template receive_from_prover("Translation:Px"), - transcript->template receive_from_prover("Translation:Py"), - transcript->template receive_from_prover("Translation:z1"), - transcript->template receive_from_prover("Translation:z2") - }; + // Populate the translation evaluations {`op(x)`, `Px(x)`, `Py(x)`, `z1(x)`, `z2(x)`} to be batched + for (auto [eval, label] : zip_view(translation_evaluations.get_all(), translation_evaluations.labels)) { + eval = transcript->template receive_from_prover(label); + } // Get the batching challenge for commitments and evaluations batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge_v"); + // Get the value ∑ mᵢ(x) ⋅ vⁱ + translation_masking_term_eval = transcript->template receive_from_prover("Translation:masking_term_eval"); + + // Receive commitments to the SmallSubgroupIPA witnesses that are computed once x and v are available + small_ipa_commitments.grand_sum = + transcript->template receive_from_prover("Translation:grand_sum_commitment"); + small_ipa_commitments.quotient = + transcript->template receive_from_prover("Translation:quotient_commitment"); + + // Get a challenge for the evaluations of the concatenated masking term G, grand sum A, its shift, and grand sum + // identity quotient Q + const FF small_ipa_evaluation_challenge = + transcript->template get_challenge("Translation:small_ipa_evaluation_challenge"); + + // Compute {r, r * g, r, r}, where r = `small_ipa_evaluation_challenge` + std::array evaluation_points = + SmallIPA::evaluation_points(small_ipa_evaluation_challenge); + + // Get the evaluations G(r), A(g * r), A(r), Q(r) + for (size_t idx = 0; idx < NUM_SMALL_IPA_EVALUATIONS; idx++) { + small_ipa_evaluations[idx] = transcript->template receive_from_prover(labels[idx]); + opening_claims[idx] = { { evaluation_points[idx], small_ipa_evaluations[idx] }, + small_ipa_commitments.get_all()[idx] }; + } + + // Check Grand Sum Identity at r + translation_masking_consistency_checked = + SmallIPA::check_eccvm_evaluations_consistency(small_ipa_evaluations, + small_ipa_evaluation_challenge, + evaluation_challenge_x, + batching_challenge_v, + translation_masking_term_eval); + // Compute the batched commitment and batched evaluation for the 
univariate opening claim Commitment batched_commitment = translation_commitments[0]; - FF batched_translation_evaluation = translation_evaluations[0]; + FF batched_translation_evaluation = translation_evaluations.get_all()[0]; FF batching_scalar = batching_challenge_v; for (size_t idx = 1; idx < NUM_TRANSLATION_EVALUATIONS; ++idx) { batched_commitment = batched_commitment + translation_commitments[idx] * batching_scalar; - batched_translation_evaluation += batching_scalar * translation_evaluations[idx]; + batched_translation_evaluation += batching_scalar * translation_evaluations.get_all()[idx]; batching_scalar *= batching_challenge_v; } - return { { evaluation_challenge_x, batched_translation_evaluation }, batched_commitment }; + // Place the claim into the array containing the SmallSubgroupIPA opening claims + opening_claims[NUM_SMALL_IPA_EVALUATIONS] = { { evaluation_challenge_x, batched_translation_evaluation }, + batched_commitment }; + + // Compute `translation_masking_term_eval` * `evaluation_challenge_x`^{circuit_size - MASKING_OFFSET} + shift_translation_masking_term_eval(evaluation_challenge_x, translation_masking_term_eval); }; + } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.hpp index be7c71785f8..857e4d56360 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.hpp @@ -23,10 +23,15 @@ class ECCVMVerifier { : ECCVMVerifier(std::make_shared(proving_key)){}; bool verify_proof(const ECCVMProof& proof); - OpeningClaim compute_translation_opening_claim( + void compute_translation_opening_claims( const std::array& translation_commitments); - std::array translation_commitments; + uint32_t circuit_size; + // Final ShplonkVerifier consumes an array consisting of Translation Opening Claims and a + // `multivariate_to_univariate_opening_claim` + static constexpr size_t NUM_OPENING_CLAIMS = ECCVMFlavor::NUM_TRANSLATION_OPENING_CLAIMS + 1; + std::array, NUM_OPENING_CLAIMS> opening_claims; + std::shared_ptr key; std::map commitments; std::shared_ptr transcript;
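For orientation, the slot layout of `opening_claims` implied by the code above can be sketched as follows; `NUM_SMALL_IPA_EVALUATIONS = 4` is an assumption consistent with the four evaluation labels, not a value quoted from the headers.

```cpp
#include <cstddef>

// Assumed constants, mirroring the relationships referenced in the diff.
constexpr std::size_t NUM_SMALL_IPA_EVALUATIONS = 4;
constexpr std::size_t NUM_TRANSLATION_OPENING_CLAIMS = NUM_SMALL_IPA_EVALUATIONS + 1;
constexpr std::size_t NUM_OPENING_CLAIMS = NUM_TRANSLATION_OPENING_CLAIMS + 1;

// Slot-by-slot contents of `opening_claims`:
//   [0] G(r)     : concatenated masking term at r
//   [1] A(g * r) : grand sum at the shifted point
//   [2] A(r)     : grand sum at r
//   [3] Q(r)     : grand sum identity quotient at r
//   [4]          : batched translation claim at x
//   [5]          : multivariate_to_univariate_opening_claim, set via opening_claims.back()
static_assert(NUM_OPENING_CLAIMS == 6, "one claim per slot listed above");
```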
@@ -35,5 +40,9 @@ class ECCVMVerifier { // Translation evaluation and batching challenges. They are propagated to the TranslatorVerifier FF evaluation_challenge_x; FF batching_challenge_v; + // The value ∑ mᵢ(x) ⋅ vⁱ which needs to be propagated to TranslatorVerifier + FF translation_masking_term_eval; + + bool translation_masking_consistency_checked = false; }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp index 1c0205e2c96..7fa5281e65b 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp @@ -200,7 +200,7 @@ class GoblinProver { */ void prove_translator() { - fq translation_batching_challenge_v = eccvm_prover->translation_batching_challenge_v; + fq translation_batching_challenge_v = eccvm_prover->batching_challenge_v; fq evaluation_challenge_x = eccvm_prover->evaluation_challenge_x; std::shared_ptr transcript = eccvm_prover->transcript; eccvm_key = eccvm_prover->key; @@ -300,7 +300,8 @@ class GoblinVerifier { proof.translator_proof, eccvm_verifier.evaluation_challenge_x, eccvm_verifier.batching_challenge_v); // TODO(https://github.com/AztecProtocol/barretenberg/issues/799): Ensure translation_evaluations are passed // correctly - bool translation_verified = translator_verifier.verify_translation(proof.translation_evaluations); + bool translation_verified = translator_verifier.verify_translation( + proof.translation_evaluations, eccvm_verifier.translation_masking_term_eval); vinfo("merge verified?: ", merge_verified); vinfo("eccvm verified?: ", eccvm_verified); diff --git a/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp b/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp index 619314c59d8..1b51732c915 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/translation_evaluations.hpp @@ -8,15 +8,46 @@ namespace bb { /** * @brief Stores the evaluations from ECCVM, checked against the translator evaluations as a final step of translator. * - * @tparam BF The base field of the curve, translation evaluations are represented in the base field. - * @tparam FF The scalar field of the curve, used in Goblin to help convert the proof into a buffer for ACIR. + * @tparam BF The base field of BN254, translation evaluations are represented in the base field. + * @tparam FF The scalar field of BN254, used in Goblin to help convert the proof into a buffer for ACIR. Note that this + * struct is also used by ECCVMVerifiers, where the second template parameter is not required, hence we set it to `void` + * by default. */ -template struct TranslationEvaluations_ { +template struct TranslationEvaluations_ { BF op, Px, Py, z1, z2; static size_t size() { return field_conversion::calc_num_bn254_frs() * NUM_TRANSLATION_EVALUATIONS; } RefArray get_all() { return { op, Px, Py, z1, z2 }; } + std::array labels = { + "Translation:op", "Translation:Px", "Translation:Py", "Translation:z1", "Translation:z2" + }; + MSGPACK_FIELDS(op, Px, Py, z1, z2); }; + +/** + * @brief Efficiently compute \f$ \text{translation_masking_term_eval} \cdot x^{N}\f$, where \f$ N = + * 2^{\text{CONST_ECCVM_LOG_N}} - \text{MASKING_OFFSET} \f$. + * @details As described in \ref ECCVMProver::compute_translation_opening_claims(), the Translator's + * `accumulated_result` \f$ A \f$ satisfies \f{align}{ x\cdot A = \sum_i \widetilde{T}_i(x) v^i - x^N \cdot + * \text{translation_masking_term_eval}.
\f} Therefore, before propagating the `translation_masking_term_eval`, + * ECCVMVerifier needs to multiply it by \f$ x^N \f$. + */ +template +static void shift_translation_masking_term_eval(const FF& evaluation_challenge_x, FF& translation_masking_term_eval) +{ + // This method is only invoked within Goblin, which runs ECCVM with a fixed size. + static constexpr size_t ECCVM_FIXED_SIZE = 1UL << CONST_ECCVM_LOG_N; + + FF x_to_circuit_size = evaluation_challenge_x.pow(ECCVM_FIXED_SIZE); + + // Compute x^{MASKING_OFFSET} + const FF x_to_masking_offset = evaluation_challenge_x.pow(MASKING_OFFSET); + + // Update `translation_masking_term_eval` + translation_masking_term_eval *= x_to_circuit_size; + translation_masking_term_eval *= x_to_masking_offset.invert(); +}; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/polynomials/polynomial.hpp b/barretenberg/cpp/src/barretenberg/polynomials/polynomial.hpp index d5888aa1710..eb507b55795 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/polynomial.hpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/polynomial.hpp @@ -2,6 +2,7 @@ #include "barretenberg/common/mem.hpp" #include "barretenberg/common/op_count.hpp" #include "barretenberg/common/zip_view.hpp" +#include "barretenberg/constants.hpp" #include "barretenberg/crypto/sha256/sha256.hpp" #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" #include "barretenberg/plonk_honk_shared/types/circuit_type.hpp" @@ -255,6 +256,21 @@ template class Polynomial { */ Polynomial& operator*=(Fr scaling_factor); + /** + * @brief Add random values to the coefficients of a polynomial. In practice, this is used for ensuring the + * commitment and evaluation of a polynomial don't leak information about the coefficients in the context of zero + * knowledge. + */ + void mask() + { + // Ensure there is sufficient space to add masking and also that we have memory allocated up to the virtual_size + ASSERT(virtual_size() >= MASKING_OFFSET); + ASSERT(virtual_size() == end_index()); + // Randomize the last MASKING_OFFSET coefficients + for (size_t i = virtual_size() - MASKING_OFFSET; i < virtual_size(); i++) { + at(i) = Fr::random_element(); + } + } + std::size_t size() const { return coefficients_.size(); } std::size_t virtual_size() const { return coefficients_.virtual_size(); } void increase_virtual_size(const size_t size_in) { coefficients_.increase_virtual_size(size_in); }; @@ -400,7 +416,6 @@ template class Polynomial { // Namely, it supports polynomial shifts and 'virtual' zeroes past a size up until a 'virtual' size. SharedShiftedVirtualZeroesArray coefficients_; }; - // NOLINTNEXTLINE(cppcoreguidelines-avoid-c-arrays) template std::shared_ptr _allocate_aligned_memory(size_t n_elements) { @@ -514,4 +529,4 @@ template auto zip_polys(Poly&& poly, Polys&&.
ASSERT((poly.start_index() == polys.start_index() && poly.end_index() == polys.end_index()) && ...); return zip_view(poly.indices(), poly.coeffs(), polys.coeffs()...); } -} // namespace bb \ No newline at end of file +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp index c4f7659823e..deed4b9a67a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.cpp @@ -1,6 +1,7 @@ #include "./eccvm_recursive_verifier.hpp" #include "barretenberg/commitment_schemes/shplonk/shplemini.hpp" #include "barretenberg/commitment_schemes/shplonk/shplonk.hpp" +#include "barretenberg/goblin/translation_evaluations.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" #include "barretenberg/transcript/transcript.hpp" @@ -118,10 +119,9 @@ ECCVMRecursiveVerifier_::verify_proof(const ECCVMProof& proof) commitments.transcript_z1, commitments.transcript_z2 }; // Reduce the univariate evaluations claims to a single claim to be batched by Shplonk - const OpeningClaim translation_opening_claim = compute_translation_opening_claim(translation_commitments); - // Construct and verify the combined opening claim - const std::array opening_claims = { multivariate_to_univariate_opening_claim, - translation_opening_claim }; + compute_translation_opening_claims(translation_commitments); + + opening_claims.back() = std::move(multivariate_to_univariate_opening_claim); const OpeningClaim batch_opening_claim = Shplonk::reduce_verification(key->pcs_verification_key->get_g1_identity(), opening_claims, transcript); @@ -130,44 +130,94 @@ } /** - * @brief To link the ECCVM Transcript wires 'op', 'Px', 'Py', 'z1', and 'z2' to the accumulator computed by the + * @brief To link the ECCVM Transcript wires `op`, `Px`, `Py`, `z1`, and `z2` to the accumulator computed by the * translator, we verify their evaluations as univariates. For efficiency reasons, we batch these evaluations. * - * @tparam Flavor ECCVMRecursiveFlavor_ - * @param translation_commitments Commitments to 'op', 'Px', 'Py', 'z1', and 'z2' - * @return OpeningClaim + * @details For details, see the docs of the \ref ECCVMProver::compute_translation_opening_claims() method. + * + * @param translation_commitments Commitments to `op`, `Px`, `Py`, `z1`, and `z2` + * @return Populates `opening_claims`. */
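The exponent bookkeeping in `shift_translation_masking_term_eval` can be sanity-checked in isolation: multiplying by \f$ x^{2^{\text{LOG_N}}} \f$ and dividing by \f$ x^{\text{MASKING_OFFSET}} \f$ amounts to multiplying by \f$ x^N \f$. A toy check over a small prime field, with hypothetical constants `LOG_N = 5` and `MASKING_OFFSET = 4` standing in for `CONST_ECCVM_LOG_N` and the real masking offset:

```cpp
#include <cstdint>
#include <cstdio>

// Toy modular arithmetic over a small prime (illustration only).
constexpr uint64_t P = 1000000007ULL;
uint64_t mul(uint64_t a, uint64_t b) { return static_cast<uint64_t>(static_cast<__uint128_t>(a) * b % P); }
uint64_t pow_mod(uint64_t base, uint64_t exp)
{
    // Square-and-multiply exponentiation mod P.
    uint64_t result = 1;
    for (; exp != 0; exp >>= 1, base = mul(base, base)) {
        if ((exp & 1) != 0) {
            result = mul(result, base);
        }
    }
    return result;
}
uint64_t inv(uint64_t a) { return pow_mod(a, P - 2); } // Fermat inversion, P prime

int main()
{
    constexpr uint64_t LOG_N = 5;          // hypothetical CONST_ECCVM_LOG_N
    constexpr uint64_t MASKING_OFFSET = 4; // hypothetical masking offset
    constexpr uint64_t N = (1ULL << LOG_N) - MASKING_OFFSET; // 28
    const uint64_t x = 123456;
    // x^{2^LOG_N} * (x^{MASKING_OFFSET})^{-1} == x^N
    const uint64_t shifted = mul(pow_mod(x, 1ULL << LOG_N), inv(pow_mod(x, MASKING_OFFSET)));
    std::printf("match: %s\n", shifted == pow_mod(x, N) ? "yes" : "no");
    return 0;
}
```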
template -OpeningClaim ECCVMRecursiveVerifier_::compute_translation_opening_claim( +void ECCVMRecursiveVerifier_::compute_translation_opening_claims( const std::vector& translation_commitments) { + TranslationEvaluations_ translation_evaluations; + + // Used to capture the batched evaluation of unmasked `translation_polynomials` while preserving ZK + using SmallIPA = SmallSubgroupIPAVerifier; + + // Initialize SmallSubgroupIPA structures + SmallSubgroupIPACommitments small_ipa_commitments; + std::array small_ipa_evaluations; + std::array labels = SmallIPA::evaluation_labels("Translation:"); + + // Get a commitment to M + Z_H * R, where M is a concatenation of the masking terms of `translation_polynomials`, + // Z_H = X^{|H|} - 1, and R is a random degree 2 polynomial + small_ipa_commitments.concatenated = + transcript->template receive_from_prover("Translation:concatenated_masking_term_commitment"); + + // Get a challenge to evaluate `translation_polynomials` as univariates evaluation_challenge_x = transcript->template get_challenge("Translation:evaluation_challenge_x"); - // Construct the array of evaluations to be batched, the evaluations being received from the prover - std::array translation_evaluations = { - transcript->template receive_from_prover("Translation:op"), - transcript->template receive_from_prover("Translation:Px"), - transcript->template receive_from_prover("Translation:Py"), - transcript->template receive_from_prover("Translation:z1"), - transcript->template receive_from_prover("Translation:z2") - }; + // Populate the translation evaluations {`op(x)`, `Px(x)`, `Py(x)`, `z1(x)`, `z2(x)`} to be batched + for (auto [eval, label] : zip_view(translation_evaluations.get_all(), translation_evaluations.labels)) { + eval = transcript->template receive_from_prover(label); + } // Get the batching challenge for commitments and evaluations batching_challenge_v = transcript->template get_challenge("Translation:batching_challenge_v"); + // Get the value ∑ mᵢ(x) ⋅ vⁱ + translation_masking_term_eval = transcript->template receive_from_prover("Translation:masking_term_eval"); + + // Receive commitments to the SmallSubgroupIPA witnesses that are computed once x and v are available + small_ipa_commitments.grand_sum = + transcript->template receive_from_prover("Translation:grand_sum_commitment"); + small_ipa_commitments.quotient = + transcript->template receive_from_prover("Translation:quotient_commitment"); + + // Get a challenge for the evaluations of the concatenated masking term G, grand sum A, its shift, and grand sum + // identity quotient Q + const FF small_ipa_evaluation_challenge = + transcript->template get_challenge("Translation:small_ipa_evaluation_challenge"); + + // Compute {r, r * g, r, r}, where r = `small_ipa_evaluation_challenge` + std::array evaluation_points = + SmallIPA::evaluation_points(small_ipa_evaluation_challenge); + + // Get the evaluations G(r), A(g * r), A(r), Q(r) + for (size_t idx = 0; idx < NUM_SMALL_IPA_EVALUATIONS; idx++) { + small_ipa_evaluations[idx] = transcript->template receive_from_prover(labels[idx]); + opening_claims[idx] = { { evaluation_points[idx], small_ipa_evaluations[idx] }, + small_ipa_commitments.get_all()[idx] }; + } + + // Check Grand Sum Identity at r + SmallIPA::check_eccvm_evaluations_consistency(small_ipa_evaluations, + small_ipa_evaluation_challenge, + evaluation_challenge_x, + batching_challenge_v, + translation_masking_term_eval); + + // Compute the batched commitment and batched evaluation for the univariate opening claim - auto
batched_translation_evaluation = translation_evaluations[0]; + auto batched_translation_evaluation = translation_evaluations.get_all()[0]; auto batching_scalar = batching_challenge_v; std::vector batching_challenges = { FF::one() }; for (size_t idx = 1; idx < NUM_TRANSLATION_EVALUATIONS; ++idx) { - batched_translation_evaluation += batching_scalar * translation_evaluations[idx]; + batched_translation_evaluation += batching_scalar * translation_evaluations.get_all()[idx]; batching_challenges.emplace_back(batching_scalar); batching_scalar *= batching_challenge_v; } const Commitment batched_commitment = Commitment::batch_mul(translation_commitments, batching_challenges); - return { { evaluation_challenge_x, batched_translation_evaluation }, batched_commitment }; + // Place the claim into the array containing the SmallSubgroupIPA opening claims + opening_claims[NUM_SMALL_IPA_EVALUATIONS] = { { evaluation_challenge_x, batched_translation_evaluation }, + batched_commitment }; + + // Compute `translation_masking_term_eval` * `evaluation_challenge_x`^{circuit_size - MASKING_OFFSET} + shift_translation_masking_term_eval(evaluation_challenge_x, translation_masking_term_eval); }; template class ECCVMRecursiveVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.hpp index 046a3bf6220..6864dc3e689 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/eccvm_verifier/eccvm_recursive_verifier.hpp @@ -22,19 +22,21 @@ template class ECCVMRecursiveVerifier_ { // TODO(https://github.com/AztecProtocol/barretenberg/issues/991): switch recursive verifiers to StdlibProof std::pair, std::shared_ptr> verify_proof(const ECCVMProof& proof); + void compute_translation_opening_claims(const std::vector& translation_commitments); std::shared_ptr key; Builder* builder; std::shared_ptr transcript; std::shared_ptr ipa_transcript; + // Final ShplonkVerifier consumes an array consisting of Translation Opening Claims and a + // `multivariate_to_univariate_opening_claim` + static constexpr size_t NUM_OPENING_CLAIMS = ECCVMFlavor::NUM_TRANSLATION_OPENING_CLAIMS + 1; + std::array, NUM_OPENING_CLAIMS> opening_claims; + FF translation_masking_term_eval; // Translation evaluation and batching challenges.
They are propagated to the TranslatorVerifier FF evaluation_challenge_x; FF batching_challenge_v; - - std::vector translation_commitments; - - OpeningClaim compute_translation_opening_claim(const std::vector& translation_commitments); }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/stdlib/goblin_verifier/goblin_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/goblin_verifier/goblin_recursive_verifier.cpp index 1ded2e85213..19278f0ea8a 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/goblin_verifier/goblin_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/goblin_verifier/goblin_recursive_verifier.cpp @@ -34,7 +34,7 @@ GoblinRecursiveVerifierOutput GoblinRecursiveVerifier::verify(const GoblinProof& TranslatorBF::from_witness(builder, native_translation_evaluations.z2) }; - translator_verifier.verify_translation(translation_evaluations); + translator_verifier.verify_translation(translation_evaluations, eccvm_verifier.translation_masking_term_eval); MergeVerifier merge_verifier{ builder }; merge_verifier.verify_proof(proof.merge_proof); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp index 4e18ece2a90..3f97cd1e81e 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp @@ -150,7 +150,8 @@ template bool TranslatorRecursiveVerifier_::verify_translation( const TranslationEvaluations_< typename stdlib::bigfield, - typename Flavor::FF>& translation_evaluations) + typename Flavor::FF>& translation_evaluations, + const BF& translation_masking_term_eval) { const auto reconstruct_from_array = [&](const auto& arr) { return BF::construct_from_limbs(arr[0], arr[1], arr[2], arr[3]); @@ -170,7 +171,7 @@ bool TranslatorRecursiveVerifier_::verify_translation( const BF& z1 = translation_evaluations.z1; const BF& z2 = translation_evaluations.z2; - const BF eccvm_opening = (op + (v1 * Px) + (v2 * Py) + (v3 * z1) + (v4 * z2)); + const BF eccvm_opening = (op + (v1 * Px) + (v2 * Py) + (v3 * z1) + (v4 * z2)) - translation_masking_term_eval; // multiply by x here to deal with shift eccvm_opening.assert_equal(x * accumulated_result); return (eccvm_opening.get_value() == (x * accumulated_result).get_value()); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.hpp index 4e73b1bf049..3faf36e0d73 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.hpp @@ -44,6 +44,7 @@ template class TranslatorRecursiveVerifier_ { // TODO(https://github.com/AztecProtocol/barretenberg/issues/986): Ensure the translation is also recursively // verified somewhere - bool verify_translation(const TranslationEvaluations& translation_evaluations); + bool verify_translation(const TranslationEvaluations& translation_evaluations, + const BF& translation_masking_term_eval); }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp index 
e44e0d13e30..5aa69921239 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp @@ -38,6 +38,17 @@ void TranslatorProver::execute_preamble_round() transcript->send_to_verifier("accumulated_result", accumulated_result); } +/** + * @brief Utility to commit to a witness polynomial and send the commitment to the verifier. + * + * @param polynomial + * @param label + */ +void TranslatorProver::commit_to_witness_polynomial(Polynomial& polynomial, const std::string& label) +{ + transcript->send_to_verifier(label, key->proving_key->commitment_key->commit(polynomial)); +} + /** * @brief Compute commitments to wires and ordered range constraints. * @@ -48,13 +59,13 @@ void TranslatorProver::execute_wire_and_sorted_constraints_commitments_round() for (const auto& [wire, label] : zip_view(key->proving_key->polynomials.get_wires(), commitment_labels.get_wires())) { - transcript->send_to_verifier(label, key->proving_key->commitment_key->commit(wire)); + commit_to_witness_polynomial(wire, label); } // The ordered range constraints are of full circuit size. for (const auto& [ordered_range_constraint, label] : zip_view( key->proving_key->polynomials.get_ordered_constraints(), commitment_labels.get_ordered_constraints())) { - transcript->send_to_verifier(label, key->proving_key->commitment_key->commit(ordered_range_constraint)); + commit_to_witness_polynomial(ordered_range_constraint, label); } } @@ -105,8 +116,7 @@ void TranslatorProver::execute_grand_product_computation_round() // Compute constraint permutation grand product compute_grand_products(key->proving_key->polynomials, relation_parameters); - transcript->send_to_verifier(commitment_labels.z_perm, - key->proving_key->commitment_key->commit(key->proving_key->polynomials.z_perm)); + commit_to_witness_polynomial(key->proving_key->polynomials.z_perm, commitment_labels.z_perm); } /** diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp index 164542d7223..14e6d3bc69c 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.hpp @@ -33,6 +33,7 @@ class TranslatorProver { BB_PROFILE void execute_grand_product_computation_round(); BB_PROFILE void execute_relation_check_rounds(); BB_PROFILE void execute_pcs_rounds(); + void commit_to_witness_polynomial(Polynomial& polynomial, const std::string& label); HonkProof export_proof(); HonkProof construct_proof(); diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp index 0688e02ce7d..24039a92b54 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp @@ -141,7 +141,8 @@ bool TranslatorVerifier::verify_proof(const HonkProof& proof, return verified && consistency_checked; } -bool TranslatorVerifier::verify_translation(const TranslationEvaluations& translation_evaluations) +bool TranslatorVerifier::verify_translation(const TranslationEvaluations& translation_evaluations, + const BF& translation_masking_term_eval) { const auto reconstruct_from_array = [&](const auto& arr) { const BF elt_0 = (static_cast(arr[0])); @@ -166,7 +167,7 @@ bool TranslatorVerifier::verify_translation(const TranslationEvaluations& transl const
BF& z1 = translation_evaluations.z1; const BF& z2 = translation_evaluations.z2; - const BF eccvm_opening = (op + (v1 * Px) + (v2 * Py) + (v3 * z1) + (v4 * z2)); + const BF eccvm_opening = (op + (v1 * Px) + (v2 * Py) + (v3 * z1) + (v4 * z2)) - translation_masking_term_eval; // multiply by x here to deal with shift return x * accumulated_result == eccvm_opening; }; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.hpp index acd003ff53b..8f388b6a184 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.hpp @@ -34,6 +34,7 @@ class TranslatorVerifier { const BF& batching_challenge_v, const uint256_t& accumulated_result); bool verify_proof(const HonkProof& proof, const uint256_t& evaluation_input_x, const BF& batching_challenge_v); - bool verify_translation(const TranslationEvaluations& translation_evaluations); + bool verify_translation(const TranslationEvaluations& translation_evaluations, + const BF& translation_masking_term_eval); }; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp index 54fb720cfcb..e15fc8911c5 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp @@ -235,18 +235,6 @@ template typename Flavor::RelationSeparator OinkProver void OinkProver::mask_witness_polynomial(Polynomial& polynomial) -{ - const size_t circuit_size = polynomial.virtual_size(); - for (size_t idx = 1; idx < MASKING_OFFSET; idx++) { - polynomial.at(circuit_size - idx) = FF::random_element(); - } -} - /** * @brief A uniform method to mask, commit, and send the corresponding commitment to the verifier. 
* @@ -259,9 +247,9 @@ void OinkProver::commit_to_witness_polynomial(Polynomial& polynomial, const std::string& label, const CommitmentKey::CommitType type) { - // Mask if needed + // Mask the polynomial when proving in zero-knowledge if constexpr (Flavor::HasZK) { - mask_witness_polynomial(polynomial); + polynomial.mask(); }; typename Flavor::Commitment commitment; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp index 271ed2792a9..5009c33e92e 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp @@ -63,7 +63,6 @@ template class OinkProver { void execute_log_derivative_inverse_round(); void execute_grand_product_computation_round(); RelationSeparator generate_alphas_round(); - void mask_witness_polynomial(Polynomial& polynomial); void commit_to_witness_polynomial(Polynomial& polynomial, const std::string& label, const CommitmentKey::CommitType type = CommitmentKey::CommitType::Default); @@ -71,4 +70,4 @@ template class OinkProver { using MegaOinkProver = OinkProver<MegaFlavor>; -} // namespace bb \ No newline at end of file +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.cpp b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.cpp index ba3238ea35d..0105bf5e664 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.cpp @@ -84,162 +84,276 @@ const std::unordered_map> W const std::unordered_map<WireOpCode, WireInstructionSpec> WIRE_INSTRUCTION_SPEC = { { WireOpCode::ADD_8, - { .exec_opcode = ExecutionOpCode::ADD, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ADD_8) } }, + { .exec_opcode = ExecutionOpCode::ADD, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ADD_8) } }, { WireOpCode::ADD_16, - { .exec_opcode = ExecutionOpCode::ADD, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ADD_16) } }, + { .exec_opcode = ExecutionOpCode::ADD, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ADD_16) } }, { WireOpCode::SUB_8, - { .exec_opcode = ExecutionOpCode::SUB, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SUB_8) } }, + { .exec_opcode = ExecutionOpCode::SUB, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SUB_8) } }, { WireOpCode::SUB_16, - { .exec_opcode = ExecutionOpCode::SUB, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SUB_16) } }, + { .exec_opcode = ExecutionOpCode::SUB, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SUB_16) } }, { WireOpCode::MUL_8, - { .exec_opcode = ExecutionOpCode::MUL, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MUL_8) } }, + { .exec_opcode = ExecutionOpCode::MUL, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MUL_8) } }, { WireOpCode::MUL_16, - { .exec_opcode = ExecutionOpCode::MUL, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MUL_16) } }, + { .exec_opcode = ExecutionOpCode::MUL, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MUL_16) } }, { WireOpCode::DIV_8, - { .exec_opcode = ExecutionOpCode::DIV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::DIV_8) } }, + { .exec_opcode = ExecutionOpCode::DIV, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::DIV_8) } }, { WireOpCode::DIV_16, - {
.exec_opcode = ExecutionOpCode::DIV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::DIV_16) } }, + { .exec_opcode = ExecutionOpCode::DIV, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::DIV_16) } }, { WireOpCode::FDIV_8, - { .exec_opcode = ExecutionOpCode::FDIV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::FDIV_8) } }, + { .exec_opcode = ExecutionOpCode::FDIV, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::FDIV_8) } }, { WireOpCode::FDIV_16, - { .exec_opcode = ExecutionOpCode::FDIV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::FDIV_16) } }, + { .exec_opcode = ExecutionOpCode::FDIV, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::FDIV_16) } }, { WireOpCode::EQ_8, - { .exec_opcode = ExecutionOpCode::EQ, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EQ_8) } }, + { .exec_opcode = ExecutionOpCode::EQ, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EQ_8) } }, { WireOpCode::EQ_16, - { .exec_opcode = ExecutionOpCode::EQ, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EQ_16) } }, + { .exec_opcode = ExecutionOpCode::EQ, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EQ_16) } }, { WireOpCode::LT_8, - { .exec_opcode = ExecutionOpCode::LT, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LT_8) } }, + { .exec_opcode = ExecutionOpCode::LT, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LT_8) } }, { WireOpCode::LT_16, - { .exec_opcode = ExecutionOpCode::LT, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LT_16) } }, + { .exec_opcode = ExecutionOpCode::LT, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LT_16) } }, { WireOpCode::LTE_8, - { .exec_opcode = ExecutionOpCode::LTE, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LTE_8) } }, + { .exec_opcode = ExecutionOpCode::LTE, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LTE_8) } }, { WireOpCode::LTE_16, - { .exec_opcode = ExecutionOpCode::LTE, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LTE_16) } }, + { .exec_opcode = ExecutionOpCode::LTE, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::LTE_16) } }, { WireOpCode::AND_8, - { .exec_opcode = ExecutionOpCode::AND, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::AND_8) } }, + { .exec_opcode = ExecutionOpCode::AND, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::AND_8) } }, { WireOpCode::AND_16, - { .exec_opcode = ExecutionOpCode::AND, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::AND_16) } }, + { .exec_opcode = ExecutionOpCode::AND, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::AND_16) } }, { WireOpCode::OR_8, - { .exec_opcode = ExecutionOpCode::OR, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::OR_8) } }, + { .exec_opcode = ExecutionOpCode::OR, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::OR_8) } }, { WireOpCode::OR_16, - { .exec_opcode = ExecutionOpCode::OR, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::OR_16) } }, + { .exec_opcode = ExecutionOpCode::OR, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::OR_16) } }, { WireOpCode::XOR_8, - { .exec_opcode = ExecutionOpCode::XOR, .op_dc_selectors = 
WireOpCode_DC_SELECTORS.at(WireOpCode::XOR_8) } }, + { .exec_opcode = ExecutionOpCode::XOR, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::XOR_8) } }, { WireOpCode::XOR_16, - { .exec_opcode = ExecutionOpCode::XOR, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::XOR_16) } }, + { .exec_opcode = ExecutionOpCode::XOR, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::XOR_16) } }, { WireOpCode::NOT_8, - { .exec_opcode = ExecutionOpCode::NOT, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NOT_8) } }, + { .exec_opcode = ExecutionOpCode::NOT, + .size_in_bytes = 4, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NOT_8) } }, { WireOpCode::NOT_16, - { .exec_opcode = ExecutionOpCode::NOT, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NOT_16) } }, + { .exec_opcode = ExecutionOpCode::NOT, + .size_in_bytes = 6, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NOT_16) } }, { WireOpCode::SHL_8, - { .exec_opcode = ExecutionOpCode::SHL, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHL_8) } }, + { .exec_opcode = ExecutionOpCode::SHL, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHL_8) } }, { WireOpCode::SHL_16, - { .exec_opcode = ExecutionOpCode::SHL, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHL_16) } }, + { .exec_opcode = ExecutionOpCode::SHL, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHL_16) } }, { WireOpCode::SHR_8, - { .exec_opcode = ExecutionOpCode::SHR, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHR_8) } }, + { .exec_opcode = ExecutionOpCode::SHR, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHR_8) } }, { WireOpCode::SHR_16, - { .exec_opcode = ExecutionOpCode::SHR, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHR_16) } }, + { .exec_opcode = ExecutionOpCode::SHR, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHR_16) } }, { WireOpCode::CAST_8, - { .exec_opcode = ExecutionOpCode::CAST, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CAST_8) } }, + { .exec_opcode = ExecutionOpCode::CAST, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CAST_8) } }, { WireOpCode::CAST_16, - { .exec_opcode = ExecutionOpCode::CAST, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CAST_16) } }, + { .exec_opcode = ExecutionOpCode::CAST, + .size_in_bytes = 7, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CAST_16) } }, { WireOpCode::GETENVVAR_16, { .exec_opcode = ExecutionOpCode::GETENVVAR, + .size_in_bytes = 5, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::GETENVVAR_16) } }, { WireOpCode::CALLDATACOPY, { .exec_opcode = ExecutionOpCode::CALLDATACOPY, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CALLDATACOPY) } }, { WireOpCode::SUCCESSCOPY, { .exec_opcode = ExecutionOpCode::SUCCESSCOPY, + .size_in_bytes = 4, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SUCCESSCOPY) } }, { WireOpCode::RETURNDATASIZE, { .exec_opcode = ExecutionOpCode::RETURNDATASIZE, + .size_in_bytes = 4, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::RETURNDATASIZE) } }, { WireOpCode::RETURNDATACOPY, { .exec_opcode = ExecutionOpCode::RETURNDATACOPY, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::RETURNDATACOPY) } }, { WireOpCode::JUMP_32, - { .exec_opcode = 
ExecutionOpCode::JUMP, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::JUMP_32) } }, + { .exec_opcode = ExecutionOpCode::JUMP, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::JUMP_32) } }, { WireOpCode::JUMPI_32, - { .exec_opcode = ExecutionOpCode::JUMPI, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::JUMPI_32) } }, + { .exec_opcode = ExecutionOpCode::JUMPI, + .size_in_bytes = 8, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::JUMPI_32) } }, { WireOpCode::INTERNALCALL, { .exec_opcode = ExecutionOpCode::INTERNALCALL, + .size_in_bytes = 5, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::INTERNALCALL) } }, { WireOpCode::INTERNALRETURN, { .exec_opcode = ExecutionOpCode::INTERNALRETURN, + .size_in_bytes = 1, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::INTERNALRETURN) } }, { WireOpCode::SET_8, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_8) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 5, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_8) } }, { WireOpCode::SET_16, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_16) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 7, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_16) } }, { WireOpCode::SET_32, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_32) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 9, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_32) } }, { WireOpCode::SET_64, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_64) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 13, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_64) } }, { WireOpCode::SET_128, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_128) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 21, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_128) } }, { WireOpCode::SET_FF, - { .exec_opcode = ExecutionOpCode::SET, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_FF) } }, + { .exec_opcode = ExecutionOpCode::SET, + .size_in_bytes = 37, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SET_FF) } }, { WireOpCode::MOV_8, - { .exec_opcode = ExecutionOpCode::MOV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MOV_8) } }, + { .exec_opcode = ExecutionOpCode::MOV, + .size_in_bytes = 4, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MOV_8) } }, { WireOpCode::MOV_16, - { .exec_opcode = ExecutionOpCode::MOV, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MOV_16) } }, + { .exec_opcode = ExecutionOpCode::MOV, + .size_in_bytes = 6, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::MOV_16) } }, { WireOpCode::SLOAD, - { .exec_opcode = ExecutionOpCode::SLOAD, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SLOAD) } }, + { .exec_opcode = ExecutionOpCode::SLOAD, + .size_in_bytes = 6, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SLOAD) } }, { WireOpCode::SSTORE, - { .exec_opcode = ExecutionOpCode::SSTORE, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SSTORE) } }, + { .exec_opcode = ExecutionOpCode::SSTORE, + .size_in_bytes = 6, + .op_dc_selectors = 
WireOpCode_DC_SELECTORS.at(WireOpCode::SSTORE) } }, { WireOpCode::NOTEHASHEXISTS, { .exec_opcode = ExecutionOpCode::NOTEHASHEXISTS, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NOTEHASHEXISTS) } }, { WireOpCode::EMITNOTEHASH, { .exec_opcode = ExecutionOpCode::EMITNOTEHASH, + .size_in_bytes = 4, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EMITNOTEHASH) } }, { WireOpCode::NULLIFIEREXISTS, { .exec_opcode = ExecutionOpCode::NULLIFIEREXISTS, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::NULLIFIEREXISTS) } }, { WireOpCode::EMITNULLIFIER, { .exec_opcode = ExecutionOpCode::EMITNULLIFIER, + .size_in_bytes = 4, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EMITNULLIFIER) } }, { WireOpCode::L1TOL2MSGEXISTS, { .exec_opcode = ExecutionOpCode::L1TOL2MSGEXISTS, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::L1TOL2MSGEXISTS) } }, { WireOpCode::GETCONTRACTINSTANCE, { .exec_opcode = ExecutionOpCode::GETCONTRACTINSTANCE, + .size_in_bytes = 9, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::GETCONTRACTINSTANCE) } }, { WireOpCode::EMITUNENCRYPTEDLOG, { .exec_opcode = ExecutionOpCode::EMITUNENCRYPTEDLOG, + .size_in_bytes = 6, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::EMITUNENCRYPTEDLOG) } }, { WireOpCode::SENDL2TOL1MSG, { .exec_opcode = ExecutionOpCode::SENDL2TOL1MSG, + .size_in_bytes = 6, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SENDL2TOL1MSG) } }, { WireOpCode::CALL, - { .exec_opcode = ExecutionOpCode::CALL, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CALL) } }, + { .exec_opcode = ExecutionOpCode::CALL, + .size_in_bytes = 13, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::CALL) } }, { WireOpCode::STATICCALL, { .exec_opcode = ExecutionOpCode::STATICCALL, + .size_in_bytes = 13, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::STATICCALL) } }, { WireOpCode::RETURN, - { .exec_opcode = ExecutionOpCode::RETURN, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::RETURN) } }, + { .exec_opcode = ExecutionOpCode::RETURN, + .size_in_bytes = 6, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::RETURN) } }, { WireOpCode::REVERT_8, - { .exec_opcode = ExecutionOpCode::REVERT, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::REVERT_8) } }, + { .exec_opcode = ExecutionOpCode::REVERT, + .size_in_bytes = 4, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::REVERT_8) } }, { WireOpCode::REVERT_16, { .exec_opcode = ExecutionOpCode::REVERT, + .size_in_bytes = 6, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::REVERT_16) } }, { WireOpCode::DEBUGLOG, { .exec_opcode = ExecutionOpCode::DEBUGLOG, + .size_in_bytes = 10, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::DEBUGLOG) } }, { WireOpCode::POSEIDON2PERM, { .exec_opcode = ExecutionOpCode::POSEIDON2PERM, + .size_in_bytes = 6, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::POSEIDON2PERM) } }, { WireOpCode::SHA256COMPRESSION, { .exec_opcode = ExecutionOpCode::SHA256COMPRESSION, + .size_in_bytes = 8, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::SHA256COMPRESSION) } }, { WireOpCode::KECCAKF1600, { .exec_opcode = ExecutionOpCode::KECCAKF1600, + .size_in_bytes = 6, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::KECCAKF1600) } }, { WireOpCode::ECADD, - { .exec_opcode = ExecutionOpCode::ECADD, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ECADD) } }, + { .exec_opcode = 
ExecutionOpCode::ECADD, + .size_in_bytes = 17, + .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::ECADD) } }, { WireOpCode::TORADIXBE, { .exec_opcode = ExecutionOpCode::TORADIXBE, + .size_in_bytes = 13, .op_dc_selectors = WireOpCode_DC_SELECTORS.at(WireOpCode::TORADIXBE) } }, }; diff --git a/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.hpp b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.hpp index 4fec33922e3..1e44b344720 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.hpp @@ -28,6 +28,7 @@ struct ExecInstructionSpec { struct WireInstructionSpec { ExecutionOpCode exec_opcode; + uint32_t size_in_bytes; std::array op_dc_selectors; bool operator==(const WireInstructionSpec& other) const = default; diff --git a/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.test.cpp new file mode 100644 index 00000000000..045c66ca4a8 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/vm2/common/instruction_spec.test.cpp @@ -0,0 +1,39 @@ +#include <gmock/gmock.h> +#include <gtest/gtest.h> + +#include "barretenberg/vm2/common/instruction_spec.hpp" +#include "barretenberg/vm2/common/opcodes.hpp" +#include "barretenberg/vm2/simulation/lib/serialization.hpp" + +namespace bb::avm2 { +namespace { + +size_t compute_instruction_size(WireOpCode wire_opcode, + const std::unordered_map<WireOpCode, std::vector<OperandType>>& wire_format, + const std::unordered_map<OperandType, uint32_t>& operand_type_sizes) +{ + size_t instr_size = 1; // Take into account the opcode byte + for (const auto& operand_type : wire_format.at(wire_opcode)) { + instr_size += operand_type_sizes.at(operand_type); + } + + return instr_size; +} + +// Test checking that the hardcoded size for each instruction specified in WIRE_INSTRUCTION_SPEC +// is correct. This test would fail only when we change the wire format of an instruction. +TEST(InstructionSpecTest, CheckAllInstructionSizes) +{ + const auto& wire_format = simulation::testonly::get_instruction_wire_formats(); + const auto& operand_type_sizes = simulation::testonly::get_operand_type_sizes(); + + for (int i = 0; i < static_cast<int>(WireOpCode::LAST_OPCODE_SENTINEL); i++) { + const auto wire_opcode = static_cast<WireOpCode>(i); + const auto computed_size = compute_instruction_size(wire_opcode, wire_format, operand_type_sizes); + EXPECT_EQ(WIRE_INSTRUCTION_SPEC.at(wire_opcode).size_in_bytes, computed_size) + << "Incorrect size_in_bytes field for " << wire_opcode << " in WIRE_INSTRUCTION_SPEC."; + } +} + +} // namespace +} // namespace bb::avm2 diff --git a/barretenberg/cpp/src/barretenberg/vm2/common/memory_types.hpp b/barretenberg/cpp/src/barretenberg/vm2/common/memory_types.hpp index d3ba454fbb0..1e1cfe27322 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/common/memory_types.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/common/memory_types.hpp @@ -6,6 +6,7 @@ namespace bb::avm2 { +// Adapt NUM_MEMORY_TAGS in fixtures.cpp if this enum is modified.
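+// A compile-time guard of the following shape would catch such a mismatch earlier than a failing test +// (a sketch only: it assumes NUM_MEMORY_TAGS is reachable from this header and that U128 stays the last enumerator): +//   static_assert(NUM_MEMORY_TAGS == static_cast<size_t>(MemoryTag::U128) + 1);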
enum class MemoryTag { FF, U1, diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_decomposition.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_decomposition.test.cpp index 2d85f9a63f4..2182eb9cc65 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_decomposition.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_decomposition.test.cpp @@ -34,11 +34,7 @@ void init_trace(TestTraceContainer& trace) TEST(BytecodeDecompositionConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_first_row, 1 } }, - }); - - check_relation<bc_decomposition>(trace); + check_relation<bc_decomposition>(testing::empty_trace()); } TEST(BytecodeDecompositionConstrainingTest, SingleBytecode) diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_hashing.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_hashing.test.cpp index 389b19d39af..bd4088c8d92 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_hashing.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bc_hashing.test.cpp @@ -41,11 +41,7 @@ using length_iv_relation = bb::avm2::lookup_bc_hashing_iv_is_len_relation; TEST(BytecodeHashingConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_first_row, 1 } }, - }); - - check_relation<bc_hashing>(trace); + check_relation<bc_hashing>(testing::empty_trace()); } TEST(BytecodeHashingConstrainingTest, SingleBytecodeHash) diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bitwise.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bitwise.test.cpp index decc3157975..5e9239f6e26 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bitwise.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/bitwise.test.cpp @@ -8,8 +8,13 @@ #include "barretenberg/vm2/constraining/testing/check_relation.hpp" #include "barretenberg/vm2/generated/flavor_settings.hpp" #include "barretenberg/vm2/generated/relations/bitwise.hpp" +#include "barretenberg/vm2/generated/relations/lookups_bitwise.hpp" +#include "barretenberg/vm2/testing/fixtures.hpp" #include "barretenberg/vm2/testing/macros.hpp" #include "barretenberg/vm2/tracegen/bitwise_trace.hpp" +#include "barretenberg/vm2/tracegen/lib/lookup_into_bitwise.hpp" +#include "barretenberg/vm2/tracegen/lib/lookup_into_indexed_by_clk.hpp" +#include "barretenberg/vm2/tracegen/precomputed_trace.hpp" #include "barretenberg/vm2/tracegen/test_trace_container.hpp" namespace bb::avm2::constraining { @@ -21,13 +26,15 @@ using FF = AvmFlavorSettings::FF; using C = Column; using bitwise = bb::avm2::bitwise; +using tracegen::LookupIntoBitwise; +using tracegen::LookupIntoIndexedByClk; +using tracegen::PrecomputedTraceBuilder; +using lookup_bitwise_byte_operations = bb::avm2::lookup_bitwise_byte_operations_relation; +using lookup_bitwise_integral_tag_length = bb::avm2::lookup_bitwise_integral_tag_length_relation; + TEST(BitwiseConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_first_row, 1 } }, - }); - - check_relation<bitwise>(trace); + check_relation<bitwise>(testing::empty_trace()); } // Testing a positive AND operation for each integral type (U1, U8, ...
U128) @@ -318,5 +325,34 @@ TEST(BitwiseConstrainingTest, NegativeWrongAccumulation) EXPECT_THROW_WITH_MESSAGE(check_relation<bitwise>(trace, bitwise::SR_BITW_ACC_REL_C), "BITW_ACC_REL_C"); } +TEST(BitwiseConstrainingTest, MixedOperationsInteractions) +{ + TestTraceContainer trace; + BitwiseTraceBuilder builder; + PrecomputedTraceBuilder precomputed_builder; + + builder.process( + { + { .operation = BitwiseOperation::OR, .tag = MemoryTag::U1, .a = 1, .b = 0, .res = 1 }, + { .operation = BitwiseOperation::AND, .tag = MemoryTag::U32, .a = 13793, .b = 10590617, .res = 4481 }, + { .operation = BitwiseOperation::XOR, .tag = MemoryTag::U16, .a = 5323, .b = 321, .res = 5514 }, + { .operation = BitwiseOperation::XOR, .tag = MemoryTag::U32, .a = 13793, .b = 10590617, .res = 10595448 }, + { .operation = BitwiseOperation::AND, .tag = MemoryTag::U8, .a = 85, .b = 175, .res = 5 }, + { .operation = BitwiseOperation::AND, .tag = MemoryTag::U8, .a = 85, .b = 175, .res = 5 }, + }, + trace); + + precomputed_builder.process_misc(trace, 256 * 256 * 3); + precomputed_builder.process_bitwise(trace); + precomputed_builder.process_integral_tag_length(trace); + + LookupIntoBitwise().process(trace); + LookupIntoIndexedByClk().process(trace); + + check_relation<bitwise>(trace); + check_interaction<lookup_bitwise_byte_operations>(trace); + check_interaction<lookup_bitwise_integral_tag_length>(trace); +} + } // namespace -} // namespace bb::avm2::constraining \ No newline at end of file +} // namespace bb::avm2::constraining diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/class_id_derivation.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/class_id_derivation.test.cpp index 97c1adb7314..303a587db0a 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/class_id_derivation.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/class_id_derivation.test.cpp @@ -54,12 +54,7 @@ ContractClass generate_contract_class() TEST(ClassIdDerivationConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_first_row, 1 } }, - { { C::precomputed_clk, 0 } }, - }); - - check_relation<class_id_derivation>(trace); + check_relation<class_id_derivation>(testing::empty_trace()); } TEST(ClassIdDerivationConstrainingTest, Basic) diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/instr_fetching.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/instr_fetching.test.cpp index 04ea01b1af5..1a69d21a606 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/instr_fetching.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/instr_fetching.test.cpp @@ -5,12 +5,16 @@ #include <gmock/gmock.h> #include <gtest/gtest.h> +#include "barretenberg/vm2/common/instruction_spec.hpp" #include "barretenberg/vm2/constraining/testing/check_relation.hpp" #include "barretenberg/vm2/generated/columns.hpp" #include "barretenberg/vm2/generated/flavor_settings.hpp" #include "barretenberg/vm2/generated/relations/instr_fetching.hpp" +#include "barretenberg/vm2/testing/fixtures.hpp" #include "barretenberg/vm2/testing/macros.hpp" #include "barretenberg/vm2/tracegen/bytecode_trace.hpp" +#include "barretenberg/vm2/tracegen/lib/lookup_builder.hpp" +#include "barretenberg/vm2/tracegen/lib/lookup_into_indexed_by_clk.hpp" #include "barretenberg/vm2/tracegen/precomputed_trace.hpp" #include "barretenberg/vm2/tracegen/test_trace_container.hpp" @@ -18,20 +22,19 @@ namespace bb::avm2::constraining { namespace { using tracegen::BytecodeTraceBuilder; +using tracegen::PrecomputedTraceBuilder; using tracegen::TestTraceContainer; using FF =
AvmFlavorSettings::FF; using C = Column; using instr_fetching = bb::avm2::instr_fetching; using simulation::Instruction; +using simulation::InstructionFetchingEvent; using simulation::Operand; +using testing::random_bytes; TEST(InstrFetchingConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_clk, 1 } }, - }); - - check_relation<instr_fetching>(trace); + check_relation<instr_fetching>(testing::empty_trace()); } // Basic positive test with a hardcoded bytecode for ADD_8 @@ -43,10 +46,9 @@ TEST(InstrFetchingConstrainingTest, Add8WithTraceGen) .opcode = WireOpCode::ADD_8, .indirect = 3, .operands = { Operand::u8(0x34), Operand::u8(0x35), Operand::u8(0x36) }, - .size_in_bytes = 5, }; - std::vector<uint8_t> bytecode = { static_cast<uint8_t>(WireOpCode::ADD_8), 0x03, 0x34, 0x35, 0x36 }; + std::vector<uint8_t> bytecode = add_8_instruction.serialize(); builder.process_instruction_fetching({ { .bytecode_id = 1, .pc = 0, @@ -74,29 +76,9 @@ TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen) Operand::u16(0x127d), Operand::u16(0x127e), Operand::u16(0x127f) }, - .size_in_bytes = 17, - }; - - std::vector<uint8_t> bytecode = { - static_cast<uint8_t>(WireOpCode::ECADD), - 0x1f, - 0x1f, - 0x12, - 0x79, - 0x12, - 0x7a, - 0x12, - 0x7b, - 0x12, - 0x7c, - 0x12, - 0x7d, - 0x12, - 0x7e, - 0x12, - 0x7f, }; + std::vector<uint8_t> bytecode = ecadd_instruction.serialize(); builder.process_instruction_fetching({ { .bytecode_id = 1, .pc = 0, .instruction = ecadd_instruction, @@ -107,40 +89,258 @@ ... check_relation<instr_fetching>(trace); } +// Helper routine generating a vector of instruction fetching events for each +// opcode. +std::vector<InstructionFetchingEvent> gen_instr_events_each_opcode() +{ + std::vector<uint8_t> bytecode; + std::vector<Instruction> instructions; + constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL); + instructions.reserve(num_opcodes); + std::array<uint32_t, num_opcodes> pc_positions; + + for (size_t i = 0; i < num_opcodes; i++) { + pc_positions.at(i) = static_cast<uint32_t>(bytecode.size()); + const auto instr = testing::random_instruction(static_cast<WireOpCode>(i)); + instructions.emplace_back(instr); + const auto instruction_bytes = instr.serialize(); + bytecode.insert(bytecode.end(), + std::make_move_iterator(instruction_bytes.begin()), + std::make_move_iterator(instruction_bytes.end())); + } + + const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode)); + // Always use *bytecode_ptr from now on instead of bytecode, as the latter was moved from. + + std::vector<InstructionFetchingEvent> instr_events; + instr_events.reserve(num_opcodes); + for (size_t i = 0; i < num_opcodes; i++) { + instr_events.emplace_back(InstructionFetchingEvent{ + .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instructions.at(i), .bytecode = bytecode_ptr }); + } + return instr_events; +} + // Positive test for each opcode. We assume that instruction decoding works correctly. // It works as long as the relations do not constrain the correct range for TAG or indirect.
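+// For illustration (a sketch based on the sizes hardcoded in WIRE_INSTRUCTION_SPEC, and assuming +// ADD_8 and ADD_16 are the first two wire opcodes, as in the spec table above): the helper serializes +// one instruction per opcode back-to-back, so with ADD_8 (5 bytes) followed by ADD_16 (8 bytes) the +// recorded positions begin +//   pc_positions = { 0, 5, 13, ... }; +// and each event's pc points at the opcode byte of its own instruction inside the shared bytecode.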
TEST(InstrFetchingConstrainingTest, EachOpcodeWithTraceGen) { - uint32_t seed = 987137937; // Arbitrary number serving as pseudo-random seed to generate bytes + TestTraceContainer trace; + BytecodeTraceBuilder builder; - auto gen_40_bytes = [&]() { - std::vector<uint8_t> bytes; - bytes.resize(40); + builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace); - for (size_t i = 0; i < 40; i++) { - bytes.at(i) = static_cast<uint8_t>(seed % 256); - seed *= seed; - } - return bytes; + constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL); + EXPECT_EQ(trace.get_num_rows(), num_opcodes); + check_relation<instr_fetching>(trace); +} + +// Negative test on the decomposition of operands. We mutate correct operand values in the trace. +// This also covers wrong values in operands that are not used by the instruction. +// We perform this for a random instruction of each of the opcodes REVERT_16, CAST_8, and TORADIXBE. +TEST(InstrFetchingConstrainingTest, NegativeWrongOperand) +{ + BytecodeTraceBuilder builder; + + std::vector<WireOpCode> opcodes = { WireOpCode::REVERT_16, WireOpCode::CAST_8, WireOpCode::TORADIXBE }; + std::vector<size_t> sub_relations = { + instr_fetching::SR_INDIRECT_BYTES_DECOMPOSITION, instr_fetching::SR_OP1_BYTES_DECOMPOSITION, + instr_fetching::SR_OP2_BYTES_DECOMPOSITION, instr_fetching::SR_OP3_BYTES_DECOMPOSITION, + instr_fetching::SR_OP4_BYTES_DECOMPOSITION, instr_fetching::SR_OP5_BYTES_DECOMPOSITION, + instr_fetching::SR_OP6_BYTES_DECOMPOSITION, instr_fetching::SR_OP7_BYTES_DECOMPOSITION, }; - for (uint8_t i = 0; i < static_cast<uint8_t>(WireOpCode::LAST_OPCODE_SENTINEL); i++) { + constexpr std::array<C, 8> operand_cols = { + C::instr_fetching_indirect, C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, + C::instr_fetching_op4, C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7, + }; + + for (const auto& opcode : opcodes) { TestTraceContainer trace; - BytecodeTraceBuilder builder; + const auto instr = testing::random_instruction(opcode); + builder.process_instruction_fetching( + { { .bytecode_id = 1, + .pc = 0, + .instruction = instr, + .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } }, + trace); + check_relation<instr_fetching>(trace); - std::vector<uint8_t> bytecode = gen_40_bytes(); - bytecode.at(0) = i; + EXPECT_EQ(trace.get_num_rows(), 1); - const auto instr = simulation::decode_instruction(bytecode, 0); + for (size_t i = 0; i < operand_cols.size(); i++) { + auto mutated_trace = trace; + const FF mutated_operand = trace.get(operand_cols.at(i), 0) + 1; // Mutate to value + 1 + mutated_trace.set(operand_cols.at(i), 0, mutated_operand); + EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(mutated_trace, sub_relations.at(i)), + instr_fetching::get_subrelation_label(sub_relations.at(i))); + } + } +} - builder.process_instruction_fetching({ { .bytecode_id = 1, - .pc = 0, - .instruction = instr, - .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode) } }, - trace); +// Positive test for the interaction with the instruction spec table, using the same events as the test +// EachOpcodeWithTraceGen, i.e., one event/row is generated per wire opcode. +// It works as long as the relations do not constrain the correct range for TAG or indirect.
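+// Concretely, each fetched row must agree with the precomputed spec row whose clk equals the wire +// opcode value, i.e. the tuple (exec_opcode, instr_size_in_bytes, sel_op_dc_0 .. sel_op_dc_17) must +// match WIRE_INSTRUCTION_SPEC; the negative variant of this test further below mutates exactly these +// columns one by one.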
+TEST(InstrFetchingConstrainingTest, WireInstructionSpecInteractions) +{ + using wire_instr_spec_lookup = lookup_instr_fetching_wire_instruction_info_relation; - + TestTraceContainer trace; + BytecodeTraceBuilder bytecode_builder; + + PrecomputedTraceBuilder precomputed_builder; + precomputed_builder.process_wire_instruction_spec(trace); + bytecode_builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace); + precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need. + + tracegen::LookupIntoIndexedByClk().process(trace); + + check_relation<instr_fetching>(trace); + check_interaction<wire_instr_spec_lookup>(trace); +} + +// Positive test for the interaction with the bytecode decomposition table. +// One event/row is generated per wire opcode (same as for test WireInstructionSpecInteractions). +// It works as long as the relations do not constrain the correct range for TAG or indirect. +TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions) +{ + using bc_decomposition_lookup = lookup_instr_fetching_bytes_from_bc_dec_relation; + + TestTraceContainer trace; + BytecodeTraceBuilder bytecode_builder; + + const auto instr_fetch_events = gen_instr_events_each_opcode(); + bytecode_builder.process_instruction_fetching(instr_fetch_events, trace); + bytecode_builder.process_decomposition({ { + .bytecode_id = instr_fetch_events.at(0).bytecode_id, + .bytecode = instr_fetch_events.at(0).bytecode, + } }, + trace); + + tracegen::LookupIntoDynamicTableSequential().process(trace); + + check_relation<instr_fetching>(trace); + check_interaction<bc_decomposition_lookup>(trace); +} + +// Negative interaction test with some values not matching the instruction spec table. +TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions) +{ + using wire_instr_spec_lookup = lookup_instr_fetching_wire_instruction_info_relation; + using tracegen::LookupIntoIndexedByClk; + + BytecodeTraceBuilder bytecode_builder; + PrecomputedTraceBuilder precomputed_builder; + + // Some arbitrarily chosen opcodes. We limit to one, as this unit test is costly. + // The test works if the following vector is extended to other opcodes, though. + std::vector<WireOpCode> opcodes = { WireOpCode::CALLDATACOPY }; + + for (const auto& opcode : opcodes) { + TestTraceContainer trace; + const auto instr = testing::random_instruction(opcode); + bytecode_builder.process_instruction_fetching( + { { .bytecode_id = 1, + .pc = 0, + .instruction = instr, + .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } }, + trace); + precomputed_builder.process_wire_instruction_spec(trace); + precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
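+ // Descriptive note: LookupIntoIndexedByClk fills the lookup's counts column by incrementing, for every +  // source row, the count at the destination row indexed by clk (here the wire opcode value); the assertion +  // on the counts column just below relies on this. (A sketch of the builder's behaviour; the exact +  // internals live in lookup_into_indexed_by_clk.hpp.)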
+ + LookupIntoIndexedByClk().process(trace); + + ASSERT_EQ(trace.get(C::lookup_instr_fetching_wire_instruction_info_counts, static_cast<uint32_t>(opcode)), 1); + check_interaction<wire_instr_spec_lookup>(trace); + + constexpr std::array<C, 20> mutated_cols = { + C::instr_fetching_exec_opcode, C::instr_fetching_instr_size_in_bytes, C::instr_fetching_sel_op_dc_0, + C::instr_fetching_sel_op_dc_1, C::instr_fetching_sel_op_dc_2, C::instr_fetching_sel_op_dc_3, + C::instr_fetching_sel_op_dc_4, C::instr_fetching_sel_op_dc_5, C::instr_fetching_sel_op_dc_6, + C::instr_fetching_sel_op_dc_7, C::instr_fetching_sel_op_dc_8, C::instr_fetching_sel_op_dc_9, + C::instr_fetching_sel_op_dc_10, C::instr_fetching_sel_op_dc_11, C::instr_fetching_sel_op_dc_12, + C::instr_fetching_sel_op_dc_13, C::instr_fetching_sel_op_dc_14, C::instr_fetching_sel_op_dc_15, + C::instr_fetching_sel_op_dc_16, C::instr_fetching_sel_op_dc_17, + }; + + // Mutate each column involved in the lookup tuple, one at a time + for (const auto& col : mutated_cols) { + auto mutated_trace = trace; + const FF mutated_value = trace.get(col, 0) + 1; // Mutate to value + 1 + mutated_trace.set(col, 0, mutated_value); + + // We do not need to re-run LookupIntoIndexedByClk().process(trace), because we never mutate the + // indexing column for this lookup (clk): find_in_dst only uses column C::instr_fetching_bd0, which is + // mapped to clk. So the counts are still valid. + + EXPECT_THROW_WITH_MESSAGE(check_interaction<wire_instr_spec_lookup>(mutated_trace), + "Relation.*WIRE_INSTRUCTION_INFO.* ACCUMULATION.* is non-zero"); + } + } +} + +// Negative interaction test with some values not matching the bytecode decomposition table. +TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions) +{ + using bc_decomposition_lookup = lookup_instr_fetching_bytes_from_bc_dec_relation; + using tracegen::LookupIntoDynamicTableSequential; + + TestTraceContainer trace; + BytecodeTraceBuilder bytecode_builder; + + // Some arbitrarily chosen opcodes. We limit to one, as this unit test is costly. + // The test works if the following vector is extended to other opcodes, though.
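+ // In this lookup each fetched row must find its tuple (pc, bytecode_id, bd0 .. bd36) in the bytecode +  // decomposition table, so mutating any single one of these columns (done below) must make the tuple +  // unfindable and the accumulation check fail.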
+ std::vector<WireOpCode> opcodes = { WireOpCode::STATICCALL }; + + for (const auto& opcode : opcodes) { + TestTraceContainer trace; + const auto instr = testing::random_instruction(opcode); + auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.serialize()); + bytecode_builder.process_instruction_fetching({ { + .bytecode_id = 1, + .pc = 0, + .instruction = instr, + .bytecode = bytecode_ptr, + } }, + trace); + bytecode_builder.process_decomposition({ { + .bytecode_id = 1, + .bytecode = bytecode_ptr, + } }, + trace); + + auto valid_trace = trace; // Keep original trace before lookup processing + LookupIntoDynamicTableSequential().process(valid_trace); + check_interaction<bc_decomposition_lookup>(valid_trace); + + constexpr std::array<C, 39> mutated_cols = { + C::instr_fetching_pc, C::instr_fetching_bytecode_id, C::instr_fetching_bd0, C::instr_fetching_bd1, + C::instr_fetching_bd2, C::instr_fetching_bd3, C::instr_fetching_bd4, C::instr_fetching_bd5, + C::instr_fetching_bd6, C::instr_fetching_bd7, C::instr_fetching_bd8, C::instr_fetching_bd9, + C::instr_fetching_bd10, C::instr_fetching_bd11, C::instr_fetching_bd12, C::instr_fetching_bd13, + C::instr_fetching_bd14, C::instr_fetching_bd15, C::instr_fetching_bd16, C::instr_fetching_bd17, + C::instr_fetching_bd18, C::instr_fetching_bd19, C::instr_fetching_bd20, C::instr_fetching_bd21, + C::instr_fetching_bd22, C::instr_fetching_bd23, C::instr_fetching_bd24, C::instr_fetching_bd25, + C::instr_fetching_bd26, C::instr_fetching_bd27, C::instr_fetching_bd28, C::instr_fetching_bd29, + C::instr_fetching_bd30, C::instr_fetching_bd31, C::instr_fetching_bd32, C::instr_fetching_bd33, + C::instr_fetching_bd34, C::instr_fetching_bd35, C::instr_fetching_bd36, + }; + + // Mutate each column of the lookup tuple, one at a time + for (const auto& col : mutated_cols) { + auto mutated_trace = trace; + const FF mutated_value = trace.get(col, 0) + 1; // Mutate to value + 1 + mutated_trace.set(col, 0, mutated_value); + + // This sets the length of the inverse polynomial via SetDummyInverses, so we still need to call this even + // though we know it will fail. + EXPECT_THROW_WITH_MESSAGE( + LookupIntoDynamicTableSequential().process(mutated_trace), + "Failed.*BYTES_FROM_BC_DEC. Could not find tuple in destination."); + + EXPECT_THROW_WITH_MESSAGE(check_interaction<bc_decomposition_lookup>(mutated_trace), + "Relation.*BYTES_FROM_BC_DEC.* ACCUMULATION.* is non-zero"); + } } } diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/op_decomposition.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/op_decomposition.test.cpp index fea485972b5..fe998d6617b 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/op_decomposition.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/op_decomposition.test.cpp @@ -173,6 +173,8 @@ std::string render_pil( { std::string pil_equations; for (uint8_t i = 0; i < NUM_OF_OPERANDS; i++) { + pil_equations += (i == 0) ? "#[INDIRECT_BYTES_DECOMPOSITION]\n" : format("#[OP", static_cast<int>(i), "_BYTES_DECOMPOSITION]\n"); pil_equations += (i == 0) ?
indirect = " : format(OPERAND_PREFIX, static_cast<int>(i), " = "); std::vector<std::string> additive_terms; diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/poseidon2.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/poseidon2.test.cpp index 560ceeb51ec..0a850c81a3d 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/poseidon2.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/poseidon2.test.cpp @@ -10,9 +10,11 @@ #include "barretenberg/vm2/generated/relations/poseidon2_hash.hpp" #include "barretenberg/vm2/generated/relations/poseidon2_perm.hpp" #include "barretenberg/vm2/simulation/events/event_emitter.hpp" +#include "barretenberg/vm2/testing/fixtures.hpp" #include "barretenberg/vm2/testing/macros.hpp" #include "barretenberg/vm2/tracegen/lib/lookup_builder.hpp" #include "barretenberg/vm2/tracegen/poseidon2_trace.hpp" + // Temporary imports, see comment in test. #include "barretenberg/vm2/simulation/poseidon2.hpp" #include "barretenberg/vm2/tracegen/test_trace_container.hpp" @@ -38,12 +40,8 @@ using lookup_poseidon2_perm_relation = bb::avm2::lookup_poseidon2_hash_poseidon2 TEST(Poseidon2ConstrainingTest, Poseidon2EmptyRow) { - auto trace = TestTraceContainer::from_rows({ - { .precomputed_first_row = 1 }, - }); - - check_relation<poseidon2_hash>(trace); - check_relation<poseidon2_perm>(trace); + check_relation<poseidon2_hash>(testing::empty_trace()); + check_relation<poseidon2_perm>(testing::empty_trace()); } // These tests import a bunch of external code since hand-generating the poseidon2 trace is a bit laborious atm. diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/range_check.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/range_check.test.cpp index 69a775c2655..5838673d607 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/range_check.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/range_check.test.cpp @@ -7,6 +7,7 @@ #include "barretenberg/vm2/constraining/testing/check_relation.hpp" #include "barretenberg/vm2/generated/flavor_settings.hpp" #include "barretenberg/vm2/generated/relations/range_check.hpp" +#include "barretenberg/vm2/testing/fixtures.hpp" #include "barretenberg/vm2/testing/macros.hpp" #include "barretenberg/vm2/tracegen/range_check_trace.hpp" #include "barretenberg/vm2/tracegen/test_trace_container.hpp" @@ -22,11 +23,7 @@ using range_check = bb::avm2::range_check; TEST(RangeCheckConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_clk, 1 } }, - }); - - check_relation<range_check>(trace); + check_relation<range_check>(testing::empty_trace()); } TEST(RangeCheckConstrainingTest, IsLteMutuallyExclusive) diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/sha256.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/sha256.test.cpp index 1281c830d31..238a86df530 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/sha256.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/relations/sha256.test.cpp @@ -9,6 +9,7 @@ #include "barretenberg/vm2/generated/relations/sha256.hpp" #include "barretenberg/vm2/simulation/events/event_emitter.hpp" #include "barretenberg/vm2/simulation/memory.hpp" +#include "barretenberg/vm2/testing/fixtures.hpp" #include "barretenberg/vm2/testing/macros.hpp" #include "barretenberg/vm2/tracegen/lib/lookup_into_indexed_by_clk.hpp" #include "barretenberg/vm2/tracegen/precomputed_trace.hpp" @@ -44,11 +45,7 @@ using lookup_sha256_round_relation =
bb::avm2::lookup_sha256_round_constant_rela TEST(Sha256ConstrainingTest, EmptyRow) { - TestTraceContainer trace({ - { { C::precomputed_clk, 1 } }, - }); - - check_relation<sha256>(trace); + check_relation<sha256>(testing::empty_trace()); } // This test imports a bunch of external code since hand-generating the sha256 trace is a bit laborious atm. diff --git a/barretenberg/cpp/src/barretenberg/vm2/constraining/testing/check_relation.hpp b/barretenberg/cpp/src/barretenberg/vm2/constraining/testing/check_relation.hpp index 0ccca00153a..d21b9736cfa 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/constraining/testing/check_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/constraining/testing/check_relation.hpp @@ -27,13 +27,16 @@ template constexpr bool subrelation_is_linearly_independent( } template -void check_relation_internal(const Trace& trace, std::span<const size_t> subrelations, RowGetter get_row) +void check_relation_internal(const Trace& trace, + std::span<const size_t> subrelations, + uint32_t num_rows, + RowGetter get_row) { typename Relation::SumcheckArrayOfValuesOverSubrelations result{}; // Accumulate the trace over the subrelations and check the result // if the subrelation is linearly independent. - for (size_t r = 0; r < trace.size(); ++r) { + for (size_t r = 0; r < num_rows; ++r) { Relation::accumulate(result, get_row(trace, r), get_test_params(), 1); for (size_t j : subrelations) { if (subrelation_is_linearly_independent(j) && !result[j].is_zero()) { @@ -65,7 +68,7 @@ void check_relation(const tracegen::TestTraceContainer& trace, Ts... subrelation { std::array subrelations = { subrelation... }; detail::check_relation_internal( - trace.as_rows(), subrelations, [](const auto& trace, size_t r) { return trace.at(r); }); + trace.as_rows(), subrelations, trace.get_num_rows(), [](const auto& trace, size_t r) { return trace.at(r); }); } template void check_relation(const tracegen::TestTraceContainer& trace) @@ -93,8 +96,8 @@ template void check_interaction(const tracegen::TestTraceConta [&](std::index_sequence<Is...>) { constexpr std::array subrels = { Is... }; detail::check_relation_internal( - polys, subrels, [](const auto& polys, size_t r) { return polys.get_row(r); }); + polys, subrels, num_rows, [](const auto& polys, size_t r) { return polys.get_row(r); }); }(std::make_index_sequence()); } -} // namespace bb::avm2::constraining \ No newline at end of file +} // namespace bb::avm2::constraining diff --git a/barretenberg/cpp/src/barretenberg/vm2/generated/columns.hpp b/barretenberg/cpp/src/barretenberg/vm2/generated/columns.hpp index d7313639208..50245ede1af 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/generated/columns.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/generated/columns.hpp @@ -9,8 +9,8 @@ namespace bb::avm2 { // The entities that will be used in the flavor.
// clang-format off -#define AVM2_PRECOMPUTED_ENTITIES precomputed_as_unary, precomputed_bitwise_input_a, precomputed_bitwise_input_b, precomputed_bitwise_op_id, precomputed_bitwise_output, precomputed_clk, precomputed_exec_opcode, precomputed_first_row, precomputed_integral_tag_length, precomputed_power_of_2, precomputed_sel_bitwise, precomputed_sel_integral_tag, precomputed_sel_op_dc_0, precomputed_sel_op_dc_1, precomputed_sel_op_dc_10, precomputed_sel_op_dc_11, precomputed_sel_op_dc_12, precomputed_sel_op_dc_13, precomputed_sel_op_dc_14, precomputed_sel_op_dc_15, precomputed_sel_op_dc_16, precomputed_sel_op_dc_17, precomputed_sel_op_dc_2, precomputed_sel_op_dc_3, precomputed_sel_op_dc_4, precomputed_sel_op_dc_5, precomputed_sel_op_dc_6, precomputed_sel_op_dc_7, precomputed_sel_op_dc_8, precomputed_sel_op_dc_9, precomputed_sel_range_16, precomputed_sel_range_8, precomputed_sel_range_wire_opcode, precomputed_sel_sha256_compression, precomputed_sel_unary, precomputed_sha256_compression_round_constant, precomputed_zero -#define AVM2_WIRE_ENTITIES execution_input, alu_dst_addr, alu_ia, alu_ia_addr, alu_ib, alu_ib_addr, alu_ic, alu_op, alu_sel_op_add, bc_decomposition_abs_diff, bc_decomposition_bytes, bc_decomposition_bytes_pc_plus_1, bc_decomposition_bytes_pc_plus_10, bc_decomposition_bytes_pc_plus_11, bc_decomposition_bytes_pc_plus_12, bc_decomposition_bytes_pc_plus_13, bc_decomposition_bytes_pc_plus_14, bc_decomposition_bytes_pc_plus_15, bc_decomposition_bytes_pc_plus_16, bc_decomposition_bytes_pc_plus_17, bc_decomposition_bytes_pc_plus_18, bc_decomposition_bytes_pc_plus_19, bc_decomposition_bytes_pc_plus_2, bc_decomposition_bytes_pc_plus_20, bc_decomposition_bytes_pc_plus_21, bc_decomposition_bytes_pc_plus_22, bc_decomposition_bytes_pc_plus_23, bc_decomposition_bytes_pc_plus_24, bc_decomposition_bytes_pc_plus_25, bc_decomposition_bytes_pc_plus_26, bc_decomposition_bytes_pc_plus_27, bc_decomposition_bytes_pc_plus_28, bc_decomposition_bytes_pc_plus_29, bc_decomposition_bytes_pc_plus_3, bc_decomposition_bytes_pc_plus_30, bc_decomposition_bytes_pc_plus_31, bc_decomposition_bytes_pc_plus_32, bc_decomposition_bytes_pc_plus_33, bc_decomposition_bytes_pc_plus_34, bc_decomposition_bytes_pc_plus_35, bc_decomposition_bytes_pc_plus_36, bc_decomposition_bytes_pc_plus_4, bc_decomposition_bytes_pc_plus_5, bc_decomposition_bytes_pc_plus_6, bc_decomposition_bytes_pc_plus_7, bc_decomposition_bytes_pc_plus_8, bc_decomposition_bytes_pc_plus_9, bc_decomposition_bytes_rem_inv, bc_decomposition_bytes_rem_min_one_inv, bc_decomposition_bytes_remaining, bc_decomposition_bytes_to_read, bc_decomposition_bytes_to_read_unary, bc_decomposition_id, bc_decomposition_last_of_contract, bc_decomposition_packed_field, bc_decomposition_pc, bc_decomposition_sel, bc_decomposition_sel_overflow_correction_needed, bc_decomposition_sel_packed, bc_decomposition_sel_pc_plus_1, bc_decomposition_sel_pc_plus_10, bc_decomposition_sel_pc_plus_11, bc_decomposition_sel_pc_plus_12, bc_decomposition_sel_pc_plus_13, bc_decomposition_sel_pc_plus_14, bc_decomposition_sel_pc_plus_15, bc_decomposition_sel_pc_plus_16, bc_decomposition_sel_pc_plus_17, bc_decomposition_sel_pc_plus_18, bc_decomposition_sel_pc_plus_19, bc_decomposition_sel_pc_plus_2, bc_decomposition_sel_pc_plus_20, bc_decomposition_sel_pc_plus_21, bc_decomposition_sel_pc_plus_22, bc_decomposition_sel_pc_plus_23, bc_decomposition_sel_pc_plus_24, bc_decomposition_sel_pc_plus_25, bc_decomposition_sel_pc_plus_26, bc_decomposition_sel_pc_plus_27, bc_decomposition_sel_pc_plus_28, 
bc_decomposition_sel_pc_plus_29, bc_decomposition_sel_pc_plus_3, bc_decomposition_sel_pc_plus_30, bc_decomposition_sel_pc_plus_31, bc_decomposition_sel_pc_plus_32, bc_decomposition_sel_pc_plus_33, bc_decomposition_sel_pc_plus_34, bc_decomposition_sel_pc_plus_35, bc_decomposition_sel_pc_plus_36, bc_decomposition_sel_pc_plus_4, bc_decomposition_sel_pc_plus_5, bc_decomposition_sel_pc_plus_6, bc_decomposition_sel_pc_plus_7, bc_decomposition_sel_pc_plus_8, bc_decomposition_sel_pc_plus_9, bc_hashing_bytecode_id, bc_hashing_incremental_hash, bc_hashing_latch, bc_hashing_output_hash, bc_hashing_packed_field, bc_hashing_pc_index, bc_hashing_sel, bc_hashing_start, bc_retrieval_address, bc_retrieval_artifact_hash, bc_retrieval_bytecode_id, bc_retrieval_class_id, bc_retrieval_deployer_addr, bc_retrieval_err, bc_retrieval_incoming_viewing_key_x, bc_retrieval_incoming_viewing_key_y, bc_retrieval_init_hash, bc_retrieval_nullifier_key_x, bc_retrieval_nullifier_key_y, bc_retrieval_outgoing_viewing_key_x, bc_retrieval_outgoing_viewing_key_y, bc_retrieval_private_function_root, bc_retrieval_public_bytecode_commitment, bc_retrieval_salt, bc_retrieval_sel, bc_retrieval_siloed_address, bc_retrieval_tagging_key_x, bc_retrieval_tagging_key_y, bitwise_acc_ia, bitwise_acc_ib, bitwise_acc_ic, bitwise_ctr, bitwise_ctr_inv, bitwise_ctr_min_one_inv, bitwise_ia_byte, bitwise_ib_byte, bitwise_ic_byte, bitwise_last, bitwise_op_id, bitwise_sel, bitwise_start, bitwise_tag, class_id_derivation_artifact_hash, class_id_derivation_class_id, class_id_derivation_private_function_root, class_id_derivation_public_bytecode_commitment, class_id_derivation_sel, class_id_derivation_temp_constant_for_lookup, ecc_add_op, ecc_double_op, ecc_inv_2_p_y, ecc_inv_x_diff, ecc_inv_y_diff, ecc_lambda, ecc_p_is_inf, ecc_p_x, ecc_p_y, ecc_q_is_inf, ecc_q_x, ecc_q_y, ecc_r_is_inf, ecc_r_x, ecc_r_y, ecc_result_infinity, ecc_sel, ecc_x_match, ecc_y_match, execution_addressing_error_idx, execution_addressing_error_kind, execution_base_address_tag, execution_base_address_val, execution_bytecode_id, execution_clk, execution_ex_opcode, execution_indirect, execution_last, execution_op1, execution_op1_after_relative, execution_op2, execution_op2_after_relative, execution_op3, execution_op3_after_relative, execution_op4, execution_op4_after_relative, execution_pc, execution_rop1, execution_rop2, execution_rop3, execution_rop4, execution_sel, execution_sel_addressing_error, execution_sel_op1_is_address, execution_sel_op2_is_address, execution_sel_op3_is_address, execution_sel_op4_is_address, instr_fetching_bd0, instr_fetching_bd1, instr_fetching_bd10, instr_fetching_bd11, instr_fetching_bd12, instr_fetching_bd13, instr_fetching_bd14, instr_fetching_bd15, instr_fetching_bd16, instr_fetching_bd17, instr_fetching_bd18, instr_fetching_bd19, instr_fetching_bd2, instr_fetching_bd20, instr_fetching_bd21, instr_fetching_bd22, instr_fetching_bd23, instr_fetching_bd24, instr_fetching_bd25, instr_fetching_bd26, instr_fetching_bd27, instr_fetching_bd28, instr_fetching_bd29, instr_fetching_bd3, instr_fetching_bd30, instr_fetching_bd31, instr_fetching_bd32, instr_fetching_bd33, instr_fetching_bd34, instr_fetching_bd35, instr_fetching_bd36, instr_fetching_bd4, instr_fetching_bd5, instr_fetching_bd6, instr_fetching_bd7, instr_fetching_bd8, instr_fetching_bd9, instr_fetching_bytecode_id, instr_fetching_exec_opcode, instr_fetching_indirect, instr_fetching_op1, instr_fetching_op2, instr_fetching_op3, instr_fetching_op4, instr_fetching_op5, instr_fetching_op6, 
instr_fetching_op7, instr_fetching_pc, instr_fetching_sel, instr_fetching_sel_op_dc_0, instr_fetching_sel_op_dc_1, instr_fetching_sel_op_dc_10, instr_fetching_sel_op_dc_11, instr_fetching_sel_op_dc_12, instr_fetching_sel_op_dc_13, instr_fetching_sel_op_dc_14, instr_fetching_sel_op_dc_15, instr_fetching_sel_op_dc_16, instr_fetching_sel_op_dc_17, instr_fetching_sel_op_dc_2, instr_fetching_sel_op_dc_3, instr_fetching_sel_op_dc_4, instr_fetching_sel_op_dc_5, instr_fetching_sel_op_dc_6, instr_fetching_sel_op_dc_7, instr_fetching_sel_op_dc_8, instr_fetching_sel_op_dc_9, poseidon2_hash_a_0, poseidon2_hash_a_1, poseidon2_hash_a_2, poseidon2_hash_a_3, poseidon2_hash_b_0, poseidon2_hash_b_1, poseidon2_hash_b_2, poseidon2_hash_b_3, poseidon2_hash_end, poseidon2_hash_input_0, poseidon2_hash_input_1, poseidon2_hash_input_2, poseidon2_hash_input_len, poseidon2_hash_num_perm_rounds_rem, poseidon2_hash_num_perm_rounds_rem_inv, poseidon2_hash_output, poseidon2_hash_padding, poseidon2_hash_sel, poseidon2_hash_start, poseidon2_perm_B_10_0, poseidon2_perm_B_10_1, poseidon2_perm_B_10_2, poseidon2_perm_B_10_3, poseidon2_perm_B_11_0, poseidon2_perm_B_11_1, poseidon2_perm_B_11_2, poseidon2_perm_B_11_3, poseidon2_perm_B_12_0, poseidon2_perm_B_12_1, poseidon2_perm_B_12_2, poseidon2_perm_B_12_3, poseidon2_perm_B_13_0, poseidon2_perm_B_13_1, poseidon2_perm_B_13_2, poseidon2_perm_B_13_3, poseidon2_perm_B_14_0, poseidon2_perm_B_14_1, poseidon2_perm_B_14_2, poseidon2_perm_B_14_3, poseidon2_perm_B_15_0, poseidon2_perm_B_15_1, poseidon2_perm_B_15_2, poseidon2_perm_B_15_3, poseidon2_perm_B_16_0, poseidon2_perm_B_16_1, poseidon2_perm_B_16_2, poseidon2_perm_B_16_3, poseidon2_perm_B_17_0, poseidon2_perm_B_17_1, poseidon2_perm_B_17_2, poseidon2_perm_B_17_3, poseidon2_perm_B_18_0, poseidon2_perm_B_18_1, poseidon2_perm_B_18_2, poseidon2_perm_B_18_3, poseidon2_perm_B_19_0, poseidon2_perm_B_19_1, poseidon2_perm_B_19_2, poseidon2_perm_B_19_3, poseidon2_perm_B_20_0, poseidon2_perm_B_20_1, poseidon2_perm_B_20_2, poseidon2_perm_B_20_3, poseidon2_perm_B_21_0, poseidon2_perm_B_21_1, poseidon2_perm_B_21_2, poseidon2_perm_B_21_3, poseidon2_perm_B_22_0, poseidon2_perm_B_22_1, poseidon2_perm_B_22_2, poseidon2_perm_B_22_3, poseidon2_perm_B_23_0, poseidon2_perm_B_23_1, poseidon2_perm_B_23_2, poseidon2_perm_B_23_3, poseidon2_perm_B_24_0, poseidon2_perm_B_24_1, poseidon2_perm_B_24_2, poseidon2_perm_B_24_3, poseidon2_perm_B_25_0, poseidon2_perm_B_25_1, poseidon2_perm_B_25_2, poseidon2_perm_B_25_3, poseidon2_perm_B_26_0, poseidon2_perm_B_26_1, poseidon2_perm_B_26_2, poseidon2_perm_B_26_3, poseidon2_perm_B_27_0, poseidon2_perm_B_27_1, poseidon2_perm_B_27_2, poseidon2_perm_B_27_3, poseidon2_perm_B_28_0, poseidon2_perm_B_28_1, poseidon2_perm_B_28_2, poseidon2_perm_B_28_3, poseidon2_perm_B_29_0, poseidon2_perm_B_29_1, poseidon2_perm_B_29_2, poseidon2_perm_B_29_3, poseidon2_perm_B_30_0, poseidon2_perm_B_30_1, poseidon2_perm_B_30_2, poseidon2_perm_B_30_3, poseidon2_perm_B_31_0, poseidon2_perm_B_31_1, poseidon2_perm_B_31_2, poseidon2_perm_B_31_3, poseidon2_perm_B_32_0, poseidon2_perm_B_32_1, poseidon2_perm_B_32_2, poseidon2_perm_B_32_3, poseidon2_perm_B_33_0, poseidon2_perm_B_33_1, poseidon2_perm_B_33_2, poseidon2_perm_B_33_3, poseidon2_perm_B_34_0, poseidon2_perm_B_34_1, poseidon2_perm_B_34_2, poseidon2_perm_B_34_3, poseidon2_perm_B_35_0, poseidon2_perm_B_35_1, poseidon2_perm_B_35_2, poseidon2_perm_B_35_3, poseidon2_perm_B_36_0, poseidon2_perm_B_36_1, poseidon2_perm_B_36_2, poseidon2_perm_B_36_3, poseidon2_perm_B_37_0, poseidon2_perm_B_37_1, 
poseidon2_perm_B_37_2, poseidon2_perm_B_37_3, poseidon2_perm_B_38_0, poseidon2_perm_B_38_1, poseidon2_perm_B_38_2, poseidon2_perm_B_38_3, poseidon2_perm_B_39_0, poseidon2_perm_B_39_1, poseidon2_perm_B_39_2, poseidon2_perm_B_39_3, poseidon2_perm_B_40_0, poseidon2_perm_B_40_1, poseidon2_perm_B_40_2, poseidon2_perm_B_40_3, poseidon2_perm_B_41_0, poseidon2_perm_B_41_1, poseidon2_perm_B_41_2, poseidon2_perm_B_41_3, poseidon2_perm_B_42_0, poseidon2_perm_B_42_1, poseidon2_perm_B_42_2, poseidon2_perm_B_42_3, poseidon2_perm_B_43_0, poseidon2_perm_B_43_1, poseidon2_perm_B_43_2, poseidon2_perm_B_43_3, poseidon2_perm_B_44_0, poseidon2_perm_B_44_1, poseidon2_perm_B_44_2, poseidon2_perm_B_44_3, poseidon2_perm_B_45_0, poseidon2_perm_B_45_1, poseidon2_perm_B_45_2, poseidon2_perm_B_45_3, poseidon2_perm_B_46_0, poseidon2_perm_B_46_1, poseidon2_perm_B_46_2, poseidon2_perm_B_46_3, poseidon2_perm_B_47_0, poseidon2_perm_B_47_1, poseidon2_perm_B_47_2, poseidon2_perm_B_47_3, poseidon2_perm_B_48_0, poseidon2_perm_B_48_1, poseidon2_perm_B_48_2, poseidon2_perm_B_48_3, poseidon2_perm_B_49_0, poseidon2_perm_B_49_1, poseidon2_perm_B_49_2, poseidon2_perm_B_49_3, poseidon2_perm_B_4_0, poseidon2_perm_B_4_1, poseidon2_perm_B_4_2, poseidon2_perm_B_4_3, poseidon2_perm_B_50_0, poseidon2_perm_B_50_1, poseidon2_perm_B_50_2, poseidon2_perm_B_50_3, poseidon2_perm_B_51_0, poseidon2_perm_B_51_1, poseidon2_perm_B_51_2, poseidon2_perm_B_51_3, poseidon2_perm_B_52_0, poseidon2_perm_B_52_1, poseidon2_perm_B_52_2, poseidon2_perm_B_52_3, poseidon2_perm_B_53_0, poseidon2_perm_B_53_1, poseidon2_perm_B_53_2, poseidon2_perm_B_53_3, poseidon2_perm_B_54_0, poseidon2_perm_B_54_1, poseidon2_perm_B_54_2, poseidon2_perm_B_54_3, poseidon2_perm_B_55_0, poseidon2_perm_B_55_1, poseidon2_perm_B_55_2, poseidon2_perm_B_55_3, poseidon2_perm_B_56_0, poseidon2_perm_B_56_1, poseidon2_perm_B_56_2, poseidon2_perm_B_56_3, poseidon2_perm_B_57_0, poseidon2_perm_B_57_1, poseidon2_perm_B_57_2, poseidon2_perm_B_57_3, poseidon2_perm_B_58_0, poseidon2_perm_B_58_1, poseidon2_perm_B_58_2, poseidon2_perm_B_58_3, poseidon2_perm_B_59_0, poseidon2_perm_B_59_1, poseidon2_perm_B_59_2, poseidon2_perm_B_59_3, poseidon2_perm_B_5_0, poseidon2_perm_B_5_1, poseidon2_perm_B_5_2, poseidon2_perm_B_5_3, poseidon2_perm_B_6_0, poseidon2_perm_B_6_1, poseidon2_perm_B_6_2, poseidon2_perm_B_6_3, poseidon2_perm_B_7_0, poseidon2_perm_B_7_1, poseidon2_perm_B_7_2, poseidon2_perm_B_7_3, poseidon2_perm_B_8_0, poseidon2_perm_B_8_1, poseidon2_perm_B_8_2, poseidon2_perm_B_8_3, poseidon2_perm_B_9_0, poseidon2_perm_B_9_1, poseidon2_perm_B_9_2, poseidon2_perm_B_9_3, poseidon2_perm_EXT_LAYER_4, poseidon2_perm_EXT_LAYER_5, poseidon2_perm_EXT_LAYER_6, poseidon2_perm_EXT_LAYER_7, poseidon2_perm_T_0_4, poseidon2_perm_T_0_5, poseidon2_perm_T_0_6, poseidon2_perm_T_0_7, poseidon2_perm_T_1_4, poseidon2_perm_T_1_5, poseidon2_perm_T_1_6, poseidon2_perm_T_1_7, poseidon2_perm_T_2_4, poseidon2_perm_T_2_5, poseidon2_perm_T_2_6, poseidon2_perm_T_2_7, poseidon2_perm_T_3_4, poseidon2_perm_T_3_5, poseidon2_perm_T_3_6, poseidon2_perm_T_3_7, poseidon2_perm_T_60_4, poseidon2_perm_T_60_5, poseidon2_perm_T_60_6, poseidon2_perm_T_60_7, poseidon2_perm_T_61_4, poseidon2_perm_T_61_5, poseidon2_perm_T_61_6, poseidon2_perm_T_61_7, poseidon2_perm_T_62_4, poseidon2_perm_T_62_5, poseidon2_perm_T_62_6, poseidon2_perm_T_62_7, poseidon2_perm_T_63_4, poseidon2_perm_T_63_5, poseidon2_perm_T_63_6, poseidon2_perm_T_63_7, poseidon2_perm_a_0, poseidon2_perm_a_1, poseidon2_perm_a_2, poseidon2_perm_a_3, poseidon2_perm_b_0, poseidon2_perm_b_1, 
poseidon2_perm_b_2, poseidon2_perm_b_3, poseidon2_perm_sel, range_check_dyn_diff, range_check_dyn_rng_chk_bits, range_check_dyn_rng_chk_pow_2, range_check_is_lte_u112, range_check_is_lte_u128, range_check_is_lte_u16, range_check_is_lte_u32, range_check_is_lte_u48, range_check_is_lte_u64, range_check_is_lte_u80, range_check_is_lte_u96, range_check_rng_chk_bits, range_check_sel, range_check_sel_r0_16_bit_rng_lookup, range_check_sel_r1_16_bit_rng_lookup, range_check_sel_r2_16_bit_rng_lookup, range_check_sel_r3_16_bit_rng_lookup, range_check_sel_r4_16_bit_rng_lookup, range_check_sel_r5_16_bit_rng_lookup, range_check_sel_r6_16_bit_rng_lookup, range_check_u16_r0, range_check_u16_r1, range_check_u16_r2, range_check_u16_r3, range_check_u16_r4, range_check_u16_r5, range_check_u16_r6, range_check_u16_r7, range_check_value, scalar_mul_bit, scalar_mul_bit_idx, scalar_mul_bit_radix, scalar_mul_end, scalar_mul_not_end, scalar_mul_point_inf, scalar_mul_point_x, scalar_mul_point_y, scalar_mul_res_inf, scalar_mul_res_x, scalar_mul_res_y, scalar_mul_scalar, scalar_mul_sel, scalar_mul_should_add, scalar_mul_start, scalar_mul_temp_inf, scalar_mul_temp_x, scalar_mul_temp_y, sha256_a, sha256_a_and_b, sha256_a_and_b_xor_a_and_c, sha256_a_and_c, sha256_a_rotr_13, sha256_a_rotr_2, sha256_a_rotr_22, sha256_a_rotr_2_xor_a_rotr_13, sha256_and_sel, sha256_b, sha256_b_and_c, sha256_c, sha256_ch, sha256_clk, sha256_computed_w_lhs, sha256_computed_w_rhs, sha256_d, sha256_e, sha256_e_and_f, sha256_e_rotr_11, sha256_e_rotr_25, sha256_e_rotr_6, sha256_e_rotr_6_xor_e_rotr_11, sha256_f, sha256_g, sha256_h, sha256_helper_w0, sha256_helper_w1, sha256_helper_w10, sha256_helper_w11, sha256_helper_w12, sha256_helper_w13, sha256_helper_w14, sha256_helper_w15, sha256_helper_w2, sha256_helper_w3, sha256_helper_w4, sha256_helper_w5, sha256_helper_w6, sha256_helper_w7, sha256_helper_w8, sha256_helper_w9, sha256_init_a, sha256_init_b, sha256_init_c, sha256_init_d, sha256_init_e, sha256_init_f, sha256_init_g, sha256_init_h, sha256_input_offset, sha256_is_input_round, sha256_latch, sha256_lhs_a_13, sha256_lhs_a_2, sha256_lhs_a_22, sha256_lhs_e_11, sha256_lhs_e_25, sha256_lhs_e_6, sha256_lhs_w_10, sha256_lhs_w_17, sha256_lhs_w_18, sha256_lhs_w_19, sha256_lhs_w_3, sha256_lhs_w_7, sha256_maj, sha256_next_a_lhs, sha256_next_a_rhs, sha256_next_e_lhs, sha256_next_e_rhs, sha256_not_e, sha256_not_e_and_g, sha256_output_a_lhs, sha256_output_a_rhs, sha256_output_b_lhs, sha256_output_b_rhs, sha256_output_c_lhs, sha256_output_c_rhs, sha256_output_d_lhs, sha256_output_d_rhs, sha256_output_e_lhs, sha256_output_e_rhs, sha256_output_f_lhs, sha256_output_f_rhs, sha256_output_g_lhs, sha256_output_g_rhs, sha256_output_h_lhs, sha256_output_h_rhs, sha256_output_offset, sha256_perform_round, sha256_rhs_a_13, sha256_rhs_a_2, sha256_rhs_a_22, sha256_rhs_e_11, sha256_rhs_e_25, sha256_rhs_e_6, sha256_rhs_w_10, sha256_rhs_w_17, sha256_rhs_w_18, sha256_rhs_w_19, sha256_rhs_w_3, sha256_rhs_w_7, sha256_round_constant, sha256_round_count, sha256_rounds_remaining, sha256_rounds_remaining_inv, sha256_s_0, sha256_s_1, sha256_sel, sha256_start, sha256_state_offset, sha256_w, sha256_w_15_rotr_18, sha256_w_15_rotr_7, sha256_w_15_rotr_7_xor_w_15_rotr_18, sha256_w_15_rshift_3, sha256_w_2_rotr_17, sha256_w_2_rotr_17_xor_w_2_rotr_19, sha256_w_2_rotr_19, sha256_w_2_rshift_10, sha256_w_s_0, sha256_w_s_1, sha256_xor_sel, lookup_bc_decomposition_bytes_are_bytes_counts, lookup_bc_decomposition_abs_diff_is_u16_counts, lookup_bc_decomposition_bytes_to_read_as_unary_counts, 
lookup_poseidon2_hash_poseidon2_perm_counts, lookup_bc_hashing_get_packed_field_counts, lookup_bc_hashing_iv_is_len_counts, lookup_bc_hashing_poseidon2_hash_counts, lookup_bc_retrieval_class_id_derivation_counts, lookup_bc_retrieval_bytecode_hash_is_correct_counts, lookup_instr_fetching_bytes_from_bc_dec_counts, lookup_instr_fetching_wire_instruction_info_counts, lookup_class_id_derivation_class_id_poseidon2_0_counts, lookup_class_id_derivation_class_id_poseidon2_1_counts, lookup_range_check_dyn_rng_chk_pow_2_counts, lookup_range_check_dyn_diff_is_u16_counts, lookup_range_check_r0_is_u16_counts, lookup_range_check_r1_is_u16_counts, lookup_range_check_r2_is_u16_counts, lookup_range_check_r3_is_u16_counts, lookup_range_check_r4_is_u16_counts, lookup_range_check_r5_is_u16_counts, lookup_range_check_r6_is_u16_counts, lookup_range_check_r7_is_u16_counts, lookup_bitwise_integral_tag_length_counts, lookup_bitwise_byte_operations_counts, lookup_sha256_round_constant_counts, lookup_scalar_mul_double_counts, lookup_scalar_mul_add_counts +#define AVM2_PRECOMPUTED_ENTITIES precomputed_as_unary, precomputed_bitwise_input_a, precomputed_bitwise_input_b, precomputed_bitwise_op_id, precomputed_bitwise_output, precomputed_clk, precomputed_exec_opcode, precomputed_first_row, precomputed_instr_size_in_bytes, precomputed_integral_tag_length, precomputed_power_of_2, precomputed_sel_bitwise, precomputed_sel_integral_tag, precomputed_sel_op_dc_0, precomputed_sel_op_dc_1, precomputed_sel_op_dc_10, precomputed_sel_op_dc_11, precomputed_sel_op_dc_12, precomputed_sel_op_dc_13, precomputed_sel_op_dc_14, precomputed_sel_op_dc_15, precomputed_sel_op_dc_16, precomputed_sel_op_dc_17, precomputed_sel_op_dc_2, precomputed_sel_op_dc_3, precomputed_sel_op_dc_4, precomputed_sel_op_dc_5, precomputed_sel_op_dc_6, precomputed_sel_op_dc_7, precomputed_sel_op_dc_8, precomputed_sel_op_dc_9, precomputed_sel_range_16, precomputed_sel_range_8, precomputed_sel_range_wire_opcode, precomputed_sel_sha256_compression, precomputed_sel_unary, precomputed_sha256_compression_round_constant, precomputed_zero +#define AVM2_WIRE_ENTITIES execution_input, alu_dst_addr, alu_ia, alu_ia_addr, alu_ib, alu_ib_addr, alu_ic, alu_op, alu_sel_op_add, bc_decomposition_abs_diff, bc_decomposition_bytes, bc_decomposition_bytes_pc_plus_1, bc_decomposition_bytes_pc_plus_10, bc_decomposition_bytes_pc_plus_11, bc_decomposition_bytes_pc_plus_12, bc_decomposition_bytes_pc_plus_13, bc_decomposition_bytes_pc_plus_14, bc_decomposition_bytes_pc_plus_15, bc_decomposition_bytes_pc_plus_16, bc_decomposition_bytes_pc_plus_17, bc_decomposition_bytes_pc_plus_18, bc_decomposition_bytes_pc_plus_19, bc_decomposition_bytes_pc_plus_2, bc_decomposition_bytes_pc_plus_20, bc_decomposition_bytes_pc_plus_21, bc_decomposition_bytes_pc_plus_22, bc_decomposition_bytes_pc_plus_23, bc_decomposition_bytes_pc_plus_24, bc_decomposition_bytes_pc_plus_25, bc_decomposition_bytes_pc_plus_26, bc_decomposition_bytes_pc_plus_27, bc_decomposition_bytes_pc_plus_28, bc_decomposition_bytes_pc_plus_29, bc_decomposition_bytes_pc_plus_3, bc_decomposition_bytes_pc_plus_30, bc_decomposition_bytes_pc_plus_31, bc_decomposition_bytes_pc_plus_32, bc_decomposition_bytes_pc_plus_33, bc_decomposition_bytes_pc_plus_34, bc_decomposition_bytes_pc_plus_35, bc_decomposition_bytes_pc_plus_36, bc_decomposition_bytes_pc_plus_4, bc_decomposition_bytes_pc_plus_5, bc_decomposition_bytes_pc_plus_6, bc_decomposition_bytes_pc_plus_7, bc_decomposition_bytes_pc_plus_8, bc_decomposition_bytes_pc_plus_9, 
bc_decomposition_bytes_rem_inv, bc_decomposition_bytes_rem_min_one_inv, bc_decomposition_bytes_remaining, bc_decomposition_bytes_to_read, bc_decomposition_bytes_to_read_unary, bc_decomposition_id, bc_decomposition_last_of_contract, bc_decomposition_packed_field, bc_decomposition_pc, bc_decomposition_sel, bc_decomposition_sel_overflow_correction_needed, bc_decomposition_sel_packed, bc_decomposition_sel_pc_plus_1, bc_decomposition_sel_pc_plus_10, bc_decomposition_sel_pc_plus_11, bc_decomposition_sel_pc_plus_12, bc_decomposition_sel_pc_plus_13, bc_decomposition_sel_pc_plus_14, bc_decomposition_sel_pc_plus_15, bc_decomposition_sel_pc_plus_16, bc_decomposition_sel_pc_plus_17, bc_decomposition_sel_pc_plus_18, bc_decomposition_sel_pc_plus_19, bc_decomposition_sel_pc_plus_2, bc_decomposition_sel_pc_plus_20, bc_decomposition_sel_pc_plus_21, bc_decomposition_sel_pc_plus_22, bc_decomposition_sel_pc_plus_23, bc_decomposition_sel_pc_plus_24, bc_decomposition_sel_pc_plus_25, bc_decomposition_sel_pc_plus_26, bc_decomposition_sel_pc_plus_27, bc_decomposition_sel_pc_plus_28, bc_decomposition_sel_pc_plus_29, bc_decomposition_sel_pc_plus_3, bc_decomposition_sel_pc_plus_30, bc_decomposition_sel_pc_plus_31, bc_decomposition_sel_pc_plus_32, bc_decomposition_sel_pc_plus_33, bc_decomposition_sel_pc_plus_34, bc_decomposition_sel_pc_plus_35, bc_decomposition_sel_pc_plus_36, bc_decomposition_sel_pc_plus_4, bc_decomposition_sel_pc_plus_5, bc_decomposition_sel_pc_plus_6, bc_decomposition_sel_pc_plus_7, bc_decomposition_sel_pc_plus_8, bc_decomposition_sel_pc_plus_9, bc_hashing_bytecode_id, bc_hashing_incremental_hash, bc_hashing_latch, bc_hashing_output_hash, bc_hashing_packed_field, bc_hashing_pc_index, bc_hashing_sel, bc_hashing_start, bc_retrieval_address, bc_retrieval_artifact_hash, bc_retrieval_bytecode_id, bc_retrieval_class_id, bc_retrieval_deployer_addr, bc_retrieval_err, bc_retrieval_incoming_viewing_key_x, bc_retrieval_incoming_viewing_key_y, bc_retrieval_init_hash, bc_retrieval_nullifier_key_x, bc_retrieval_nullifier_key_y, bc_retrieval_outgoing_viewing_key_x, bc_retrieval_outgoing_viewing_key_y, bc_retrieval_private_function_root, bc_retrieval_public_bytecode_commitment, bc_retrieval_salt, bc_retrieval_sel, bc_retrieval_siloed_address, bc_retrieval_tagging_key_x, bc_retrieval_tagging_key_y, bitwise_acc_ia, bitwise_acc_ib, bitwise_acc_ic, bitwise_ctr, bitwise_ctr_inv, bitwise_ctr_min_one_inv, bitwise_ia_byte, bitwise_ib_byte, bitwise_ic_byte, bitwise_last, bitwise_op_id, bitwise_sel, bitwise_start, bitwise_tag, class_id_derivation_artifact_hash, class_id_derivation_class_id, class_id_derivation_private_function_root, class_id_derivation_public_bytecode_commitment, class_id_derivation_sel, class_id_derivation_temp_constant_for_lookup, ecc_add_op, ecc_double_op, ecc_inv_2_p_y, ecc_inv_x_diff, ecc_inv_y_diff, ecc_lambda, ecc_p_is_inf, ecc_p_x, ecc_p_y, ecc_q_is_inf, ecc_q_x, ecc_q_y, ecc_r_is_inf, ecc_r_x, ecc_r_y, ecc_result_infinity, ecc_sel, ecc_x_match, ecc_y_match, execution_addressing_error_idx, execution_addressing_error_kind, execution_base_address_tag, execution_base_address_val, execution_bytecode_id, execution_clk, execution_ex_opcode, execution_indirect, execution_last, execution_op1, execution_op1_after_relative, execution_op2, execution_op2_after_relative, execution_op3, execution_op3_after_relative, execution_op4, execution_op4_after_relative, execution_pc, execution_rop1, execution_rop2, execution_rop3, execution_rop4, execution_sel, execution_sel_addressing_error, execution_sel_op1_is_address, 
execution_sel_op2_is_address, execution_sel_op3_is_address, execution_sel_op4_is_address, instr_fetching_bd0, instr_fetching_bd1, instr_fetching_bd10, instr_fetching_bd11, instr_fetching_bd12, instr_fetching_bd13, instr_fetching_bd14, instr_fetching_bd15, instr_fetching_bd16, instr_fetching_bd17, instr_fetching_bd18, instr_fetching_bd19, instr_fetching_bd2, instr_fetching_bd20, instr_fetching_bd21, instr_fetching_bd22, instr_fetching_bd23, instr_fetching_bd24, instr_fetching_bd25, instr_fetching_bd26, instr_fetching_bd27, instr_fetching_bd28, instr_fetching_bd29, instr_fetching_bd3, instr_fetching_bd30, instr_fetching_bd31, instr_fetching_bd32, instr_fetching_bd33, instr_fetching_bd34, instr_fetching_bd35, instr_fetching_bd36, instr_fetching_bd4, instr_fetching_bd5, instr_fetching_bd6, instr_fetching_bd7, instr_fetching_bd8, instr_fetching_bd9, instr_fetching_bytecode_id, instr_fetching_exec_opcode, instr_fetching_indirect, instr_fetching_instr_size_in_bytes, instr_fetching_op1, instr_fetching_op2, instr_fetching_op3, instr_fetching_op4, instr_fetching_op5, instr_fetching_op6, instr_fetching_op7, instr_fetching_pc, instr_fetching_sel, instr_fetching_sel_op_dc_0, instr_fetching_sel_op_dc_1, instr_fetching_sel_op_dc_10, instr_fetching_sel_op_dc_11, instr_fetching_sel_op_dc_12, instr_fetching_sel_op_dc_13, instr_fetching_sel_op_dc_14, instr_fetching_sel_op_dc_15, instr_fetching_sel_op_dc_16, instr_fetching_sel_op_dc_17, instr_fetching_sel_op_dc_2, instr_fetching_sel_op_dc_3, instr_fetching_sel_op_dc_4, instr_fetching_sel_op_dc_5, instr_fetching_sel_op_dc_6, instr_fetching_sel_op_dc_7, instr_fetching_sel_op_dc_8, instr_fetching_sel_op_dc_9, poseidon2_hash_a_0, poseidon2_hash_a_1, poseidon2_hash_a_2, poseidon2_hash_a_3, poseidon2_hash_b_0, poseidon2_hash_b_1, poseidon2_hash_b_2, poseidon2_hash_b_3, poseidon2_hash_end, poseidon2_hash_input_0, poseidon2_hash_input_1, poseidon2_hash_input_2, poseidon2_hash_input_len, poseidon2_hash_num_perm_rounds_rem, poseidon2_hash_num_perm_rounds_rem_inv, poseidon2_hash_output, poseidon2_hash_padding, poseidon2_hash_sel, poseidon2_hash_start, poseidon2_perm_B_10_0, poseidon2_perm_B_10_1, poseidon2_perm_B_10_2, poseidon2_perm_B_10_3, poseidon2_perm_B_11_0, poseidon2_perm_B_11_1, poseidon2_perm_B_11_2, poseidon2_perm_B_11_3, poseidon2_perm_B_12_0, poseidon2_perm_B_12_1, poseidon2_perm_B_12_2, poseidon2_perm_B_12_3, poseidon2_perm_B_13_0, poseidon2_perm_B_13_1, poseidon2_perm_B_13_2, poseidon2_perm_B_13_3, poseidon2_perm_B_14_0, poseidon2_perm_B_14_1, poseidon2_perm_B_14_2, poseidon2_perm_B_14_3, poseidon2_perm_B_15_0, poseidon2_perm_B_15_1, poseidon2_perm_B_15_2, poseidon2_perm_B_15_3, poseidon2_perm_B_16_0, poseidon2_perm_B_16_1, poseidon2_perm_B_16_2, poseidon2_perm_B_16_3, poseidon2_perm_B_17_0, poseidon2_perm_B_17_1, poseidon2_perm_B_17_2, poseidon2_perm_B_17_3, poseidon2_perm_B_18_0, poseidon2_perm_B_18_1, poseidon2_perm_B_18_2, poseidon2_perm_B_18_3, poseidon2_perm_B_19_0, poseidon2_perm_B_19_1, poseidon2_perm_B_19_2, poseidon2_perm_B_19_3, poseidon2_perm_B_20_0, poseidon2_perm_B_20_1, poseidon2_perm_B_20_2, poseidon2_perm_B_20_3, poseidon2_perm_B_21_0, poseidon2_perm_B_21_1, poseidon2_perm_B_21_2, poseidon2_perm_B_21_3, poseidon2_perm_B_22_0, poseidon2_perm_B_22_1, poseidon2_perm_B_22_2, poseidon2_perm_B_22_3, poseidon2_perm_B_23_0, poseidon2_perm_B_23_1, poseidon2_perm_B_23_2, poseidon2_perm_B_23_3, poseidon2_perm_B_24_0, poseidon2_perm_B_24_1, poseidon2_perm_B_24_2, poseidon2_perm_B_24_3, poseidon2_perm_B_25_0, poseidon2_perm_B_25_1, 
poseidon2_perm_B_25_2, poseidon2_perm_B_25_3, poseidon2_perm_B_26_0, poseidon2_perm_B_26_1, poseidon2_perm_B_26_2, poseidon2_perm_B_26_3, poseidon2_perm_B_27_0, poseidon2_perm_B_27_1, poseidon2_perm_B_27_2, poseidon2_perm_B_27_3, poseidon2_perm_B_28_0, poseidon2_perm_B_28_1, poseidon2_perm_B_28_2, poseidon2_perm_B_28_3, poseidon2_perm_B_29_0, poseidon2_perm_B_29_1, poseidon2_perm_B_29_2, poseidon2_perm_B_29_3, poseidon2_perm_B_30_0, poseidon2_perm_B_30_1, poseidon2_perm_B_30_2, poseidon2_perm_B_30_3, poseidon2_perm_B_31_0, poseidon2_perm_B_31_1, poseidon2_perm_B_31_2, poseidon2_perm_B_31_3, poseidon2_perm_B_32_0, poseidon2_perm_B_32_1, poseidon2_perm_B_32_2, poseidon2_perm_B_32_3, poseidon2_perm_B_33_0, poseidon2_perm_B_33_1, poseidon2_perm_B_33_2, poseidon2_perm_B_33_3, poseidon2_perm_B_34_0, poseidon2_perm_B_34_1, poseidon2_perm_B_34_2, poseidon2_perm_B_34_3, poseidon2_perm_B_35_0, poseidon2_perm_B_35_1, poseidon2_perm_B_35_2, poseidon2_perm_B_35_3, poseidon2_perm_B_36_0, poseidon2_perm_B_36_1, poseidon2_perm_B_36_2, poseidon2_perm_B_36_3, poseidon2_perm_B_37_0, poseidon2_perm_B_37_1, poseidon2_perm_B_37_2, poseidon2_perm_B_37_3, poseidon2_perm_B_38_0, poseidon2_perm_B_38_1, poseidon2_perm_B_38_2, poseidon2_perm_B_38_3, poseidon2_perm_B_39_0, poseidon2_perm_B_39_1, poseidon2_perm_B_39_2, poseidon2_perm_B_39_3, poseidon2_perm_B_40_0, poseidon2_perm_B_40_1, poseidon2_perm_B_40_2, poseidon2_perm_B_40_3, poseidon2_perm_B_41_0, poseidon2_perm_B_41_1, poseidon2_perm_B_41_2, poseidon2_perm_B_41_3, poseidon2_perm_B_42_0, poseidon2_perm_B_42_1, poseidon2_perm_B_42_2, poseidon2_perm_B_42_3, poseidon2_perm_B_43_0, poseidon2_perm_B_43_1, poseidon2_perm_B_43_2, poseidon2_perm_B_43_3, poseidon2_perm_B_44_0, poseidon2_perm_B_44_1, poseidon2_perm_B_44_2, poseidon2_perm_B_44_3, poseidon2_perm_B_45_0, poseidon2_perm_B_45_1, poseidon2_perm_B_45_2, poseidon2_perm_B_45_3, poseidon2_perm_B_46_0, poseidon2_perm_B_46_1, poseidon2_perm_B_46_2, poseidon2_perm_B_46_3, poseidon2_perm_B_47_0, poseidon2_perm_B_47_1, poseidon2_perm_B_47_2, poseidon2_perm_B_47_3, poseidon2_perm_B_48_0, poseidon2_perm_B_48_1, poseidon2_perm_B_48_2, poseidon2_perm_B_48_3, poseidon2_perm_B_49_0, poseidon2_perm_B_49_1, poseidon2_perm_B_49_2, poseidon2_perm_B_49_3, poseidon2_perm_B_4_0, poseidon2_perm_B_4_1, poseidon2_perm_B_4_2, poseidon2_perm_B_4_3, poseidon2_perm_B_50_0, poseidon2_perm_B_50_1, poseidon2_perm_B_50_2, poseidon2_perm_B_50_3, poseidon2_perm_B_51_0, poseidon2_perm_B_51_1, poseidon2_perm_B_51_2, poseidon2_perm_B_51_3, poseidon2_perm_B_52_0, poseidon2_perm_B_52_1, poseidon2_perm_B_52_2, poseidon2_perm_B_52_3, poseidon2_perm_B_53_0, poseidon2_perm_B_53_1, poseidon2_perm_B_53_2, poseidon2_perm_B_53_3, poseidon2_perm_B_54_0, poseidon2_perm_B_54_1, poseidon2_perm_B_54_2, poseidon2_perm_B_54_3, poseidon2_perm_B_55_0, poseidon2_perm_B_55_1, poseidon2_perm_B_55_2, poseidon2_perm_B_55_3, poseidon2_perm_B_56_0, poseidon2_perm_B_56_1, poseidon2_perm_B_56_2, poseidon2_perm_B_56_3, poseidon2_perm_B_57_0, poseidon2_perm_B_57_1, poseidon2_perm_B_57_2, poseidon2_perm_B_57_3, poseidon2_perm_B_58_0, poseidon2_perm_B_58_1, poseidon2_perm_B_58_2, poseidon2_perm_B_58_3, poseidon2_perm_B_59_0, poseidon2_perm_B_59_1, poseidon2_perm_B_59_2, poseidon2_perm_B_59_3, poseidon2_perm_B_5_0, poseidon2_perm_B_5_1, poseidon2_perm_B_5_2, poseidon2_perm_B_5_3, poseidon2_perm_B_6_0, poseidon2_perm_B_6_1, poseidon2_perm_B_6_2, poseidon2_perm_B_6_3, poseidon2_perm_B_7_0, poseidon2_perm_B_7_1, poseidon2_perm_B_7_2, poseidon2_perm_B_7_3, poseidon2_perm_B_8_0, 
poseidon2_perm_B_8_1, poseidon2_perm_B_8_2, poseidon2_perm_B_8_3, poseidon2_perm_B_9_0, poseidon2_perm_B_9_1, poseidon2_perm_B_9_2, poseidon2_perm_B_9_3, poseidon2_perm_EXT_LAYER_4, poseidon2_perm_EXT_LAYER_5, poseidon2_perm_EXT_LAYER_6, poseidon2_perm_EXT_LAYER_7, poseidon2_perm_T_0_4, poseidon2_perm_T_0_5, poseidon2_perm_T_0_6, poseidon2_perm_T_0_7, poseidon2_perm_T_1_4, poseidon2_perm_T_1_5, poseidon2_perm_T_1_6, poseidon2_perm_T_1_7, poseidon2_perm_T_2_4, poseidon2_perm_T_2_5, poseidon2_perm_T_2_6, poseidon2_perm_T_2_7, poseidon2_perm_T_3_4, poseidon2_perm_T_3_5, poseidon2_perm_T_3_6, poseidon2_perm_T_3_7, poseidon2_perm_T_60_4, poseidon2_perm_T_60_5, poseidon2_perm_T_60_6, poseidon2_perm_T_60_7, poseidon2_perm_T_61_4, poseidon2_perm_T_61_5, poseidon2_perm_T_61_6, poseidon2_perm_T_61_7, poseidon2_perm_T_62_4, poseidon2_perm_T_62_5, poseidon2_perm_T_62_6, poseidon2_perm_T_62_7, poseidon2_perm_T_63_4, poseidon2_perm_T_63_5, poseidon2_perm_T_63_6, poseidon2_perm_T_63_7, poseidon2_perm_a_0, poseidon2_perm_a_1, poseidon2_perm_a_2, poseidon2_perm_a_3, poseidon2_perm_b_0, poseidon2_perm_b_1, poseidon2_perm_b_2, poseidon2_perm_b_3, poseidon2_perm_sel, range_check_dyn_diff, range_check_dyn_rng_chk_bits, range_check_dyn_rng_chk_pow_2, range_check_is_lte_u112, range_check_is_lte_u128, range_check_is_lte_u16, range_check_is_lte_u32, range_check_is_lte_u48, range_check_is_lte_u64, range_check_is_lte_u80, range_check_is_lte_u96, range_check_rng_chk_bits, range_check_sel, range_check_sel_r0_16_bit_rng_lookup, range_check_sel_r1_16_bit_rng_lookup, range_check_sel_r2_16_bit_rng_lookup, range_check_sel_r3_16_bit_rng_lookup, range_check_sel_r4_16_bit_rng_lookup, range_check_sel_r5_16_bit_rng_lookup, range_check_sel_r6_16_bit_rng_lookup, range_check_u16_r0, range_check_u16_r1, range_check_u16_r2, range_check_u16_r3, range_check_u16_r4, range_check_u16_r5, range_check_u16_r6, range_check_u16_r7, range_check_value, scalar_mul_bit, scalar_mul_bit_idx, scalar_mul_bit_radix, scalar_mul_end, scalar_mul_not_end, scalar_mul_point_inf, scalar_mul_point_x, scalar_mul_point_y, scalar_mul_res_inf, scalar_mul_res_x, scalar_mul_res_y, scalar_mul_scalar, scalar_mul_sel, scalar_mul_should_add, scalar_mul_start, scalar_mul_temp_inf, scalar_mul_temp_x, scalar_mul_temp_y, sha256_a, sha256_a_and_b, sha256_a_and_b_xor_a_and_c, sha256_a_and_c, sha256_a_rotr_13, sha256_a_rotr_2, sha256_a_rotr_22, sha256_a_rotr_2_xor_a_rotr_13, sha256_and_sel, sha256_b, sha256_b_and_c, sha256_c, sha256_ch, sha256_clk, sha256_computed_w_lhs, sha256_computed_w_rhs, sha256_d, sha256_e, sha256_e_and_f, sha256_e_rotr_11, sha256_e_rotr_25, sha256_e_rotr_6, sha256_e_rotr_6_xor_e_rotr_11, sha256_f, sha256_g, sha256_h, sha256_helper_w0, sha256_helper_w1, sha256_helper_w10, sha256_helper_w11, sha256_helper_w12, sha256_helper_w13, sha256_helper_w14, sha256_helper_w15, sha256_helper_w2, sha256_helper_w3, sha256_helper_w4, sha256_helper_w5, sha256_helper_w6, sha256_helper_w7, sha256_helper_w8, sha256_helper_w9, sha256_init_a, sha256_init_b, sha256_init_c, sha256_init_d, sha256_init_e, sha256_init_f, sha256_init_g, sha256_init_h, sha256_input_offset, sha256_is_input_round, sha256_latch, sha256_lhs_a_13, sha256_lhs_a_2, sha256_lhs_a_22, sha256_lhs_e_11, sha256_lhs_e_25, sha256_lhs_e_6, sha256_lhs_w_10, sha256_lhs_w_17, sha256_lhs_w_18, sha256_lhs_w_19, sha256_lhs_w_3, sha256_lhs_w_7, sha256_maj, sha256_next_a_lhs, sha256_next_a_rhs, sha256_next_e_lhs, sha256_next_e_rhs, sha256_not_e, sha256_not_e_and_g, sha256_output_a_lhs, sha256_output_a_rhs, 
sha256_output_b_lhs, sha256_output_b_rhs, sha256_output_c_lhs, sha256_output_c_rhs, sha256_output_d_lhs, sha256_output_d_rhs, sha256_output_e_lhs, sha256_output_e_rhs, sha256_output_f_lhs, sha256_output_f_rhs, sha256_output_g_lhs, sha256_output_g_rhs, sha256_output_h_lhs, sha256_output_h_rhs, sha256_output_offset, sha256_perform_round, sha256_rhs_a_13, sha256_rhs_a_2, sha256_rhs_a_22, sha256_rhs_e_11, sha256_rhs_e_25, sha256_rhs_e_6, sha256_rhs_w_10, sha256_rhs_w_17, sha256_rhs_w_18, sha256_rhs_w_19, sha256_rhs_w_3, sha256_rhs_w_7, sha256_round_constant, sha256_round_count, sha256_rounds_remaining, sha256_rounds_remaining_inv, sha256_s_0, sha256_s_1, sha256_sel, sha256_start, sha256_state_offset, sha256_w, sha256_w_15_rotr_18, sha256_w_15_rotr_7, sha256_w_15_rotr_7_xor_w_15_rotr_18, sha256_w_15_rshift_3, sha256_w_2_rotr_17, sha256_w_2_rotr_17_xor_w_2_rotr_19, sha256_w_2_rotr_19, sha256_w_2_rshift_10, sha256_w_s_0, sha256_w_s_1, sha256_xor_sel, lookup_bc_decomposition_bytes_are_bytes_counts, lookup_bc_decomposition_abs_diff_is_u16_counts, lookup_bc_decomposition_bytes_to_read_as_unary_counts, lookup_poseidon2_hash_poseidon2_perm_counts, lookup_bc_hashing_get_packed_field_counts, lookup_bc_hashing_iv_is_len_counts, lookup_bc_hashing_poseidon2_hash_counts, lookup_bc_retrieval_class_id_derivation_counts, lookup_bc_retrieval_bytecode_hash_is_correct_counts, lookup_instr_fetching_bytes_from_bc_dec_counts, lookup_instr_fetching_wire_instruction_info_counts, lookup_class_id_derivation_class_id_poseidon2_0_counts, lookup_class_id_derivation_class_id_poseidon2_1_counts, lookup_range_check_dyn_rng_chk_pow_2_counts, lookup_range_check_dyn_diff_is_u16_counts, lookup_range_check_r0_is_u16_counts, lookup_range_check_r1_is_u16_counts, lookup_range_check_r2_is_u16_counts, lookup_range_check_r3_is_u16_counts, lookup_range_check_r4_is_u16_counts, lookup_range_check_r5_is_u16_counts, lookup_range_check_r6_is_u16_counts, lookup_range_check_r7_is_u16_counts, lookup_bitwise_integral_tag_length_counts, lookup_bitwise_byte_operations_counts, lookup_sha256_round_constant_counts, lookup_scalar_mul_double_counts, lookup_scalar_mul_add_counts #define AVM2_DERIVED_WITNESS_ENTITIES lookup_bc_decomposition_bytes_are_bytes_inv, lookup_bc_decomposition_abs_diff_is_u16_inv, lookup_bc_decomposition_bytes_to_read_as_unary_inv, lookup_poseidon2_hash_poseidon2_perm_inv, lookup_bc_hashing_get_packed_field_inv, lookup_bc_hashing_iv_is_len_inv, lookup_bc_hashing_poseidon2_hash_inv, lookup_bc_retrieval_class_id_derivation_inv, lookup_bc_retrieval_bytecode_hash_is_correct_inv, lookup_instr_fetching_bytes_from_bc_dec_inv, lookup_instr_fetching_wire_instruction_info_inv, lookup_class_id_derivation_class_id_poseidon2_0_inv, lookup_class_id_derivation_class_id_poseidon2_1_inv, lookup_range_check_dyn_rng_chk_pow_2_inv, lookup_range_check_dyn_diff_is_u16_inv, lookup_range_check_r0_is_u16_inv, lookup_range_check_r1_is_u16_inv, lookup_range_check_r2_is_u16_inv, lookup_range_check_r3_is_u16_inv, lookup_range_check_r4_is_u16_inv, lookup_range_check_r5_is_u16_inv, lookup_range_check_r6_is_u16_inv, lookup_range_check_r7_is_u16_inv, lookup_bitwise_integral_tag_length_inv, lookup_bitwise_byte_operations_inv, lookup_sha256_round_constant_inv, lookup_scalar_mul_double_inv, lookup_scalar_mul_add_inv #define AVM2_SHIFTED_ENTITIES bc_decomposition_bytes_shift, bc_decomposition_bytes_pc_plus_1_shift, bc_decomposition_bytes_pc_plus_10_shift, bc_decomposition_bytes_pc_plus_11_shift, bc_decomposition_bytes_pc_plus_12_shift, 
bc_decomposition_bytes_pc_plus_13_shift, bc_decomposition_bytes_pc_plus_14_shift, bc_decomposition_bytes_pc_plus_15_shift, bc_decomposition_bytes_pc_plus_16_shift, bc_decomposition_bytes_pc_plus_17_shift, bc_decomposition_bytes_pc_plus_18_shift, bc_decomposition_bytes_pc_plus_19_shift, bc_decomposition_bytes_pc_plus_2_shift, bc_decomposition_bytes_pc_plus_20_shift, bc_decomposition_bytes_pc_plus_21_shift, bc_decomposition_bytes_pc_plus_22_shift, bc_decomposition_bytes_pc_plus_23_shift, bc_decomposition_bytes_pc_plus_24_shift, bc_decomposition_bytes_pc_plus_25_shift, bc_decomposition_bytes_pc_plus_26_shift, bc_decomposition_bytes_pc_plus_27_shift, bc_decomposition_bytes_pc_plus_28_shift, bc_decomposition_bytes_pc_plus_29_shift, bc_decomposition_bytes_pc_plus_3_shift, bc_decomposition_bytes_pc_plus_30_shift, bc_decomposition_bytes_pc_plus_31_shift, bc_decomposition_bytes_pc_plus_32_shift, bc_decomposition_bytes_pc_plus_33_shift, bc_decomposition_bytes_pc_plus_34_shift, bc_decomposition_bytes_pc_plus_35_shift, bc_decomposition_bytes_pc_plus_4_shift, bc_decomposition_bytes_pc_plus_5_shift, bc_decomposition_bytes_pc_plus_6_shift, bc_decomposition_bytes_pc_plus_7_shift, bc_decomposition_bytes_pc_plus_8_shift, bc_decomposition_bytes_pc_plus_9_shift, bc_decomposition_bytes_remaining_shift, bc_decomposition_id_shift, bc_decomposition_pc_shift, bc_decomposition_sel_shift, bc_hashing_bytecode_id_shift, bc_hashing_incremental_hash_shift, bc_hashing_pc_index_shift, bc_hashing_sel_shift, bc_hashing_start_shift, bitwise_acc_ia_shift, bitwise_acc_ib_shift, bitwise_acc_ic_shift, bitwise_ctr_shift, bitwise_op_id_shift, execution_sel_shift, poseidon2_hash_a_0_shift, poseidon2_hash_a_1_shift, poseidon2_hash_a_2_shift, poseidon2_hash_a_3_shift, poseidon2_hash_input_0_shift, poseidon2_hash_input_1_shift, poseidon2_hash_input_2_shift, poseidon2_hash_num_perm_rounds_rem_shift, poseidon2_hash_output_shift, poseidon2_hash_sel_shift, poseidon2_hash_start_shift, scalar_mul_bit_idx_shift, scalar_mul_point_inf_shift, scalar_mul_point_x_shift, scalar_mul_point_y_shift, scalar_mul_res_inf_shift, scalar_mul_res_x_shift, scalar_mul_res_y_shift, scalar_mul_scalar_shift, scalar_mul_sel_shift, scalar_mul_start_shift, scalar_mul_temp_inf_shift, scalar_mul_temp_x_shift, scalar_mul_temp_y_shift, sha256_a_shift, sha256_b_shift, sha256_c_shift, sha256_d_shift, sha256_e_shift, sha256_f_shift, sha256_g_shift, sha256_h_shift, sha256_helper_w0_shift, sha256_helper_w1_shift, sha256_helper_w10_shift, sha256_helper_w11_shift, sha256_helper_w12_shift, sha256_helper_w13_shift, sha256_helper_w14_shift, sha256_helper_w15_shift, sha256_helper_w2_shift, sha256_helper_w3_shift, sha256_helper_w4_shift, sha256_helper_w5_shift, sha256_helper_w6_shift, sha256_helper_w7_shift, sha256_helper_w8_shift, sha256_helper_w9_shift, sha256_rounds_remaining_shift, sha256_sel_shift, sha256_start_shift #define AVM2_TO_BE_SHIFTED(e) e.bc_decomposition_bytes, e.bc_decomposition_bytes_pc_plus_1, e.bc_decomposition_bytes_pc_plus_10, e.bc_decomposition_bytes_pc_plus_11, e.bc_decomposition_bytes_pc_plus_12, e.bc_decomposition_bytes_pc_plus_13, e.bc_decomposition_bytes_pc_plus_14, e.bc_decomposition_bytes_pc_plus_15, e.bc_decomposition_bytes_pc_plus_16, e.bc_decomposition_bytes_pc_plus_17, e.bc_decomposition_bytes_pc_plus_18, e.bc_decomposition_bytes_pc_plus_19, e.bc_decomposition_bytes_pc_plus_2, e.bc_decomposition_bytes_pc_plus_20, e.bc_decomposition_bytes_pc_plus_21, e.bc_decomposition_bytes_pc_plus_22, e.bc_decomposition_bytes_pc_plus_23, 
e.bc_decomposition_bytes_pc_plus_24, e.bc_decomposition_bytes_pc_plus_25, e.bc_decomposition_bytes_pc_plus_26, e.bc_decomposition_bytes_pc_plus_27, e.bc_decomposition_bytes_pc_plus_28, e.bc_decomposition_bytes_pc_plus_29, e.bc_decomposition_bytes_pc_plus_3, e.bc_decomposition_bytes_pc_plus_30, e.bc_decomposition_bytes_pc_plus_31, e.bc_decomposition_bytes_pc_plus_32, e.bc_decomposition_bytes_pc_plus_33, e.bc_decomposition_bytes_pc_plus_34, e.bc_decomposition_bytes_pc_plus_35, e.bc_decomposition_bytes_pc_plus_4, e.bc_decomposition_bytes_pc_plus_5, e.bc_decomposition_bytes_pc_plus_6, e.bc_decomposition_bytes_pc_plus_7, e.bc_decomposition_bytes_pc_plus_8, e.bc_decomposition_bytes_pc_plus_9, e.bc_decomposition_bytes_remaining, e.bc_decomposition_id, e.bc_decomposition_pc, e.bc_decomposition_sel, e.bc_hashing_bytecode_id, e.bc_hashing_incremental_hash, e.bc_hashing_pc_index, e.bc_hashing_sel, e.bc_hashing_start, e.bitwise_acc_ia, e.bitwise_acc_ib, e.bitwise_acc_ic, e.bitwise_ctr, e.bitwise_op_id, e.execution_sel, e.poseidon2_hash_a_0, e.poseidon2_hash_a_1, e.poseidon2_hash_a_2, e.poseidon2_hash_a_3, e.poseidon2_hash_input_0, e.poseidon2_hash_input_1, e.poseidon2_hash_input_2, e.poseidon2_hash_num_perm_rounds_rem, e.poseidon2_hash_output, e.poseidon2_hash_sel, e.poseidon2_hash_start, e.scalar_mul_bit_idx, e.scalar_mul_point_inf, e.scalar_mul_point_x, e.scalar_mul_point_y, e.scalar_mul_res_inf, e.scalar_mul_res_x, e.scalar_mul_res_y, e.scalar_mul_scalar, e.scalar_mul_sel, e.scalar_mul_start, e.scalar_mul_temp_inf, e.scalar_mul_temp_x, e.scalar_mul_temp_y, e.sha256_a, e.sha256_b, e.sha256_c, e.sha256_d, e.sha256_e, e.sha256_f, e.sha256_g, e.sha256_h, e.sha256_helper_w0, e.sha256_helper_w1, e.sha256_helper_w10, e.sha256_helper_w11, e.sha256_helper_w12, e.sha256_helper_w13, e.sha256_helper_w14, e.sha256_helper_w15, e.sha256_helper_w2, e.sha256_helper_w3, e.sha256_helper_w4, e.sha256_helper_w5, e.sha256_helper_w6, e.sha256_helper_w7, e.sha256_helper_w8, e.sha256_helper_w9, e.sha256_rounds_remaining, e.sha256_sel, e.sha256_start @@ -31,8 +31,8 @@ enum class ColumnAndShifts { SENTINEL_DO_NOT_USE, }; -constexpr auto NUM_COLUMNS_WITH_SHIFTS = 910; -constexpr auto NUM_COLUMNS_WITHOUT_SHIFTS = 808; +constexpr auto NUM_COLUMNS_WITH_SHIFTS = 912; +constexpr auto NUM_COLUMNS_WITHOUT_SHIFTS = 810; constexpr auto TO_BE_SHIFTED_COLUMNS_ARRAY = []() { return std::array{ AVM2_TO_BE_SHIFTED_COLUMNS }; }(); constexpr auto SHIFTED_COLUMNS_ARRAY = []() { return std::array{ AVM2_SHIFTED_COLUMNS }; }(); static_assert(TO_BE_SHIFTED_COLUMNS_ARRAY.size() == SHIFTED_COLUMNS_ARRAY.size()); diff --git a/barretenberg/cpp/src/barretenberg/vm2/generated/flavor.hpp b/barretenberg/cpp/src/barretenberg/vm2/generated/flavor.hpp index 2a4d19ee0e6..e0f417be8c6 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/generated/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/generated/flavor.hpp @@ -85,13 +85,13 @@ class AvmFlavor { // This flavor would not be used with ZK Sumcheck static constexpr bool HasZK = false; - static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 37; - static constexpr size_t NUM_WITNESS_ENTITIES = 771; + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 38; + static constexpr size_t NUM_WITNESS_ENTITIES = 772; static constexpr size_t NUM_SHIFTED_ENTITIES = 102; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static 
constexpr size_t NUM_ALL_ENTITIES = 910; + static constexpr size_t NUM_ALL_ENTITIES = 912; // Need to be templated for recursive verifier template @@ -399,6 +399,7 @@ class AvmFlavor { this->precomputed_clk = verification_key->precomputed_clk; this->precomputed_exec_opcode = verification_key->precomputed_exec_opcode; this->precomputed_first_row = verification_key->precomputed_first_row; + this->precomputed_instr_size_in_bytes = verification_key->precomputed_instr_size_in_bytes; this->precomputed_integral_tag_length = verification_key->precomputed_integral_tag_length; this->precomputed_power_of_2 = verification_key->precomputed_power_of_2; this->precomputed_sel_bitwise = verification_key->precomputed_sel_bitwise; diff --git a/barretenberg/cpp/src/barretenberg/vm2/generated/relations/instr_fetching.hpp b/barretenberg/cpp/src/barretenberg/vm2/generated/relations/instr_fetching.hpp index 3145c75d675..c43da9b1526 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/generated/relations/instr_fetching.hpp +++ b/barretenberg/cpp/src/barretenberg/vm2/generated/relations/instr_fetching.hpp @@ -187,9 +187,36 @@ template class instr_fetching : public Relation SRC_COLUMNS = { - ColumnAndShifts::instr_fetching_bd0, ColumnAndShifts::instr_fetching_exec_opcode, - ColumnAndShifts::instr_fetching_sel_op_dc_0, ColumnAndShifts::instr_fetching_sel_op_dc_1, - ColumnAndShifts::instr_fetching_sel_op_dc_2, ColumnAndShifts::instr_fetching_sel_op_dc_3, - ColumnAndShifts::instr_fetching_sel_op_dc_4, ColumnAndShifts::instr_fetching_sel_op_dc_5, - ColumnAndShifts::instr_fetching_sel_op_dc_6, ColumnAndShifts::instr_fetching_sel_op_dc_7, - ColumnAndShifts::instr_fetching_sel_op_dc_8, ColumnAndShifts::instr_fetching_sel_op_dc_9, - ColumnAndShifts::instr_fetching_sel_op_dc_10, ColumnAndShifts::instr_fetching_sel_op_dc_11, - ColumnAndShifts::instr_fetching_sel_op_dc_12, ColumnAndShifts::instr_fetching_sel_op_dc_13, - ColumnAndShifts::instr_fetching_sel_op_dc_14, ColumnAndShifts::instr_fetching_sel_op_dc_15, - ColumnAndShifts::instr_fetching_sel_op_dc_16, ColumnAndShifts::instr_fetching_sel_op_dc_17 + ColumnAndShifts::instr_fetching_bd0, + ColumnAndShifts::instr_fetching_exec_opcode, + ColumnAndShifts::instr_fetching_instr_size_in_bytes, + ColumnAndShifts::instr_fetching_sel_op_dc_0, + ColumnAndShifts::instr_fetching_sel_op_dc_1, + ColumnAndShifts::instr_fetching_sel_op_dc_2, + ColumnAndShifts::instr_fetching_sel_op_dc_3, + ColumnAndShifts::instr_fetching_sel_op_dc_4, + ColumnAndShifts::instr_fetching_sel_op_dc_5, + ColumnAndShifts::instr_fetching_sel_op_dc_6, + ColumnAndShifts::instr_fetching_sel_op_dc_7, + ColumnAndShifts::instr_fetching_sel_op_dc_8, + ColumnAndShifts::instr_fetching_sel_op_dc_9, + ColumnAndShifts::instr_fetching_sel_op_dc_10, + ColumnAndShifts::instr_fetching_sel_op_dc_11, + ColumnAndShifts::instr_fetching_sel_op_dc_12, + ColumnAndShifts::instr_fetching_sel_op_dc_13, + ColumnAndShifts::instr_fetching_sel_op_dc_14, + ColumnAndShifts::instr_fetching_sel_op_dc_15, + ColumnAndShifts::instr_fetching_sel_op_dc_16, + ColumnAndShifts::instr_fetching_sel_op_dc_17 }; static constexpr std::array DST_COLUMNS = { - ColumnAndShifts::precomputed_clk, ColumnAndShifts::precomputed_exec_opcode, - ColumnAndShifts::precomputed_sel_op_dc_0, ColumnAndShifts::precomputed_sel_op_dc_1, - ColumnAndShifts::precomputed_sel_op_dc_2, ColumnAndShifts::precomputed_sel_op_dc_3, - ColumnAndShifts::precomputed_sel_op_dc_4, ColumnAndShifts::precomputed_sel_op_dc_5, - ColumnAndShifts::precomputed_sel_op_dc_6, 
ColumnAndShifts::precomputed_sel_op_dc_7, - ColumnAndShifts::precomputed_sel_op_dc_8, ColumnAndShifts::precomputed_sel_op_dc_9, - ColumnAndShifts::precomputed_sel_op_dc_10, ColumnAndShifts::precomputed_sel_op_dc_11, - ColumnAndShifts::precomputed_sel_op_dc_12, ColumnAndShifts::precomputed_sel_op_dc_13, - ColumnAndShifts::precomputed_sel_op_dc_14, ColumnAndShifts::precomputed_sel_op_dc_15, - ColumnAndShifts::precomputed_sel_op_dc_16, ColumnAndShifts::precomputed_sel_op_dc_17 + ColumnAndShifts::precomputed_clk, + ColumnAndShifts::precomputed_exec_opcode, + ColumnAndShifts::precomputed_instr_size_in_bytes, + ColumnAndShifts::precomputed_sel_op_dc_0, + ColumnAndShifts::precomputed_sel_op_dc_1, + ColumnAndShifts::precomputed_sel_op_dc_2, + ColumnAndShifts::precomputed_sel_op_dc_3, + ColumnAndShifts::precomputed_sel_op_dc_4, + ColumnAndShifts::precomputed_sel_op_dc_5, + ColumnAndShifts::precomputed_sel_op_dc_6, + ColumnAndShifts::precomputed_sel_op_dc_7, + ColumnAndShifts::precomputed_sel_op_dc_8, + ColumnAndShifts::precomputed_sel_op_dc_9, + ColumnAndShifts::precomputed_sel_op_dc_10, + ColumnAndShifts::precomputed_sel_op_dc_11, + ColumnAndShifts::precomputed_sel_op_dc_12, + ColumnAndShifts::precomputed_sel_op_dc_13, + ColumnAndShifts::precomputed_sel_op_dc_14, + ColumnAndShifts::precomputed_sel_op_dc_15, + ColumnAndShifts::precomputed_sel_op_dc_16, + ColumnAndShifts::precomputed_sel_op_dc_17 }; template static inline auto inverse_polynomial_is_computed_at_row(const AllEntities& in) @@ -308,6 +330,7 @@ class lookup_instr_fetching_wire_instruction_info_settings { in._precomputed_sel_range_wire_opcode(), in._instr_fetching_bd0(), in._instr_fetching_exec_opcode(), + in._instr_fetching_instr_size_in_bytes(), in._instr_fetching_sel_op_dc_0(), in._instr_fetching_sel_op_dc_1(), in._instr_fetching_sel_op_dc_2(), @@ -328,6 +351,7 @@ class lookup_instr_fetching_wire_instruction_info_settings { in._instr_fetching_sel_op_dc_17(), in._precomputed_clk(), in._precomputed_exec_opcode(), + in._precomputed_instr_size_in_bytes(), in._precomputed_sel_op_dc_0(), in._precomputed_sel_op_dc_1(), in._precomputed_sel_op_dc_2(), diff --git a/barretenberg/cpp/src/barretenberg/vm2/simulation/bytecode_manager.cpp b/barretenberg/cpp/src/barretenberg/vm2/simulation/bytecode_manager.cpp index b7eb3713ff1..ecfb70ee31d 100644 --- a/barretenberg/cpp/src/barretenberg/vm2/simulation/bytecode_manager.cpp +++ b/barretenberg/cpp/src/barretenberg/vm2/simulation/bytecode_manager.cpp @@ -57,7 +57,7 @@ Instruction TxBytecodeManager::read_instruction(BytecodeId bytecode_id, uint32_t auto bytecode_ptr = it->second; const auto& bytecode = *bytecode_ptr; // TODO: catch errors etc. - Instruction instruction = decode_instruction(bytecode, pc); + Instruction instruction = deserialize_instruction(bytecode, pc); // The event will be deduplicated internally. 
fetching_events.emit(
diff --git a/barretenberg/cpp/src/barretenberg/vm2/simulation/execution.cpp b/barretenberg/cpp/src/barretenberg/vm2/simulation/execution.cpp
index 9574ed6a319..0e314b11c98 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/simulation/execution.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/simulation/execution.cpp
@@ -106,7 +106,7 @@ void Execution::execution_loop()
         try {
             auto pc = context.get_pc();
             Instruction instruction = context.get_bytecode_manager().read_instruction(pc);
-            context.set_next_pc(pc + instruction.size_in_bytes);
+            context.set_next_pc(pc + WIRE_INSTRUCTION_SPEC.at(instruction.opcode).size_in_bytes);
             info("@", pc, " ", instruction.to_string());
 
             ExecutionOpCode opcode = instruction_info_db.map_wire_opcode_to_execution_opcode(instruction.opcode);
diff --git a/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.cpp b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.cpp
index 2b930d7c47e..106f0a363cd 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.cpp
@@ -12,6 +12,7 @@
 #include "barretenberg/common/serialize.hpp"
 #include "barretenberg/numeric/uint256/uint256.hpp"
+#include "barretenberg/vm2/common/instruction_spec.hpp"
 #include "barretenberg/vm2/common/opcodes.hpp"
 
 namespace bb::avm2::simulation {
@@ -224,6 +225,27 @@ Operand& Operand::operator=(const Operand& other)
     return *this;
 }
 
+bool Operand::operator==(const Operand& other) const
+{
+    if (this == &other) {
+        return true;
+    }
+
+    if (value.index() != other.value.index()) {
+        return false;
+    }
+
+    if (std::holds_alternative<std::shared_ptr<uint128_t>>(value)) {
+        return *std::get<std::shared_ptr<uint128_t>>(value) == *std::get<std::shared_ptr<uint128_t>>(other.value);
+    }
+
+    if (std::holds_alternative<std::shared_ptr<FF>>(value)) {
+        return *std::get<std::shared_ptr<FF>>(value) == *std::get<std::shared_ptr<FF>>(other.value);
+    }
+
+    return value == other.value;
+}
+
 Operand::operator bool() const
 {
     return (this->operator uint8_t() == 1);
@@ -330,10 +352,9 @@ std::string Operand::to_string() const
     __builtin_unreachable();
 }
 
-Instruction decode_instruction(std::span<const uint8_t> bytecode, size_t pos)
+Instruction deserialize_instruction(std::span<const uint8_t> bytecode, size_t pos)
 {
     const auto bytecode_length = bytecode.size();
-    const auto starting_pos = pos;
     assert(pos < bytecode_length);
     (void)bytecode_length; // Avoid GCC unused parameter warning when asserts are disabled.
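The hunks above capture the core refactor: Instruction no longer stores its own size_in_bytes; both the execution loop and the serializer derive instruction sizes from the per-opcode WIRE_INSTRUCTION_SPEC table, so the fetch path and the wire format cannot drift apart. Below is a minimal standalone sketch of this pattern, with stand-in types and purely illustrative sizes (the real table lives in instruction_spec.hpp):

#include <cstdint>
#include <cstdio>
#include <map>

// Stand-ins for the real WireOpCode and WIRE_INSTRUCTION_SPEC; the sizes are hypothetical.
enum class WireOpCode : uint8_t { NOT_8, ADD_16 };
struct WireInstructionSpec {
    uint8_t size_in_bytes;
};
static const std::map<WireOpCode, WireInstructionSpec> WIRE_INSTRUCTION_SPEC = {
    { WireOpCode::NOT_8, { 5 } },  // hypothetical size
    { WireOpCode::ADD_16, { 9 } }, // hypothetical size
};

// No size_in_bytes member: the size is a function of the opcode alone.
struct Instruction {
    WireOpCode opcode;
};

int main()
{
    const uint32_t pc = 0;
    const Instruction instruction{ WireOpCode::ADD_16 };
    // Advance pc from the static spec, mirroring the new execution.cpp logic.
    const uint32_t next_pc = pc + WIRE_INSTRUCTION_SPEC.at(instruction.opcode).size_in_bytes;
    std::printf("next pc: %u\n", next_pc);
    return 0;
}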
@@ -451,10 +472,11 @@ Instruction decode_instruction(std::span<const uint8_t> bytecode, size_t pos)
         pos += operand_size;
     }
 
-    return { .opcode = opcode,
-             .indirect = indirect,
-             .operands = std::move(operands),
-             .size_in_bytes = static_cast<uint8_t>(pos - starting_pos) };
+    return {
+        .opcode = opcode,
+        .indirect = indirect,
+        .operands = std::move(operands),
+    };
 };
 
 std::string Instruction::to_string() const
@@ -464,8 +486,60 @@ std::string Instruction::to_string() const
     for (const auto& operand : operands) {
         oss << operand.to_string() << " ";
     }
-    oss << "], size: " << static_cast<int>(size_in_bytes);
+    oss << "]";
     return oss.str();
 }
 
+std::vector<uint8_t> Instruction::serialize() const
+{
+    std::vector<uint8_t> output;
+    output.reserve(WIRE_INSTRUCTION_SPEC.at(opcode).size_in_bytes);
+    output.emplace_back(static_cast<uint8_t>(opcode));
+    size_t operand_pos = 0;
+
+    for (const auto& operand_type : WireOpCode_WIRE_FORMAT.at(opcode)) {
+        switch (operand_type) {
+        case OperandType::INDIRECT8:
+            output.emplace_back(static_cast<uint8_t>(indirect));
+            break;
+        case OperandType::INDIRECT16: {
+            const auto indirect_vec = to_buffer(indirect);
+            output.insert(output.end(),
+                          std::make_move_iterator(indirect_vec.begin()),
+                          std::make_move_iterator(indirect_vec.end()));
+        } break;
+        case OperandType::TAG:
+        case OperandType::UINT8:
+            output.emplace_back(static_cast<uint8_t>(operands.at(operand_pos++)));
+            break;
+        case OperandType::UINT16: {
+            const auto operand_vec = to_buffer(static_cast<uint16_t>(operands.at(operand_pos++)));
+            output.insert(
+                output.end(), std::make_move_iterator(operand_vec.begin()), std::make_move_iterator(operand_vec.end()));
+        } break;
+        case OperandType::UINT32: {
+            const auto operand_vec = to_buffer(static_cast<uint32_t>(operands.at(operand_pos++)));
+            output.insert(
+                output.end(), std::make_move_iterator(operand_vec.begin()), std::make_move_iterator(operand_vec.end()));
+        } break;
+        case OperandType::UINT64: {
+            const auto operand_vec = to_buffer(static_cast<uint64_t>(operands.at(operand_pos++)));
+            output.insert(
+                output.end(), std::make_move_iterator(operand_vec.begin()), std::make_move_iterator(operand_vec.end()));
+        } break;
+        case OperandType::UINT128: {
+            const auto operand_vec = to_buffer(static_cast<uint128_t>(operands.at(operand_pos++)));
+            output.insert(
+                output.end(), std::make_move_iterator(operand_vec.begin()), std::make_move_iterator(operand_vec.end()));
+        } break;
+        case OperandType::FF: {
+            const auto operand_vec = to_buffer(static_cast<FF>(operands.at(operand_pos++)));
+            output.insert(
+                output.end(), std::make_move_iterator(operand_vec.begin()), std::make_move_iterator(operand_vec.end()));
+        } break;
+        }
+    }
+    return output;
+}
+
 } // namespace bb::avm2::simulation
diff --git a/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.hpp b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.hpp
index 8b8a6a3aa64..010f3218cbb 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.hpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.hpp
@@ -42,6 +42,7 @@ class Operand {
     Operand(const Operand& other);
     Operand(Operand&&) = default;
     Operand& operator=(const Operand& other);
+    bool operator==(const Operand& other) const;
 
     // Helpers for when we want to pass a value without casting.
     static Operand u8(uint8_t value) { return { value }; }
@@ -68,9 +69,12 @@ struct Instruction {
     WireOpCode opcode;
     uint16_t indirect;
     std::vector<Operand> operands;
-    uint8_t size_in_bytes;
 
     std::string to_string() const;
+    // Serialize the instruction according to the specification from WireOpCode_WIRE_FORMAT.
+    std::vector<uint8_t> serialize() const;
+
+    bool operator==(const Instruction& other) const = default;
 };
 
 /**
@@ -83,6 +87,6 @@ struct Instruction {
  * @throws runtime_error exception when the bytecode is invalid or pos is out-of-range
  * @return The instruction
  */
-Instruction decode_instruction(std::span<const uint8_t> bytecode, size_t pos);
+Instruction deserialize_instruction(std::span<const uint8_t> bytecode, size_t pos);
 
 } // namespace bb::avm2::simulation
diff --git a/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.test.cpp
new file mode 100644
index 00000000000..35ef430de2c
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/vm2/simulation/lib/serialization.test.cpp
@@ -0,0 +1,85 @@
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "barretenberg/vm2/simulation/lib/serialization.hpp"
+
+namespace bb::avm2 {
+namespace {
+using simulation::deserialize_instruction;
+using simulation::Instruction;
+using simulation::Operand;
+
+// Testing serialization with some u8 variants
+TEST(SerializationTest, Not8RoundTrip)
+{
+    const Instruction instr = { .opcode = WireOpCode::NOT_8,
+                                .indirect = 5,
+                                .operands = { Operand::u8(123), Operand::u8(45) } };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+// Testing serialization with some u16 variants
+TEST(SerializationTest, Add16RoundTrip)
+{
+    const Instruction instr = { .opcode = WireOpCode::ADD_16,
+                                .indirect = 3,
+                                .operands = { Operand::u16(1000), Operand::u16(1001), Operand::u16(1002) } };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+// Testing serialization with a u32 variant
+TEST(SerializationTest, Jumpi32RoundTrip)
+{
+    const Instruction instr = { .opcode = WireOpCode::JUMPI_32,
+                                .indirect = 7,
+                                .operands = { Operand::u16(12345), Operand::u32(678901234) } };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+// Testing serialization with a u64 variant
+TEST(SerializationTest, Set64RoundTrip)
+{
+    const uint64_t value_64 = 0xABCDEF0123456789LLU;
+
+    const Instruction instr = {
+        .opcode = WireOpCode::SET_64,
+        .indirect = 2,
+        .operands = { Operand::u16(1002), Operand::u8(static_cast<uint8_t>(MemoryTag::U64)), Operand::u64(value_64) }
+    };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+// Testing serialization with a u128 variant
+TEST(SerializationTest, Set128RoundTrip)
+{
+    const uint128_t value_128 = (uint128_t{ 0x123456789ABCDEF0LLU } << 64) + uint128_t{ 0xABCDEF0123456789LLU };
+
+    const Instruction instr = {
+        .opcode = WireOpCode::SET_128,
+        .indirect = 2,
+        .operands = { Operand::u16(1002), Operand::u8(static_cast<uint8_t>(MemoryTag::U128)), Operand::u128(value_128) }
+    };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+// Testing serialization with ff variant
+TEST(SerializationTest, SetFFRoundTrip)
+{
+    const FF large_ff = FF::modulus - 981723;
+
+    const Instruction instr = {
+        .opcode = WireOpCode::SET_FF,
+        .indirect = 2,
+        .operands = { Operand::u16(1002), Operand::u8(static_cast<uint8_t>(MemoryTag::FF)), Operand::ff(large_ff) }
+    };
+    const auto decoded = deserialize_instruction(instr.serialize(), 0);
+    EXPECT_EQ(instr, decoded);
+}
+
+} // namespace
+} // namespace bb::avm2
diff --git a/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.cpp b/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.cpp
index 8a772d3d5c6..48c703f0fdf 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.cpp
@@ -1,12 +1,20 @@
 #include "barretenberg/vm2/testing/fixtures.hpp"
-#include "barretenberg/vm2/tracegen/test_trace_container.hpp"
 
 #include
 
+#include "barretenberg/vm2/common/instruction_spec.hpp"
+
 using bb::avm2::tracegen::TestTraceContainer;
 
 namespace bb::avm2::testing {
 
+using simulation::Instruction;
+using simulation::Operand;
+using simulation::OperandType;
+
+// If MemoryTag enum changes, this value might need to be adjusted.
+constexpr uint8_t NUM_MEMORY_TAGS = static_cast<uint8_t>(MemoryTag::U128) + 1;
+
 std::vector<FF> random_fields(size_t n)
 {
     std::vector<FF> fields;
@@ -27,6 +35,79 @@ std::vector<uint8_t> random_bytes(size_t n)
     return bytes;
 }
 
+Operand random_operand(OperandType operand_type)
+{
+    const auto rand_bytes = random_bytes(simulation::testonly::get_operand_type_sizes().at(operand_type));
+    const uint8_t* pos_ptr = &rand_bytes.at(0);
+
+    switch (operand_type) {
+    case OperandType::INDIRECT8: // Irrelevant bits might be toggled but they are ignored during address resolution.
+    case OperandType::UINT8: {
+        uint8_t operand_u8 = 0;
+        serialize::read(pos_ptr, operand_u8);
+        return Operand::u8(operand_u8);
+    }
+    case OperandType::TAG: {
+        uint8_t operand_u8 = 0;
+        serialize::read(pos_ptr, operand_u8);
+        return Operand::u8(operand_u8 % NUM_MEMORY_TAGS); // Modulo introduces a bias, but that is fine for testing purposes.
+    }
+    case OperandType::INDIRECT16: // Irrelevant bits might be toggled but they are ignored during address resolution.
+    case OperandType::UINT16: {
+        uint16_t operand_u16 = 0;
+        serialize::read(pos_ptr, operand_u16);
+        return Operand::u16(operand_u16);
+    }
+    case OperandType::UINT32: {
+        uint32_t operand_u32 = 0;
+        serialize::read(pos_ptr, operand_u32);
+        return Operand::u32(operand_u32);
+    }
+    case OperandType::UINT64: {
+        uint64_t operand_u64 = 0;
+        serialize::read(pos_ptr, operand_u64);
+        return Operand::u64(operand_u64);
+    }
+    case OperandType::UINT128: {
+        uint128_t operand_u128 = 0;
+        serialize::read(pos_ptr, operand_u128);
+        return Operand::u128(operand_u128);
+    }
+    case OperandType::FF:
+        return Operand::ff(FF::random_element());
+    }
+
+    // Need this for gcc compilation even though we fully handle the switch cases.
+    // We never reach this point.
+    __builtin_unreachable();
+}
+
+Instruction random_instruction(WireOpCode w_opcode)
+{
+    const auto format = simulation::testonly::get_instruction_wire_formats().at(w_opcode);
+    std::vector<Operand> operands;
+    uint16_t indirect = 0;
+    operands.reserve(format.size()); // May reserve slightly more than needed, because indirect operands are not pushed.
+
+    for (const auto& operand_type : format) {
+        switch (operand_type) {
+        case OperandType::INDIRECT8:
+        case OperandType::INDIRECT16:
+            indirect = static_cast<uint16_t>(random_operand(operand_type));
+            break;
+        default:
+            operands.emplace_back(random_operand(operand_type));
+            break;
+        }
+    }
+
+    return Instruction{
+        .opcode = w_opcode,
+        .indirect = indirect,
+        .operands = std::move(operands),
+    };
+}
+
 TestTraceContainer empty_trace()
 {
     return TestTraceContainer::from_rows({ { .precomputed_first_row = 1 }, { .precomputed_clk = 1 } });
diff --git a/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.hpp b/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.hpp
index 8ca3f2a45b9..e43cd1bb553 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.hpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/testing/fixtures.hpp
@@ -3,12 +3,34 @@
 #include
 #include "barretenberg/vm2/common/field.hpp"
+#include "barretenberg/vm2/simulation/lib/serialization.hpp"
 #include "barretenberg/vm2/tracegen/test_trace_container.hpp"

 namespace bb::avm2::testing {

 std::vector<FF> random_fields(size_t n);
+
+// WARNING: Cryptographically insecure randomness routines for testing purposes only.
 std::vector<uint8_t> random_bytes(size_t n);

+simulation::Operand random_operand(simulation::OperandType operand_type);
+
+// This generates a random instruction for a given wire opcode. The output will conform to
+// the wire format specified in WireOpCode_WIRE_FORMAT. The format specifies a vector of
+// OperandType and we generate a random value for each operand conforming to the OperandType.
+// For OperandTypes:
+//   INDIRECT8, UINT8:   a single random byte
+//   INDIRECT16, UINT16: 2 random bytes
+//   UINTXX:             random bytes matching the size in bytes of the operand
+//   FF:                 a random field element
+//   TAG:                a random tag within the MemoryTag enum range
+// We do not provide any guarantee beyond these static format restrictions.
+// For instance, the next-pc destination of a JUMP might overflow the bytecode, and
+// immediate operands which correspond to an enum value might fall outside the
+// prescribed range (for instance: GETENVVAR_16 and GETCONTRACTINSTANCE).
+// Note that the indirect value might have bits toggled which are not relevant to the
+// wire opcode. In principle this is not an issue, as these bits are ignored during
+// address resolution.
+simulation::Instruction random_instruction(WireOpCode w_opcode);

 tracegen::TestTraceContainer empty_trace();

 } // namespace bb::avm2::testing
diff --git a/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.cpp b/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.cpp
index 0278d8239ff..5e7b445516d 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.cpp
@@ -271,14 +271,8 @@ void BytecodeTraceBuilder::process_instruction_fetching(
           { C::instr_fetching_op5, get_operand(4) },
           { C::instr_fetching_op6, get_operand(5) },
           { C::instr_fetching_op7, get_operand(6) },
-          // From instruction table.
-          // FIXME: This one is wrong, it's the wire opcode.
-          // { C::instr_fetching_ex_opcode, event.instruction.opcode },
-          // TODO: add the rest.
           // Single bytes.
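+          // bd0 holds the wire opcode byte itself; bd1..bd36 below are the raw bytecode bytes at pc+1..pc+36.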
           { C::instr_fetching_bd0, wire_opcode },
-          { C::instr_fetching_exec_opcode,
-            static_cast<uint8_t>(WIRE_INSTRUCTION_SPEC.at(w_opcode).exec_opcode) },
           { C::instr_fetching_bd1, bytecode_at(event.pc + 1) },
           { C::instr_fetching_bd2, bytecode_at(event.pc + 2) },
           { C::instr_fetching_bd3, bytecode_at(event.pc + 3) },
@@ -316,6 +310,10 @@ void BytecodeTraceBuilder::process_instruction_fetching(
           { C::instr_fetching_bd35, bytecode_at(event.pc + 35) },
           { C::instr_fetching_bd36, bytecode_at(event.pc + 36) },

+          // From instruction table.
+          { C::instr_fetching_exec_opcode,
+            static_cast<uint8_t>(WIRE_INSTRUCTION_SPEC.at(w_opcode).exec_opcode) },
+          { C::instr_fetching_instr_size_in_bytes, WIRE_INSTRUCTION_SPEC.at(w_opcode).size_in_bytes },
           // Fill operand decomposition selectors
           { C::instr_fetching_sel_op_dc_0, WIRE_INSTRUCTION_SPEC.at(w_opcode).op_dc_selectors.at(0) },
           { C::instr_fetching_sel_op_dc_1, WIRE_INSTRUCTION_SPEC.at(w_opcode).op_dc_selectors.at(1) },
diff --git a/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.test.cpp b/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.test.cpp
index 56182433a99..c60eed8bd88 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/tracegen/bytecode_trace.test.cpp
@@ -8,8 +8,10 @@
 #include
 #include

+#include "barretenberg/vm2/common/instruction_spec.hpp"
 #include "barretenberg/vm2/generated/flavor_settings.hpp"
 #include "barretenberg/vm2/generated/full_row.hpp"
+#include "barretenberg/vm2/testing/fixtures.hpp"
 #include "barretenberg/vm2/testing/macros.hpp"
 #include "barretenberg/vm2/tracegen/bytecode_trace.hpp"
 #include "barretenberg/vm2/tracegen/test_trace_container.hpp"
@@ -17,12 +19,12 @@
 namespace bb::avm2::tracegen {
 namespace {

-using testing::Field;
+using ::testing::Field;

 using R = TestTraceContainer::Row;
 using FF = R::FF;

-TEST(BytecodeTraceGenTest, basicShortLength)
+TEST(BytecodeTraceGenTest, BasicShortLength)
 {
     TestTraceContainer trace;
     BytecodeTraceBuilder builder;
@@ -100,7 +102,7 @@ TEST(BytecodeTraceGenTest, basicShortLength)
                           ROW_FIELD_EQ(R, bc_decomposition_last_of_contract, 1)));
 }

-TEST(BytecodeTraceGenTest, basicLongerThanWindowSize)
+TEST(BytecodeTraceGenTest, BasicLongerThanWindowSize)
 {
     TestTraceContainer trace;
     BytecodeTraceBuilder builder;
@@ -181,7 +183,7 @@ TEST(BytecodeTraceGenTest, basicLongerThanWindowSize)
                           ROW_FIELD_EQ(R, bc_decomposition_last_of_contract, 1)));
 }

-TEST(BytecodeTraceGenTest, multipleEvents)
+TEST(BytecodeTraceGenTest, MultipleEvents)
 {
     TestTraceContainer trace;
     BytecodeTraceBuilder builder;
@@ -247,7 +249,7 @@
     }
 }

-TEST(BytecodeTraceGenTest, basicHashing)
+TEST(BytecodeTraceGenTest, BasicHashing)
 {
     TestTraceContainer trace;
     BytecodeTraceBuilder builder;
@@ -283,5 +285,103 @@ TEST(BytecodeTraceGenTest, basicHashing)
                           ROW_FIELD_EQ(R, bc_hashing_packed_field, 20)));
 }

+// We build a random InstructionFetchingEvent for each wire opcode.
+// We then verify that the bytes (bd0, bd1, ...) correspond to the serialized instruction.
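+// Besides the raw bytes, we also check the derived columns: instr_size_in_bytes, exec_opcode,
+// indirect, pc and the operand columns op1..op7.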
+TEST(BytecodeTraceGenTest, InstrDecompositionInBytesEachOpcode)
+{
+    using simulation::Instruction;
+    using simulation::InstructionFetchingEvent;
+    using C = Column;
+
+    TestTraceContainer trace;
+    BytecodeTraceBuilder builder;
+
+    constexpr std::array bd_columns = {
+        C::instr_fetching_bd0,  C::instr_fetching_bd1,  C::instr_fetching_bd2,  C::instr_fetching_bd3,
+        C::instr_fetching_bd4,  C::instr_fetching_bd5,  C::instr_fetching_bd6,  C::instr_fetching_bd7,
+        C::instr_fetching_bd8,  C::instr_fetching_bd9,  C::instr_fetching_bd10, C::instr_fetching_bd11,
+        C::instr_fetching_bd12, C::instr_fetching_bd13, C::instr_fetching_bd14, C::instr_fetching_bd15,
+        C::instr_fetching_bd16, C::instr_fetching_bd17, C::instr_fetching_bd18, C::instr_fetching_bd19,
+        C::instr_fetching_bd20, C::instr_fetching_bd21, C::instr_fetching_bd22, C::instr_fetching_bd23,
+        C::instr_fetching_bd24, C::instr_fetching_bd25, C::instr_fetching_bd26, C::instr_fetching_bd27,
+        C::instr_fetching_bd28, C::instr_fetching_bd29, C::instr_fetching_bd30, C::instr_fetching_bd31,
+        C::instr_fetching_bd32, C::instr_fetching_bd33, C::instr_fetching_bd34, C::instr_fetching_bd35,
+        C::instr_fetching_bd36,
+    };
+
+    constexpr std::array operand_columns = {
+        C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, C::instr_fetching_op4,
+        C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
+    };
+
+    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
+
+    std::vector<InstructionFetchingEvent> events;
+    events.reserve(num_opcodes);
+    std::vector<Instruction> instructions;
+    instructions.reserve(num_opcodes);
+    std::vector<uint32_t> pcs;
+    pcs.reserve(num_opcodes);
+    std::vector<uint8_t> bytecode;
+    bytecode.reserve(1024); // Rough estimate
+
+    uint32_t pc = 0;
+    for (size_t i = 0; i < num_opcodes; i++) {
+        const auto w_opcode = static_cast<WireOpCode>(i);
+        const auto instr = testing::random_instruction(w_opcode);
+        const auto instr_encoded = instr.serialize();
+        instructions.emplace_back(instr);
+        pcs.emplace_back(pc);
+        pc += instr_encoded.size();
+        bytecode.insert(bytecode.end(),
+                        std::make_move_iterator(instr_encoded.begin()),
+                        std::make_move_iterator(instr_encoded.end()));
+    }
+
+    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
+    for (size_t i = 0; i < num_opcodes; i++) {
+        events.emplace_back(InstructionFetchingEvent{
+            .bytecode_id = 1,
+            .pc = pcs.at(i),
+            .instruction = instructions.at(i),
+            .bytecode = bytecode_ptr,
+        });
+    }
+
+    builder.process_instruction_fetching(events, trace);
+
+    for (uint32_t i = 0; i < num_opcodes; i++) {
+        const auto instr = instructions.at(i);
+        const auto instr_encoded = instr.serialize();
+        const auto w_opcode = static_cast<WireOpCode>(i);
+
+        // Check size_in_bytes column
+        const auto expected_size_in_bytes = WIRE_INSTRUCTION_SPEC.at(w_opcode).size_in_bytes;
+        ASSERT_EQ(instr_encoded.size(), expected_size_in_bytes);
+        EXPECT_EQ(FF(expected_size_in_bytes), trace.get(C::instr_fetching_instr_size_in_bytes, i));
+
+        // Inspect each byte
+        for (size_t j = 0; j < static_cast<size_t>(expected_size_in_bytes); j++) {
+            EXPECT_EQ(FF(instr_encoded.at(j)), trace.get(bd_columns.at(j), i));
+        }
+
+        // Check execution opcode
+        EXPECT_EQ(FF(static_cast<uint8_t>(WIRE_INSTRUCTION_SPEC.at(w_opcode).exec_opcode)),
+                  trace.get(C::instr_fetching_exec_opcode, i));
+
+        // Check indirect
+        EXPECT_EQ(FF(instr.indirect), trace.get(C::instr_fetching_indirect, i));
+
+        // Check PCs
+        EXPECT_EQ(FF(pcs.at(i)), trace.get(C::instr_fetching_pc, i));
+
+        // Check operands
+        size_t operand_idx = 0;
+        for (const auto& operand : instr.operands) {
+            EXPECT_EQ(FF(operand), trace.get(operand_columns.at(operand_idx++), i));
+        }
+    }
+}
+
 } // namespace
 } // namespace bb::avm2::tracegen
diff --git a/barretenberg/cpp/src/barretenberg/vm2/tracegen/precomputed_trace.cpp b/barretenberg/cpp/src/barretenberg/vm2/tracegen/precomputed_trace.cpp
index 1d3aa84b6c6..a1dab47e389 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/tracegen/precomputed_trace.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/tracegen/precomputed_trace.cpp
@@ -206,6 +206,9 @@ void PrecomputedTraceBuilder::process_wire_instruction_spec(TraceContainer& trac
         trace.set(C::precomputed_exec_opcode,
                   static_cast<uint32_t>(wire_opcode),
                   static_cast<uint8_t>(wire_instruction_spec.exec_opcode));
+        trace.set(C::precomputed_instr_size_in_bytes,
+                  static_cast<uint32_t>(wire_opcode),
+                  wire_instruction_spec.size_in_bytes);
     }
 }
diff --git a/barretenberg/cpp/src/barretenberg/vm2/tracegen_helper.cpp b/barretenberg/cpp/src/barretenberg/vm2/tracegen_helper.cpp
index f32947cb32a..9701ca7e768 100644
--- a/barretenberg/cpp/src/barretenberg/vm2/tracegen_helper.cpp
+++ b/barretenberg/cpp/src/barretenberg/vm2/tracegen_helper.cpp
@@ -246,7 +246,7 @@ TraceContainer AvmTraceGenHelper::generate_trace(EventsContainer&& events)
         std::make_unique>(),
         std::make_unique>(),
         std::make_unique>(),
-        std::make_unique>(),
+        std::make_unique>(),
         std::make_unique>(),
         std::make_unique>(),
         std::make_unique>(),
diff --git a/boxes/bin.js b/boxes/bin.js
index 858c1142fbb..df9557a8c59 100755
--- a/boxes/bin.js
+++ b/boxes/bin.js
@@ -13,7 +13,7 @@ const getLatestStable = async () => {
   const { data } = await axios.get(
     `https://api.github.com/repos/AztecProtocol/aztec-packages/releases`
   );
-  return data[0].tag_name.split("-v")[1];
+  return data[0].tag_name.replace(/^v/, "");
 };

 program
@@ -74,10 +74,10 @@ program
     global.latestStable = await getLatestStable();
     global.version = version || global.latestStable;

-    // if the user has set a semver version (matches the regex), fetch that tag (i.e. aztec-packages-v0.23.0)
+    // if the user has set a semver version (matches the regex), fetch that tag (i.e. v0.23.0)
     // otherwise use the version as the tag
     global.tag = global.version.match(/^\d+\.\d+\.\d+$/)
-      ? `aztec-packages-v${global.version}`
+      ? `v${global.version}`
       : global.version;

     global.debug(`Version: ${global.version}`);
diff --git a/boxes/bin.test.js b/boxes/bin.test.js
index 23b5720c17d..09d717ef8d2 100644
--- a/boxes/bin.test.js
+++ b/boxes/bin.test.js
@@ -19,7 +19,7 @@ const getLatestStable = async () => {
 const version = await getLatestStable();
 const tag = version.match(/^\d+\.\d+\.\d+$/)
-  ? `aztec-packages-v${version}`
+  ?
`v${version}` : version; describe("Token contract", () => { diff --git a/boxes/boxes/vite/package.json b/boxes/boxes/vite/package.json index f8ed2fa832f..5b401630da3 100644 --- a/boxes/boxes/vite/package.json +++ b/boxes/boxes/vite/package.json @@ -18,12 +18,7 @@ "dependencies": { "@aztec/accounts": "latest", "@aztec/aztec.js": "latest", - "@aztec/bb-prover": "latest", - "@aztec/key-store": "latest", - "@aztec/kv-store": "latest", - "@aztec/protocol-contracts": "latest", "@aztec/pxe": "latest", - "@aztec/simulator": "latest", "@aztec/stdlib": "latest", "react": "^18.3.1", "react-dom": "^18.3.1", diff --git a/boxes/boxes/vite/src/config.ts b/boxes/boxes/vite/src/config.ts index 992a6e7c4c6..5cdefcbb889 100644 --- a/boxes/boxes/vite/src/config.ts +++ b/boxes/boxes/vite/src/config.ts @@ -3,17 +3,13 @@ import { getSchnorrAccount } from "@aztec/accounts/schnorr/lazy"; import { AccountWalletWithSecretKey, createAztecNodeClient, - createLogger, } from "@aztec/aztec.js"; -import { BBWASMLazyPrivateKernelProver } from "@aztec/bb-prover/wasm/lazy"; -import { KeyStore } from "@aztec/key-store"; -import { createStore } from "@aztec/kv-store/indexeddb"; -import { L2TipsStore } from "@aztec/kv-store/stores"; -import { PXEServiceConfig, getPXEServiceConfig } from "@aztec/pxe/config"; -import { KVPxeDatabase } from "@aztec/pxe/database"; -import { PXEService } from "@aztec/pxe/service"; -import { WASMSimulator } from "@aztec/simulator/client"; -import { LazyProtocolContractsProvider } from "@aztec/protocol-contracts/providers/lazy"; +import { + PXEServiceConfig, + getPXEServiceConfig, + PXEService, + createPXEService, +} from "@aztec/pxe/client/lazy"; export class PrivateEnv { pxe: PXEService; @@ -24,44 +20,15 @@ export class PrivateEnv { async init() { const nodeURL = process.env.AZTEC_NODE_URL ?? "http://localhost:8080"; + const aztecNode = await createAztecNodeClient(nodeURL); const config = getPXEServiceConfig(); config.dataDirectory = "pxe"; - const aztecNode = await createAztecNodeClient(nodeURL); - const simulationProvider = new WASMSimulator(); - const proofCreator = new BBWASMLazyPrivateKernelProver( - simulationProvider, - 16, - ); const l1Contracts = await aztecNode.getL1ContractAddresses(); const configWithContracts = { ...config, l1Contracts, } as PXEServiceConfig; - - const store = await createStore( - "pxe_data", - configWithContracts, - createLogger("pxe:data:idb"), - ); - - const keyStore = new KeyStore(store); - - const db = await KVPxeDatabase.create(store); - const tips = new L2TipsStore(store, "pxe"); - - const protocolContractsProvider = new LazyProtocolContractsProvider(); - - this.pxe = new PXEService( - keyStore, - aztecNode, - db, - tips, - proofCreator, - simulationProvider, - protocolContractsProvider, - config, - ); - await this.pxe.init(); + this.pxe = await createPXEService(aztecNode, configWithContracts); const [accountData] = await getInitialTestAccounts(); const account = await getSchnorrAccount( this.pxe, diff --git a/boxes/yarn.lock b/boxes/yarn.lock index f8b6145f10e..36ecf11634d 100644 --- a/boxes/yarn.lock +++ b/boxes/yarn.lock @@ -27,30 +27,6 @@ __metadata: languageName: node linkType: soft -"@aztec/bb-prover@link:../yarn-project/bb-prover::locator=aztec-app%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/bb-prover@link:../yarn-project/bb-prover::locator=aztec-app%40workspace%3A." 
- languageName: node - linkType: soft - -"@aztec/key-store@link:../yarn-project/key-store::locator=aztec-app%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/key-store@link:../yarn-project/key-store::locator=aztec-app%40workspace%3A." - languageName: node - linkType: soft - -"@aztec/kv-store@link:../yarn-project/kv-store::locator=aztec-app%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/kv-store@link:../yarn-project/kv-store::locator=aztec-app%40workspace%3A." - languageName: node - linkType: soft - -"@aztec/protocol-contracts@link:../yarn-project/protocol-contracts::locator=aztec-app%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/protocol-contracts@link:../yarn-project/protocol-contracts::locator=aztec-app%40workspace%3A." - languageName: node - linkType: soft - "@aztec/pxe@link:../yarn-project/pxe::locator=aztec-app%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/pxe@link:../yarn-project/pxe::locator=aztec-app%40workspace%3A." @@ -106,12 +82,6 @@ __metadata: languageName: unknown linkType: soft -"@aztec/simulator@link:../yarn-project/simulator::locator=aztec-app%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/simulator@link:../yarn-project/simulator::locator=aztec-app%40workspace%3A." - languageName: node - linkType: soft - "@aztec/stdlib@link:../yarn-project/stdlib::locator=aztec-app%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/stdlib@link:../yarn-project/stdlib::locator=aztec-app%40workspace%3A." @@ -144,12 +114,7 @@ __metadata: dependencies: "@aztec/accounts": "npm:latest" "@aztec/aztec.js": "npm:latest" - "@aztec/bb-prover": "npm:latest" - "@aztec/key-store": "npm:latest" - "@aztec/kv-store": "npm:latest" - "@aztec/protocol-contracts": "npm:latest" "@aztec/pxe": "npm:latest" - "@aztec/simulator": "npm:latest" "@aztec/stdlib": "npm:latest" "@eslint/js": "npm:^9.13.0" "@types/react": "npm:^18.3.12" diff --git a/ci3/bootstrap_ec2 b/ci3/bootstrap_ec2 index 01ab7462c27..2993b8dac53 100755 --- a/ci3/bootstrap_ec2 +++ b/ci3/bootstrap_ec2 @@ -104,8 +104,6 @@ container_script=$(cat < aztec-up diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 7ef7b2bcd96..43de69519ee 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -102,7 +102,7 @@ const config = { fs.readFileSync(aztecVersionPath).toString() )["."]; return { - "aztec-packages": `aztec-packages-v${aztecVersion}`, + "aztec-packages": `v${aztecVersion}`, }; } catch (err) { throw new Error( diff --git a/docs/scripts/build.sh b/docs/scripts/build.sh index d884bcdf3d5..2a8f4332a0f 100755 --- a/docs/scripts/build.sh +++ b/docs/scripts/build.sh @@ -20,7 +20,7 @@ if [ -n "$NETLIFY" ]; then echo Working dir $(pwd) # Make sure the latest tag is available for loading code snippets from it - LAST_TAG="aztec-packages-v$(jq -r '.["."]' .release-please-manifest.json)" + LAST_TAG="v$(jq -r '.["."]' .release-please-manifest.json)" echo Fetching latest released tag $LAST_TAG... 
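+  # Fetch only this single tag (src:dst refspec) rather than the full history.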
git fetch origin refs/tags/$LAST_TAG:refs/tags/$LAST_TAG diff --git a/docs/src/preprocess/include_version.js b/docs/src/preprocess/include_version.js index 529e93d0adb..caa2ababe59 100644 --- a/docs/src/preprocess/include_version.js +++ b/docs/src/preprocess/include_version.js @@ -15,7 +15,7 @@ async function getVersions() { fs.readFileSync(aztecVersionPath).toString() )["."]; versions = { - aztec: `aztec-packages-v${aztecVersion}`, + aztec: `v${aztecVersion}`, aztec_short: aztecVersion, }; } catch (err) { diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 4e0a8737211..a1ef776f34a 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = d921941f82be7434d62f9a1dab7163b7b943ca6c + commit = a0b8aaab5ce31e64a3abb8487611f6bca4d21090 method = merge cmdver = 0.4.6 - parent = bb8d41c2fadbb9734328ccf96683f56a4f9f2b5c + parent = 5aad38525330d285aa0331ccb2fc607aa3e71b73 diff --git a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr index 1211917c6d3..d6a3a330882 100644 --- a/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr +++ b/noir-projects/aztec-nr/aztec/src/context/call_interfaces.nr @@ -484,6 +484,17 @@ impl PublicVoidCallInterface { true, ) } + + pub fn set_as_teardown(self, context: &mut PrivateContext) { + let args_hash = hash_args(self.args); + execution_cache::store(self.args); + context.set_public_teardown_function_with_args_hash( + self.target_contract, + self.selector, + args_hash, + false, + ) + } } impl CallInterface for PublicVoidCallInterface { diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index 059c482c6eb..695ccd0166e 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -14,9 +14,7 @@ pub contract FPC { use crate::{config::Config, utils::safe_cast_to_u128}; use aztec::{ macros::{functions::{initializer, internal, private, public}, storage::storage}, - protocol_types::{ - abis::function_selector::FunctionSelector, address::AztecAddress, traits::ToField, - }, + prelude::AztecAddress, state_vars::PublicImmutable, }; use token::Token; @@ -94,11 +92,9 @@ pub contract FPC { let refund_slot = token.prepare_private_balance_increase(user, user).call(&mut context); // Set a public teardown function in which the refund will be paid back to the user by finalizing the partial note. - context.set_public_teardown_function( - context.this_address(), - comptime { FunctionSelector::from_signature("complete_refund((Field),Field,u128)") }, - [accepted_asset.to_field(), refund_slot, max_fee as Field], - ); + FPC::at(context.this_address()) + ._complete_refund(accepted_asset, refund_slot, max_fee) + .set_as_teardown(&mut context); // Set the FPC as the fee payer of the tx. context.set_as_fee_payer(); @@ -110,7 +106,7 @@ pub contract FPC { // docs:start:complete_refund #[public] #[internal] - fn complete_refund(accepted_asset: AztecAddress, refund_slot: Field, max_fee: u128) { + fn _complete_refund(accepted_asset: AztecAddress, refund_slot: Field, max_fee: u128) { let tx_fee = safe_cast_to_u128(context.transaction_fee()); // 1. Check that user funded the fee payer contract with at least the transaction fee. 
@@ -159,13 +155,9 @@ pub contract FPC { // docs:end:public_call context.set_as_fee_payer(); - // TODO(#6277) for improving interface: - // FPC::at(context.this_address()).pay_refund(...).set_public_teardown_function(&mut context); - context.set_public_teardown_function( - context.this_address(), - comptime { FunctionSelector::from_signature("pay_refund((Field),u128,(Field))") }, - [context.msg_sender().to_field(), max_fee as Field, config.accepted_asset.to_field()], - ); + FPC::at(context.this_address()) + ._pay_refund(context.msg_sender(), max_fee, config.accepted_asset) + .set_as_teardown(&mut context); } /// Pays the refund to the `refund_recipient` as part of the public fee payment flow. The refund is the difference @@ -173,7 +165,7 @@ pub contract FPC { /// It's passed as an argument to avoid the need for another read from public storage. #[public] #[internal] - fn pay_refund(refund_recipient: AztecAddress, max_fee: u128, accepted_asset: AztecAddress) { + fn _pay_refund(refund_recipient: AztecAddress, max_fee: u128, accepted_asset: AztecAddress) { let actual_fee = safe_cast_to_u128(context.transaction_fee()); assert(actual_fee <= max_fee, "Max fee paid to the paymaster does not cover actual fee"); diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_in_private.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_in_private.nr index dd2056484d2..186e20b9f5c 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_in_private.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_in_private.nr @@ -103,7 +103,7 @@ unconstrained fn transfer_in_private_failure_on_behalf_of_self_non_zero_nonce() ); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_in_private_failure_on_behalf_of_other_without_approval() { // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. // The authwit check is in the beginning so we don't need to waste time on minting the NFT and transferring @@ -122,7 +122,7 @@ unconstrained fn transfer_in_private_failure_on_behalf_of_other_without_approval ); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_in_private_failure_on_behalf_of_other_wrong_caller() { // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
// The authwit check is in the beginning so we don't need to waste time on minting the NFT and transferring diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_public.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_public.nr index 83f3690a5dd..1b4bf9dc145 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_public.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_public.nr @@ -75,7 +75,7 @@ unconstrained fn transfer_to_public_failure_on_behalf_of_self_non_zero_nonce() { ); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_to_public_failure_on_behalf_of_other_invalid_designated_caller() { let (env, nft_contract_address, sender, recipient, token_id) = utils::setup_mint_and_transfer_to_private(/* with_account_contracts */ true); @@ -93,7 +93,7 @@ unconstrained fn transfer_to_public_failure_on_behalf_of_other_invalid_designate transfer_to_public_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_to_public_failure_on_behalf_of_other_no_approval() { let (env, nft_contract_address, sender, recipient, token_id) = utils::setup_mint_and_transfer_to_private(/* with_account_contracts */ true); diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index eefab876a16..515b9381946 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -48,6 +48,7 @@ pub contract Test { note_interface::NoteType, retrieved_note::RetrievedNote, }, + state_vars::storage::Storage as _, test::mocks::mock_struct::MockStruct, }; use dep::token_portal_content_hash_lib::{ @@ -139,8 +140,7 @@ pub contract Test { storage_slot: Field, ) { assert( - storage_slot - != aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -156,8 +156,7 @@ pub contract Test { #[private] fn call_get_notes(storage_slot: Field, active_or_nullified: bool) -> Field { assert( - storage_slot - != aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -177,8 +176,7 @@ pub contract Test { #[private] fn call_get_notes_many(storage_slot: Field, active_or_nullified: bool) -> [Field; 2] { assert( - storage_slot - != aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -195,8 +193,7 @@ pub contract Test { unconstrained fn call_view_notes(storage_slot: Field, active_or_nullified: bool) -> pub Field { assert( - storage_slot - != aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -215,8 +212,7 @@ pub contract Test { active_or_nullified: bool, ) -> pub [Field; 2] { assert( - storage_slot - != 
aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -233,8 +229,7 @@ pub contract Test { #[private] fn call_destroy_note(storage_slot: Field) { assert( - storage_slot - != aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant), + storage_slot != storage.example_constant.get_storage_slot(), "this storage slot is reserved for example_constant", ); @@ -376,8 +371,7 @@ pub contract Test { #[private] fn emit_encrypted_logs_nested(value: Field, owner: AztecAddress, sender: AztecAddress) { - let mut storage_slot = - aztec::state_vars::storage::Storage::get_storage_slot(storage.example_constant) + 1; + let mut storage_slot = storage.example_constant.get_storage_slot() + 1; Test::at(context.this_address()).call_create_note(value, owner, sender, storage_slot).call( &mut context, ); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/burn_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn_private.nr index 340421ebf8e..85d5bca73b6 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/burn_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/burn_private.nr @@ -77,7 +77,7 @@ unconstrained fn burn_private_failure_on_behalf_of_other_more_than_balance() { burn_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn burn_private_failure_on_behalf_of_other_without_approval() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint_to_public(/* with_account_contracts */ true); @@ -91,7 +91,7 @@ unconstrained fn burn_private_failure_on_behalf_of_other_without_approval() { burn_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn burn_private_failure_on_behalf_of_other_wrong_designated_caller() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint_to_public(/* with_account_contracts */ true); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_in_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_in_private.nr index 679265fbd0d..f802a04c3f8 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_in_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_in_private.nr @@ -67,7 +67,7 @@ unconstrained fn transfer_private_failure_on_behalf_of_more_than_balance() { transfer_private_from_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_private_failure_on_behalf_of_other_without_approval() { // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. 
let (env, token_contract_address, owner, recipient, _) = @@ -82,7 +82,7 @@ unconstrained fn transfer_private_failure_on_behalf_of_other_without_approval() transfer_private_from_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_private_failure_on_behalf_of_other_wrong_caller() { // Setup with account contracts. Slower since we actually deploy them, but needed for authwits. let (env, token_contract_address, owner, recipient, _) = diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr index 13f9ba3c238..671c9da4d2b 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr @@ -97,7 +97,7 @@ unconstrained fn transfer_to_public_failure_on_behalf_of_other_more_than_balance transfer_to_public_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_to_public_failure_on_behalf_of_other_invalid_designated_caller() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint_to_private(/* with_account_contracts */ true); @@ -120,7 +120,7 @@ unconstrained fn transfer_to_public_failure_on_behalf_of_other_invalid_designate transfer_to_public_call_interface.call(&mut env.private()); } -#[test(should_fail_with = "Authorization not found for message hash")] +#[test(should_fail_with = "Unknown auth witness for message hash")] unconstrained fn transfer_to_public_failure_on_behalf_of_other_no_approval() { let (env, token_contract_address, owner, recipient, mint_amount) = utils::setup_and_mint_to_private(/* with_account_contracts */ true); diff --git a/noir/bb-version b/noir/bb-version index fac6d18325a..548a9079fc9 100644 --- a/noir/bb-version +++ b/noir/bb-version @@ -1 +1 @@ -0.72.1 +0.77.1 diff --git a/noir/noir-repo/.github/actions/download-noir-execute/action.yml b/noir/noir-repo/.github/actions/download-noir-execute/action.yml new file mode 100644 index 00000000000..470edc04538 --- /dev/null +++ b/noir/noir-repo/.github/actions/download-noir-execute/action.yml @@ -0,0 +1,18 @@ +name: Download noir-execute +description: Downloads the noir-execute binary from an artifact and adds it to the path + +runs: + using: composite + steps: + - name: Download noir-execute binary + uses: actions/download-artifact@v4 + with: + name: noir-execute + path: ./noir-execute + + - name: Set noir-execute on PATH + shell: bash + run: | + noir_binary="${{ github.workspace }}/noir-execute/noir-execute" + chmod +x $noir_binary + echo "$(dirname $noir_binary)" >> $GITHUB_PATH diff --git a/noir/noir-repo/.github/benchmark_projects.yml b/noir/noir-repo/.github/benchmark_projects.yml index 3a67bf99e8d..5d9266a2d1a 100644 --- a/noir/noir-repo/.github/benchmark_projects.yml +++ b/noir/noir-repo/.github/benchmark_projects.yml @@ -1,4 +1,4 @@ -define: &AZ_COMMIT a90f08e245add379fa0257c81f8e2819beb190cb +define: &AZ_COMMIT 3b981f9217f9b859bdfbcdba2f5c080392c98da6 projects: private-kernel-inner: repo: AztecProtocol/aztec-packages @@ -16,7 +16,7 @@ projects: num_runs: 5 timeout: 4 compilation-timeout: 1.2 - execution-timeout: 0.02 + 
execution-timeout: 0.04 compilation-memory-limit: 250 execution-memory-limit: 230 private-kernel-reset: @@ -35,19 +35,19 @@ projects: path: noir-projects/noir-protocol-circuits/crates/rollup-base-private num_runs: 5 timeout: 15 - compilation-timeout: 10 - execution-timeout: 0.5 - compilation-memory-limit: 1100 - execution-memory-limit: 500 + compilation-timeout: 20 + execution-timeout: 1 + compilation-memory-limit: 1500 + execution-memory-limit: 650 rollup-base-public: repo: AztecProtocol/aztec-packages ref: *AZ_COMMIT path: noir-projects/noir-protocol-circuits/crates/rollup-base-public num_runs: 5 timeout: 15 - compilation-timeout: 8 - execution-timeout: 0.4 - compilation-memory-limit: 1000 + compilation-timeout: 15 + execution-timeout: 0.75 + compilation-memory-limit: 1500 execution-memory-limit: 500 rollup-block-root-empty: repo: AztecProtocol/aztec-packages @@ -65,7 +65,7 @@ projects: cannot_execute: true num_runs: 1 timeout: 60 - compilation-timeout: 110 + compilation-timeout: 135 compilation-memory-limit: 8000 rollup-block-root: repo: AztecProtocol/aztec-packages @@ -73,7 +73,7 @@ projects: path: noir-projects/noir-protocol-circuits/crates/rollup-block-root num_runs: 1 timeout: 60 - compilation-timeout: 110 + compilation-timeout: 135 execution-timeout: 40 compilation-memory-limit: 8000 execution-memory-limit: 1500 diff --git a/noir/noir-repo/.github/critical_libraries_status/noir-lang/keccak256/.failures.jsonl b/noir/noir-repo/.github/critical_libraries_status/noir-lang/keccak256/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/noir/noir-repo/.github/critical_libraries_status/noir-lang/sha512/.failures.jsonl b/noir/noir-repo/.github/critical_libraries_status/noir-lang/sha512/.failures.jsonl new file mode 100644 index 00000000000..e69de29bb2d diff --git a/noir/noir-repo/.github/scripts/integration-test-node.sh b/noir/noir-repo/.github/scripts/integration-test-node.sh index b7f00c65620..0d070b8001d 100755 --- a/noir/noir-repo/.github/scripts/integration-test-node.sh +++ b/noir/noir-repo/.github/scripts/integration-test-node.sh @@ -1,5 +1,5 @@ #!/bin/bash set -eu -apt-get install libc++-dev -y +apt-get install libc6 libstdc++6 -y yarn workspace integration-tests test:node diff --git a/noir/noir-repo/.github/workflows/deny.yml b/noir/noir-repo/.github/workflows/deny.yml index 11dbc3eef4b..c1b1da95ea2 100644 --- a/noir/noir-repo/.github/workflows/deny.yml +++ b/noir/noir-repo/.github/workflows/deny.yml @@ -21,6 +21,6 @@ jobs: runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - - uses: EmbarkStudios/cargo-deny-action@v1 + - uses: EmbarkStudios/cargo-deny-action@8d73959fce1cdc8989f23fdf03bec6ae6a6576ef with: - command: check all \ No newline at end of file + command: check all diff --git a/noir/noir-repo/.github/workflows/publish-nargo.yml b/noir/noir-repo/.github/workflows/publish-nargo.yml index a3bd1ee6ae3..a0c9247f0a4 100644 --- a/noir/noir-repo/.github/workflows/publish-nargo.yml +++ b/noir/noir-repo/.github/workflows/publish-nargo.yml @@ -54,6 +54,9 @@ jobs: cargo build --package nargo_cli --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" cargo build --package noir_profiler --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" cargo build --package noir_inspector --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Package artifacts run: | mkdir dist 
@@ -236,4 +239,3 @@ jobs: make_latest: false overwrite: true tag: ${{ format('{0}-{1}', 'nightly', steps.date.outputs.date) }} - diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml index 146dc73f225..4bd3605498f 100644 --- a/noir/noir-repo/.github/workflows/reports.yml +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -42,6 +42,8 @@ jobs: - name: Build Nargo run: cargo build --package nargo_cli --release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Package artifacts run: | @@ -58,7 +60,7 @@ jobs: compare_gates_reports: name: Circuit sizes needs: [build-nargo] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: pull-requests: write @@ -305,7 +307,7 @@ jobs: repository: ${{ matrix.repo }} path: test-repo ref: ${{ matrix.ref }} - + - name: Fetch noir dependencies working-directory: ./test-repo/${{ matrix.path }} run: | @@ -341,7 +343,7 @@ jobs: path: ${{ steps.compilation_report.outputs.report_path }} retention-days: 3 overwrite: true - + - name: Generate execution report id: execution_report working-directory: ./test-repo/${{ matrix.path }} @@ -359,7 +361,7 @@ jobs: echo "report_name=$REPORT_NAME" >> $GITHUB_OUTPUT echo "report_path=$REPORT_PATH" >> $GITHUB_OUTPUT - + - name: Upload execution report if: ${{ !matrix.cannot_execute }} uses: actions/upload-artifact@v4 @@ -716,7 +718,7 @@ jobs: - upload_compilation_memory_report - upload_execution_report - upload_execution_memory_report - + steps: - name: Report overall success run: | diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index ba8e5f78942..d65533d11cb 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -64,6 +64,8 @@ jobs: - name: Build Nargo run: cargo build --package nargo_cli --release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Package artifacts run: | @@ -78,6 +80,39 @@ jobs: path: ./dist/* retention-days: 3 + build-noir-execute: + runs-on: ubuntu-22.04 + timeout-minutes: 30 + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.85.0 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build noir-execute + run: cargo build --package noir_artifact_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/noir-execute ./dist/noir-execute + 7z a -ttar -so -an ./dist/* | 7z a -si ./noir-execute-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: noir-execute + path: ./dist/* + retention-days: 3 + build-noirc-abi: runs-on: ubuntu-22.04 timeout-minutes: 30 @@ -361,7 +396,7 @@ jobs: test-integration-node: name: Integration Tests (Node) - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi] timeout-minutes: 30 @@ -373,6 +408,7 @@ jobs: run: | ./scripts/install_bb.sh echo "$HOME/.bb/" >> $GITHUB_PATH + sudo apt-get install libc6 libstdc++6 -y - name: Download nargo binary uses: ./.github/actions/download-nargo @@ -456,8 +492,8 @@ jobs: test-examples: name: Example scripts - runs-on: ubuntu-22.04 - needs: [build-nargo] + runs-on: ubuntu-24.04 + needs: [build-nargo, build-noir-execute] timeout-minutes: 30 steps: @@ -473,10 +509,14 @@ jobs: run: | 
./scripts/install_bb.sh echo "$HOME/.bb/" >> $GITHUB_PATH + sudo apt-get install libc6 libstdc++6 -y - name: Download nargo binary uses: ./.github/actions/download-nargo + - name: Download noir-execute binary + uses: ./.github/actions/download-noir-execute + - name: Run `prove_and_verify` working-directory: ./examples/prove_and_verify run: ./test.sh @@ -485,6 +525,10 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + - name: Run `oracle_transcript` + working-directory: ./examples/oracle_transcript + run: ./test.sh + external-repo-checks: needs: [build-nargo, critical-library-list] runs-on: ubuntu-22.04 @@ -493,7 +537,7 @@ jobs: fail-fast: false matrix: include: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} - + name: Check external repo - ${{ matrix.repo }}/${{ matrix.path }} steps: - name: Checkout diff --git a/noir/noir-repo/.gitignore b/noir/noir-repo/.gitignore index 1d76c98a1ac..334901860d9 100644 --- a/noir/noir-repo/.gitignore +++ b/noir/noir-repo/.gitignore @@ -56,3 +56,6 @@ codegen **/cspell.json !./cspell.json + +mutants.out +mutants.out.old diff --git a/noir/noir-repo/CRITICAL_NOIR_LIBRARIES b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES index 7637d9ac6df..442f5194969 100644 --- a/noir/noir-repo/CRITICAL_NOIR_LIBRARIES +++ b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES @@ -12,3 +12,6 @@ https://github.com/noir-lang/noir_string_search https://github.com/noir-lang/sparse_array https://github.com/noir-lang/noir_rsa https://github.com/noir-lang/noir_json_parser +https://github.com/noir-lang/sha256 +https://github.com/noir-lang/sha512 +https://github.com/noir-lang/keccak256 diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 7961f8ecc80..ca3fe90ee5d 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -6,14 +6,23 @@ version = 4 name = "acir" version = "1.0.0-beta.3" dependencies = [ + "acir", "acir_field", "base64 0.21.7", "bincode", "brillig", + "color-eyre", "criterion", "flate2", "fxhash", + "noir_protobuf", + "num-bigint", "pprof", + "proptest", + "proptest-derive", + "prost", + "prost-build", + "protoc-prebuilt", "serde", "serde-big-array", "serde-generate", @@ -789,6 +798,8 @@ name = "brillig" version = "1.0.0-beta.3" dependencies = [ "acir_field", + "proptest", + "proptest-derive", "serde", ] @@ -2828,12 +2839,12 @@ dependencies = [ [[package]] name = "light-poseidon" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9a85a9752c549ceb7578064b4ed891179d20acd85f27318573b64d2d7ee7ee" +checksum = "39e3d87542063daaccbfecd78b60f988079b6ec4e089249658b9455075c78d42" dependencies = [ - "ark-bn254 0.4.0", - "ark-ff 0.4.2", + "ark-bn254 0.5.0", + "ark-ff 0.5.0", "num-bigint", "thiserror", ] @@ -3000,6 +3011,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + [[package]] name = "nargo" version = "1.0.0-beta.3" @@ -3030,9 +3047,7 @@ name = "nargo_cli" version = "1.0.0-beta.3" dependencies = [ "acvm", - "ark-bn254 0.4.0", "ark-bn254 0.5.0", - "ark-ff 0.4.2", "assert_cmd", "assert_fs", "async-lsp", @@ -3302,6 +3317,14 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "noir_protobuf" +version = "1.0.0-beta.3" +dependencies = [ + "color-eyre", + "prost", +] + [[package]] name = "noir_wasm" version = "1.0.0-beta.3" @@ -3963,6 +3986,16 @@ dependencies = [ 
"termtree", ] +[[package]] +name = "prettyplease" +version = "0.2.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1ccf34da56fc294e7d4ccf69a85992b7dfb826b7cf57bac6a70bba3494cc08a" +dependencies = [ + "proc-macro2", + "syn 2.0.96", +] + [[package]] name = "prettytable-rs" version = "0.10.0" @@ -4032,6 +4065,68 @@ dependencies = [ "syn 2.0.96", ] +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" +dependencies = [ + "heck 0.5.0", + "itertools 0.13.0", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 2.0.96", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "prost-types" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" +dependencies = [ + "prost", +] + +[[package]] +name = "protoc-prebuilt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d85d4641fe3b8c6e853dfd09fe35379bc6b6e66bd692ac29ed4f7087de69ed5" +dependencies = [ + "ureq", + "zip", +] + [[package]] name = "quick-error" version = "1.2.3" @@ -5550,6 +5645,21 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "ureq" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" +dependencies = [ + "base64 0.22.1", + "log", + "once_cell", + "rustls", + "rustls-pki-types", + "url", + "webpki-roots", +] + [[package]] name = "url" version = "2.5.4" @@ -6132,6 +6242,18 @@ dependencies = [ "syn 2.0.96", ] +[[package]] +name = "zip" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", +] + [[package]] name = "zkhash" version = "0.2.0" diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 9c5bf1351d1..308609aa511 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -37,6 +37,7 @@ members = [ "acvm-repo/bn254_blackbox_solver", # Utility crates "utils/iter-extended", + "utils/protobuf", ] default-members = [ "tooling/nargo_cli", @@ -94,6 +95,7 @@ noirc_abi = { path = "tooling/noirc_abi" } noirc_artifacts = { path = "tooling/noirc_artifacts" } noirc_artifacts_info = { path = "tooling/noirc_artifacts_info" } noir_artifact_cli = { path = "tooling/artifact_cli" } +noir_protobuf = { path = "utils/protobuf" } # Arkworks ark-bn254 = { version = "^0.5.0", default-features = false, features = [ @@ -137,6 +139,11 @@ criterion = "0.5.0" # 
https://github.com/tikv/pprof-rs/pull/172 pprof = { version = "0.14", features = ["flamegraph", "criterion"] } +# Protobuf +prost = "0.13" +prost-build = "0.13" +protoc-prebuilt = "0.3" + cfg-if = "1.0.0" dirs = "4" serde = { version = "1.0.136", features = ["derive"] } diff --git a/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml b/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml index 0481c539805..1d86c3e5d5b 100644 --- a/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml +++ b/noir/noir-repo/EXTERNAL_NOIR_LIBRARIES.yml @@ -1,4 +1,4 @@ -define: &AZ_COMMIT a90f08e245add379fa0257c81f8e2819beb190cb +define: &AZ_COMMIT 3b981f9217f9b859bdfbcdba2f5c080392c98da6 libraries: noir_check_shuffle: repo: noir-lang/noir_check_shuffle @@ -29,7 +29,7 @@ libraries: timeout: 250 noir_base64: repo: noir-lang/noir_base64 - timeout: 3 + timeout: 5 noir_string_search: repo: noir-lang/noir_string_search timeout: 2 @@ -45,6 +45,12 @@ libraries: sha256: repo: noir-lang/sha256 timeout: 3 + sha512: + repo: noir-lang/sha512 + timeout: 30 + keccak256: + repo: noir-lang/keccak256 + timeout: 3 aztec_nr: repo: AztecProtocol/aztec-packages ref: *AZ_COMMIT diff --git a/noir/noir-repo/acvm-repo/acir/Cargo.toml b/noir/noir-repo/acvm-repo/acir/Cargo.toml index 2b15c2abf09..5d7b347c511 100644 --- a/noir/noir-repo/acvm-repo/acir/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acir/Cargo.toml @@ -18,15 +18,26 @@ workspace = true [dependencies] acir_field.workspace = true brillig.workspace = true +noir_protobuf.workspace = true + +color-eyre.workspace = true serde.workspace = true thiserror.workspace = true flate2.workspace = true bincode.workspace = true base64.workspace = true +prost.workspace = true serde-big-array = "0.5.1" strum = { workspace = true } strum_macros = { workspace = true } +proptest = { workspace = true, optional = true } +proptest-derive = { workspace = true, optional = true } + +[build-dependencies] +prost-build.workspace = true +protoc-prebuilt.workspace = true + [dev-dependencies] serde_json = "1.0" serde-reflection = "0.3.6" @@ -34,10 +45,15 @@ serde-generate = "0.25.1" fxhash.workspace = true criterion.workspace = true pprof.workspace = true +num-bigint.workspace = true + +acir = { path = ".", features = ["arb"] } # Self to turn on `arb`. [features] +default = [] bn254 = ["acir_field/bn254"] bls12_381 = ["acir_field/bls12_381"] +arb = ["proptest", "proptest-derive", "brillig/arb"] [[bench]] name = "serialization" diff --git a/noir/noir-repo/acvm-repo/acir/build.rs b/noir/noir-repo/acvm-repo/acir/build.rs new file mode 100644 index 00000000000..e52a06fbce4 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/build.rs @@ -0,0 +1,24 @@ +use std::path::Path; + +fn main() { + let (protoc_bin, include_dir) = + protoc_prebuilt::init("29.3").expect("failed to initialize protoc"); + + unsafe { + std::env::set_var("PROTOC", protoc_bin); + } + + prost_build::compile_protos( + &[ + // DTOs for a `Program`, which work with the types in `acir.cpp` + "./src/proto/program.proto", + // DTOs for the `WitnessStack`, which work with the types in `witness.cpp` + "./src/proto/acir/witness.proto", + // A pared down DTO for `Program`, so Barretenberg can ignore the Brillig part. + // This is only included to make sure it compiles. 
+ "./src/proto/acir/program.proto", + ], + &[Path::new("./src/proto"), include_dir.as_path()], + ) + .expect("failed to compile .proto schemas"); +} diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs index ef75d088f8c..972a08dd32a 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs @@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize}; /// Inputs for the Brillig VM. These are the initial inputs /// that the Brillig VM will use to start. #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BrilligInputs { Single(Expression), Array(Vec>), @@ -15,6 +16,7 @@ pub enum BrilligInputs { /// Outputs for the Brillig VM. Once the VM has completed /// execution, this will be the object that is returned. #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BrilligOutputs { Simple(Witness), Array(Vec), @@ -24,6 +26,7 @@ pub enum BrilligOutputs { /// a full Brillig function to be executed by the Brillig VM. /// This is stored separately on a program and accessed through a [BrilligPointer]. #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct BrilligBytecode { pub bytecode: Vec>, } @@ -32,6 +35,7 @@ pub struct BrilligBytecode { #[derive( Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash, Copy, Default, PartialOrd, Ord, )] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] #[serde(transparent)] pub struct BrilligFunctionId(pub u32); diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 68c3c832b5c..2f7bb92b184 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -2,8 +2,12 @@ pub mod black_box_functions; pub mod brillig; pub mod opcodes; -use crate::native_types::{Expression, Witness}; +use crate::{ + native_types::{Expression, Witness}, + proto::convert::ProtoSchema, +}; use acir_field::AcirField; +use noir_protobuf::ProtoCodec as _; pub use opcodes::Opcode; use thiserror::Error; @@ -26,6 +30,7 @@ use self::{brillig::BrilligBytecode, opcodes::BlockId}; /// into a proving system which supports PLONK, where arithmetic expressions have a /// finite fan-in. #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum ExpressionWidth { #[default] Unbounded, @@ -37,13 +42,15 @@ pub enum ExpressionWidth { /// A program represented by multiple ACIR circuits. The execution trace of these /// circuits is dictated by construction of the [crate::native_types::WitnessStack]. #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] -pub struct Program { +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] +pub struct Program { pub functions: Vec>, pub unconstrained_functions: Vec>, } #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] -pub struct Circuit { +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] +pub struct Circuit { // current_witness_index is the highest witness index in the circuit. The next witness to be added to this circuit // will take on this value. 
     // will take on this value. (The value is cached here as an optimization.)
     pub current_witness_index: u32,
@@ -70,12 +77,14 @@
 }

 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
+#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))]
 pub enum ExpressionOrMemory<F> {
     Expression(Expression<F>),
     Memory(BlockId),
 }

 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
+#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))]
 pub struct AssertionPayload<F> {
     pub error_selector: u64,
     pub payload: Vec<ExpressionOrMemory<F>>,
 }
@@ -137,6 +146,7 @@ pub struct ResolvedOpcodeLocation {
 }

 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
+#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))]
 /// Opcodes are locatable so that callers can
 /// map opcodes to debug information related to their context.
 pub enum OpcodeLocation {
@@ -218,7 +228,7 @@ impl std::fmt::Display for BrilligOpcodeLocation {
     }
 }

-impl<F> Circuit<F> {
+impl<F: AcirField> Circuit<F> {
     pub fn num_vars(&self) -> u32 {
         self.current_witness_index + 1
     }
@@ -237,9 +247,9 @@ impl<F: AcirField> Circuit<F> {
     }
 }

-impl<F: Serialize> Program<F> {
+impl<F: Serialize + AcirField> Program<F> {
     fn write<W: std::io::Write>(&self, writer: W) -> std::io::Result<()> {
-        let buf = bincode::serialize(self).unwrap();
+        let buf = self.bincode_serialize()?;
         let mut encoder = flate2::write::GzEncoder::new(writer, Compression::default());
         encoder.write_all(&buf)?;
         encoder.finish()?;
@@ -263,13 +273,39 @@ impl<F: Serialize + AcirField> Program<F> {
     }
 }

-impl<F: AcirField + for<'a> Deserialize<'a>> Program<F> {
+impl<F: Serialize> Program<F> {
+    /// Serialize the program using `bincode`, which is what we have to use until Barretenberg can read another format.
+    pub(crate) fn bincode_serialize(&self) -> std::io::Result<Vec<u8>> {
+        bincode::serialize(self).map_err(std::io::Error::other)
+    }
+}
+
+impl<F: for<'a> Deserialize<'a>> Program<F> {
+    pub(crate) fn bincode_deserialize(buf: &[u8]) -> std::io::Result<Self> {
+        bincode::deserialize(buf)
+            .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))
+    }
+}
+
+#[allow(dead_code)] // TODO: Remove once we switch to protobuf
+impl<F: AcirField> Program<F> {
+    /// Serialize the program using `protobuf`, which is what we try to replace `bincode` with.
+    pub(crate) fn proto_serialize(&self) -> Vec<u8> {
+        ProtoSchema::<F>::serialize_to_vec(self)
+    }
+
+    pub(crate) fn proto_deserialize(buf: &[u8]) -> std::io::Result<Self> {
+        ProtoSchema::<F>::deserialize_from_vec(buf)
+            .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e))
+    }
+}
+
+impl<F: AcirField + for<'a> Deserialize<'a>> Program<F> {
     fn read<R: std::io::Read>(reader: R) -> std::io::Result<Self> {
         let mut gz_decoder = flate2::read::GzDecoder::new(reader);
-        let mut buf_d = Vec::new();
-        gz_decoder.read_to_end(&mut buf_d)?;
-        bincode::deserialize(&buf_d)
-            .map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidInput, err))
+        let mut buf = Vec::new();
+        gz_decoder.read_to_end(&mut buf)?;
+        let program = Self::bincode_deserialize(&buf)?;
+        Ok(program)
     }

     /// Deserialize bytecode.
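+    /// The payload is the gzip-compressed `bincode` stream produced by `write` above.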
@@ -357,6 +393,7 @@ impl std::fmt::Debug for Program { } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct PublicInputs(pub BTreeSet); impl PublicInputs { @@ -477,4 +514,153 @@ mod tests { Program::deserialize_program(&zipped_bad_circuit); assert!(deserialization_result.is_err()); } + + /// Property based testing for serialization + mod props { + use acir_field::FieldElement; + use proptest::prelude::*; + use proptest::test_runner::{TestCaseResult, TestRunner}; + + use crate::circuit::Program; + use crate::native_types::{WitnessMap, WitnessStack}; + + // It's not possible to set the maximum size of collections via `ProptestConfig`, only an env var, + // because e.g. the `VecStrategy` uses `Config::default().max_default_size_range`. On top of that, + // `Config::default()` reads a static `DEFAULT_CONFIG`, which gets the env vars only once at the + // beginning, so we can't override this on a test-by-test basis, unless we use `fork`, + // which is a feature that is currently disabled, because it doesn't work with Wasm. + // We could add it as a `dev-dependency` just for this crate, but when I tried it just crashed. + // For now using a const so it's obvious we can't set it to different values for different tests. + const MAX_SIZE_RANGE: usize = 5; + const SIZE_RANGE_KEY: &str = "PROPTEST_MAX_DEFAULT_SIZE_RANGE"; + + // Define a wrapper around field so we can implement `Arbitrary`. + // NB there are other methods like `arbitrary_field_elements` around the codebase, + // but for `proptest_derive::Arbitrary` we need `F: AcirField + Arbitrary`. + acir_field::field_wrapper!(TestField, FieldElement); + + impl Arbitrary for TestField { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + any::().prop_map(|v| Self(FieldElement::from(v))).boxed() + } + } + + /// Override the maximum size of collections created by `proptest`. + fn run_with_max_size_range(cases: u32, f: F) + where + T: Arbitrary, + F: Fn(T) -> TestCaseResult, + { + let orig_size_range = std::env::var(SIZE_RANGE_KEY).ok(); + // The defaults are only read once. If they are already set, leave them be. + if orig_size_range.is_none() { + unsafe { + std::env::set_var(SIZE_RANGE_KEY, MAX_SIZE_RANGE.to_string()); + } + } + + let mut runner = TestRunner::new(ProptestConfig { cases, ..Default::default() }); + let result = runner.run(&any::(), f); + + // Restore the original. 
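+        // (`std::env::set_var` is `unsafe` because the environment is process-global
+        // mutable state, so writes can race with reads from other threads; hence the
+        // `cfg_attr` escape hatch on `forbid(unsafe_code)` in `lib.rs`.)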
+ unsafe { + std::env::set_var(SIZE_RANGE_KEY, orig_size_range.unwrap_or_default()); + } + + result.unwrap(); + } + + #[test] + fn prop_program_proto_roundtrip() { + run_with_max_size_range(100, |program: Program| { + let bz = Program::proto_serialize(&program); + let de = Program::proto_deserialize(&bz)?; + prop_assert_eq!(program, de); + Ok(()) + }); + } + + #[test] + fn prop_program_bincode_roundtrip() { + run_with_max_size_range(100, |program: Program| { + let bz = Program::bincode_serialize(&program)?; + let de = Program::bincode_deserialize(&bz)?; + prop_assert_eq!(program, de); + Ok(()) + }); + } + + #[test] + fn prop_program_roundtrip() { + run_with_max_size_range(10, |program: Program| { + let bz = Program::serialize_program(&program); + let de = Program::deserialize_program(&bz)?; + prop_assert_eq!(program, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_stack_proto_roundtrip() { + run_with_max_size_range(10, |witness: WitnessStack| { + let bz = WitnessStack::proto_serialize(&witness); + let de = WitnessStack::proto_deserialize(&bz)?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_stack_bincode_roundtrip() { + run_with_max_size_range(10, |witness: WitnessStack| { + let bz = WitnessStack::bincode_serialize(&witness)?; + let de = WitnessStack::bincode_deserialize(&bz)?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_stack_roundtrip() { + run_with_max_size_range(10, |witness: WitnessStack| { + let bz = Vec::::try_from(&witness)?; + let de = WitnessStack::try_from(bz.as_slice())?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_map_proto_roundtrip() { + run_with_max_size_range(10, |witness: WitnessMap| { + let bz = WitnessMap::proto_serialize(&witness); + let de = WitnessMap::proto_deserialize(&bz)?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_map_bincode_roundtrip() { + run_with_max_size_range(10, |witness: WitnessMap| { + let bz = WitnessMap::bincode_serialize(&witness)?; + let de = WitnessMap::bincode_deserialize(&bz)?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + + #[test] + fn prop_witness_map_roundtrip() { + run_with_max_size_range(10, |witness: WitnessMap| { + let bz = Vec::::try_from(witness.clone())?; + let de = WitnessMap::try_from(bz.as_slice())?; + prop_assert_eq!(witness, de); + Ok(()) + }); + } + } } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs index dec58b5f90b..41a42a14684 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs @@ -15,7 +15,8 @@ pub use black_box_function_call::{ }; pub use memory_operation::{BlockId, MemOp}; -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BlockType { Memory, CallData(u32), @@ -30,7 +31,8 @@ impl BlockType { #[allow(clippy::large_enum_variant)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] -pub enum Opcode { +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] +pub enum Opcode { /// An `AssertZero` opcode adds the constraint that `P(w) = 0`, where /// `w=(w_1,..w_n)` is a tuple of `n` witnesses, and `P` is a multi-variate /// polynomial of total degree at most `2`. 
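Every case in the `props` module above is the same serialize/decode/compare loop; only the codec pair changes. A minimal sketch of that shape, independent of the ACIR types (the `roundtrip` helper is illustrative, not part of the diff):

```rust
use proptest::prelude::*;
use proptest::test_runner::{TestCaseError, TestCaseResult, TestRunner};

/// Run `cases` random inputs through a serialize/deserialize pair and
/// require each value to survive the round trip unchanged.
fn roundtrip<T, S, D>(cases: u32, ser: S, de: D)
where
    T: Arbitrary + PartialEq + std::fmt::Debug,
    S: Fn(&T) -> Vec<u8>,
    D: Fn(&[u8]) -> std::io::Result<T>,
{
    let mut runner = TestRunner::new(ProptestConfig { cases, ..Default::default() });
    runner
        .run(&any::<T>(), |value| -> TestCaseResult {
            let bytes = ser(&value);
            let decoded = de(&bytes).map_err(|e| TestCaseError::fail(e.to_string()))?;
            prop_assert_eq!(value, decoded);
            Ok(())
        })
        .unwrap();
}
```

With such a helper, `prop_program_proto_roundtrip` would reduce to `roundtrip(100, Program::proto_serialize, Program::proto_deserialize)`, modulo the size-range override that `run_with_max_size_range` performs.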
diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 9cf31e94eb4..8c5ada3ee6e 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -10,12 +10,14 @@ use thiserror::Error; // So we need to supply how many bits of the witness is needed #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum ConstantOrWitnessEnum { Constant(F), Witness(Witness), } #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct FunctionInput { input: ConstantOrWitnessEnum, num_bits: u32, @@ -80,7 +82,7 @@ impl std::fmt::Display for FunctionInput { } #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] -pub enum BlackBoxFuncCall { +pub enum BlackBoxFuncCall { AES128Encrypt { inputs: Vec>, iv: Box<[FunctionInput; 16]>, @@ -214,7 +216,7 @@ pub enum BlackBoxFuncCall { }, } -impl BlackBoxFuncCall { +impl BlackBoxFuncCall { pub fn get_black_box_func(&self) -> BlackBoxFunc { match self { BlackBoxFuncCall::AES128Encrypt { .. } => BlackBoxFunc::AES128Encrypt, @@ -427,7 +429,7 @@ fn get_outputs_string(outputs: &[Witness]) -> String { } } -impl std::fmt::Display for BlackBoxFuncCall { +impl std::fmt::Display for BlackBoxFuncCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let uppercase_name = self.name().to_uppercase(); write!(f, "BLACKBOX::{uppercase_name} ")?; @@ -452,7 +454,7 @@ impl std::fmt::Display for BlackBoxFuncCall { } } -impl std::fmt::Debug for BlackBoxFuncCall { +impl std::fmt::Debug for BlackBoxFuncCall { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self, f) } @@ -506,3 +508,207 @@ mod tests { assert_eq!(opcode, recovered_opcode); } } + +#[cfg(feature = "arb")] +mod arb { + use acir_field::AcirField; + use proptest::prelude::*; + + use crate::native_types::Witness; + + use super::{BlackBoxFuncCall, FunctionInput}; + + // Implementing this separately because trying to derive leads to stack overflow. 
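+    // (Deriving would build one strategy type that nests every variant's strategy,
+    // including all the boxed `FunctionInput` arrays; assembling each case by hand
+    // and combining them with `prop_oneof!` keeps the nesting shallow.)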
+ impl Arbitrary for BlackBoxFuncCall + where + F: AcirField + Arbitrary, + { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + let input = any::>(); + let input_vec = any::>>(); + let input_arr_3 = any::; 3]>>(); + let input_arr_8 = any::; 8]>>(); + let input_arr_16 = any::; 16]>>(); + let input_arr_25 = any::; 25]>>(); + let input_arr_32 = any::; 32]>>(); + let input_arr_64 = any::; 64]>>(); + let witness = any::(); + let witness_vec = any::>(); + let witness_arr_8 = any::>(); + let witness_arr_25 = any::>(); + let witness_arr_32 = any::>(); + + let case_aes128_encrypt = ( + input_vec.clone(), + input_arr_16.clone(), + input_arr_16.clone(), + witness_vec.clone(), + ) + .prop_map(|(inputs, iv, key, outputs)| { + BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } + }); + + let case_and = (input.clone(), input.clone(), witness.clone()) + .prop_map(|(lhs, rhs, output)| BlackBoxFuncCall::AND { lhs, rhs, output }); + + let case_xor = (input.clone(), input.clone(), witness.clone()) + .prop_map(|(lhs, rhs, output)| BlackBoxFuncCall::XOR { lhs, rhs, output }); + + let case_range = input.clone().prop_map(|input| BlackBoxFuncCall::RANGE { input }); + + let case_blake2s = (input_vec.clone(), witness_arr_32.clone()) + .prop_map(|(inputs, outputs)| BlackBoxFuncCall::Blake2s { inputs, outputs }); + + let case_blake3 = (input_vec.clone(), witness_arr_32.clone()) + .prop_map(|(inputs, outputs)| BlackBoxFuncCall::Blake3 { inputs, outputs }); + + let case_ecdsa_secp256k1 = ( + input_arr_32.clone(), + input_arr_32.clone(), + input_arr_64.clone(), + input_arr_32.clone(), + witness.clone(), + ) + .prop_map( + |(public_key_x, public_key_y, signature, hashed_message, output)| { + BlackBoxFuncCall::EcdsaSecp256k1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } + }, + ); + + let case_ecdsa_secp256r1 = ( + input_arr_32.clone(), + input_arr_32.clone(), + input_arr_64.clone(), + input_arr_32.clone(), + witness.clone(), + ) + .prop_map( + |(public_key_x, public_key_y, signature, hashed_message, output)| { + BlackBoxFuncCall::EcdsaSecp256r1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } + }, + ); + + let case_multi_scalar_mul = ( + input_vec.clone(), + input_vec.clone(), + witness.clone(), + witness.clone(), + witness.clone(), + ) + .prop_map(|(points, scalars, w1, w2, w3)| { + BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs: (w1, w2, w3) } + }); + + let case_embedded_curve_add = ( + input_arr_3.clone(), + input_arr_3.clone(), + witness.clone(), + witness.clone(), + witness.clone(), + ) + .prop_map(|(input1, input2, w1, w2, w3)| { + BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs: (w1, w2, w3) } + }); + + let case_keccakf1600 = (input_arr_25.clone(), witness_arr_25.clone()) + .prop_map(|(inputs, outputs)| BlackBoxFuncCall::Keccakf1600 { inputs, outputs }); + + let case_recursive_aggregation = ( + input_vec.clone(), + input_vec.clone(), + input_vec.clone(), + input.clone(), + any::(), + ) + .prop_map( + |(verification_key, proof, public_inputs, key_hash, proof_type)| { + BlackBoxFuncCall::RecursiveAggregation { + verification_key, + proof, + public_inputs, + key_hash, + proof_type, + } + }, + ); + + let big_int_args = (any::(), any::(), any::()); + + let case_big_int_add = big_int_args + .prop_map(|(lhs, rhs, output)| BlackBoxFuncCall::BigIntAdd { lhs, rhs, output }); + + let case_big_int_sub = big_int_args + .prop_map(|(lhs, rhs, output)| 
BlackBoxFuncCall::BigIntSub { lhs, rhs, output }); + + let case_big_int_mul = big_int_args + .prop_map(|(lhs, rhs, output)| BlackBoxFuncCall::BigIntMul { lhs, rhs, output }); + + let case_big_int_div = big_int_args + .prop_map(|(lhs, rhs, output)| BlackBoxFuncCall::BigIntDiv { lhs, rhs, output }); + + let case_big_int_from_le_bytes = (input_vec.clone(), any::>(), any::()) + .prop_map(|(inputs, modulus, output)| BlackBoxFuncCall::BigIntFromLeBytes { + inputs, + modulus, + output, + }); + + let case_big_int_to_le_bytes = (any::(), witness_vec.clone()) + .prop_map(|(input, outputs)| BlackBoxFuncCall::BigIntToLeBytes { input, outputs }); + + let case_poseidon2_permutation = (input_vec.clone(), witness_vec.clone(), any::()) + .prop_map(|(inputs, outputs, len)| BlackBoxFuncCall::Poseidon2Permutation { + inputs, + outputs, + len, + }); + + let case_sha256_compression = (input_arr_16, input_arr_8, witness_arr_8).prop_map( + |(inputs, hash_values, outputs)| BlackBoxFuncCall::Sha256Compression { + inputs, + hash_values, + outputs, + }, + ); + + prop_oneof![ + case_aes128_encrypt, + case_and, + case_xor, + case_range, + case_blake2s, + case_blake3, + case_ecdsa_secp256k1, + case_ecdsa_secp256r1, + case_multi_scalar_mul, + case_embedded_curve_add, + case_keccakf1600, + case_recursive_aggregation, + case_big_int_add, + case_big_int_sub, + case_big_int_mul, + case_big_int_div, + case_big_int_from_le_bytes, + case_big_int_to_le_bytes, + case_poseidon2_permutation, + case_sha256_compression, + ] + .boxed() + } + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/function_id.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/function_id.rs index b5abb1b3942..e87e52ce967 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/function_id.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/function_id.rs @@ -1,6 +1,7 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] #[serde(transparent)] pub struct AcirFunctionId(pub u32); diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs index c9a78983204..66034166b23 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs @@ -3,11 +3,13 @@ use acir_field::AcirField; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash, Copy, Default)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct BlockId(pub u32); /// Operation on a block of memory /// We can either write or read at an index in memory #[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct MemOp { /// A constant expression that can be 0 (read) or 1 (write) pub operation: Expression, diff --git a/noir/noir-repo/acvm-repo/acir/src/lib.rs b/noir/noir-repo/acvm-repo/acir/src/lib.rs index e49ab60f9e0..63a1253cbe1 100644 --- a/noir/noir-repo/acvm-repo/acir/src/lib.rs +++ b/noir/noir-repo/acvm-repo/acir/src/lib.rs @@ -1,4 +1,4 @@ -#![forbid(unsafe_code)] +#![cfg_attr(not(test), forbid(unsafe_code))] // `std::env::set_var` is used in tests. 
#![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] @@ -7,6 +7,7 @@ pub mod circuit; pub mod native_types; +mod proto; pub use acir_field; pub use acir_field::{AcirField, FieldElement}; diff --git a/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs b/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs index cdb8974526f..1d1f12b7eea 100644 --- a/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs @@ -13,6 +13,7 @@ mod ordering; // In the multiplication polynomial // XXX: If we allow the degree of the quotient polynomial to be arbitrary, then we will need a vector of wire values #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct Expression { // To avoid having to create intermediate variables pre-optimization // We collect all of the multiplication terms in the assert-zero opcode diff --git a/noir/noir-repo/acvm-repo/acir/src/native_types/witness.rs b/noir/noir-repo/acvm-repo/acir/src/native_types/witness.rs index a570968f948..d97702174b8 100644 --- a/noir/noir-repo/acvm-repo/acir/src/native_types/witness.rs +++ b/noir/noir-repo/acvm-repo/acir/src/native_types/witness.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; #[derive( Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize, )] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct Witness(pub u32); impl Witness { diff --git a/noir/noir-repo/acvm-repo/acir/src/native_types/witness_map.rs b/noir/noir-repo/acvm-repo/acir/src/native_types/witness_map.rs index 77745c714a3..cbaef49049f 100644 --- a/noir/noir-repo/acvm-repo/acir/src/native_types/witness_map.rs +++ b/noir/noir-repo/acvm-repo/acir/src/native_types/witness_map.rs @@ -4,18 +4,24 @@ use std::{ ops::Index, }; +use acir_field::AcirField; use flate2::Compression; use flate2::bufread::GzDecoder; use flate2::bufread::GzEncoder; +use noir_protobuf::ProtoCodec as _; use serde::{Deserialize, Serialize}; use thiserror::Error; -use crate::native_types::Witness; +use crate::{native_types::Witness, proto::convert::ProtoSchema}; #[derive(Debug, Error)] enum SerializationError { #[error(transparent)] Deflate(#[from] std::io::Error), + + #[allow(dead_code)] + #[error("error deserializing witness map: {0}")] + Deserialize(String), } #[derive(Debug, Error)] @@ -24,6 +30,7 @@ pub struct WitnessMapError(#[from] SerializationError); /// A map from the witnesses in a constraint system to the field element values #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct WitnessMap(BTreeMap); impl WitnessMap { @@ -77,26 +84,50 @@ impl From> for WitnessMap { } } -impl TryFrom> for Vec { +impl WitnessMap { + pub(crate) fn bincode_serialize(&self) -> Result, WitnessMapError> { + bincode::serialize(self).map_err(|e| SerializationError::Deserialize(e.to_string()).into()) + } +} + +impl Deserialize<'a>> WitnessMap { + pub(crate) fn bincode_deserialize(buf: &[u8]) -> Result { + bincode::deserialize(buf).map_err(|e| SerializationError::Deserialize(e.to_string()).into()) + } +} + +#[allow(dead_code)] +impl WitnessMap { + pub(crate) fn proto_serialize(&self) -> Vec { + ProtoSchema::::serialize_to_vec(self) + } + + pub(crate) fn 
proto_deserialize(buf: &[u8]) -> Result { + ProtoSchema::::deserialize_from_vec(buf) + .map_err(|e| SerializationError::Deserialize(e.to_string()).into()) + } +} + +impl TryFrom> for Vec { type Error = WitnessMapError; fn try_from(val: WitnessMap) -> Result { - let buf = bincode::serialize(&val).unwrap(); + let buf = val.bincode_serialize()?; let mut deflater = GzEncoder::new(buf.as_slice(), Compression::best()); - let mut buf_c = Vec::new(); - deflater.read_to_end(&mut buf_c).map_err(|err| WitnessMapError(err.into()))?; - Ok(buf_c) + let mut buf = Vec::new(); + deflater.read_to_end(&mut buf).map_err(|err| WitnessMapError(err.into()))?; + Ok(buf) } } -impl Deserialize<'a>> TryFrom<&[u8]> for WitnessMap { +impl Deserialize<'a>> TryFrom<&[u8]> for WitnessMap { type Error = WitnessMapError; fn try_from(bytes: &[u8]) -> Result { let mut deflater = GzDecoder::new(bytes); - let mut buf_d = Vec::new(); - deflater.read_to_end(&mut buf_d).map_err(|err| WitnessMapError(err.into()))?; - let witness_map = bincode::deserialize(&buf_d).unwrap(); - Ok(Self(witness_map)) + let mut buf = Vec::new(); + deflater.read_to_end(&mut buf).map_err(|err| WitnessMapError(err.into()))?; + let witness_map = Self::bincode_deserialize(&buf)?; + Ok(witness_map) } } diff --git a/noir/noir-repo/acvm-repo/acir/src/native_types/witness_stack.rs b/noir/noir-repo/acvm-repo/acir/src/native_types/witness_stack.rs index d048e050995..d8c0eb14773 100644 --- a/noir/noir-repo/acvm-repo/acir/src/native_types/witness_stack.rs +++ b/noir/noir-repo/acvm-repo/acir/src/native_types/witness_stack.rs @@ -1,17 +1,28 @@ use std::io::Read; +use acir_field::AcirField; use flate2::Compression; use flate2::bufread::GzDecoder; use flate2::bufread::GzEncoder; +use noir_protobuf::ProtoCodec as _; use serde::{Deserialize, Serialize}; use thiserror::Error; +use crate::proto::convert::ProtoSchema; + use super::WitnessMap; #[derive(Debug, Error)] enum SerializationError { #[error(transparent)] Deflate(#[from] std::io::Error), + + #[error(transparent)] + BincodeError(#[from] bincode::Error), + + #[allow(dead_code)] + #[error("error deserializing witness stack: {0}")] + Deserialize(String), } #[derive(Debug, Error)] @@ -20,11 +31,13 @@ pub struct WitnessStackError(#[from] SerializationError); /// An ordered set of witness maps for separate circuits #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct WitnessStack { stack: Vec>, } #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize, Deserialize)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct StackItem { /// Index into a [crate::circuit::Program] function list for which we have an associated witness pub index: u32, @@ -57,11 +70,35 @@ impl From> for WitnessStack { } } -impl TryFrom> for Vec { +impl WitnessStack { + pub(crate) fn bincode_serialize(&self) -> Result, WitnessStackError> { + bincode::serialize(self).map_err(|e| WitnessStackError(e.into())) + } +} + +impl Deserialize<'a>> WitnessStack { + pub(crate) fn bincode_deserialize(buf: &[u8]) -> Result { + bincode::deserialize(buf).map_err(|e| WitnessStackError(e.into())) + } +} + +#[allow(dead_code)] +impl WitnessStack { + pub(crate) fn proto_serialize(&self) -> Vec { + ProtoSchema::::serialize_to_vec(self) + } + + pub(crate) fn proto_deserialize(buf: &[u8]) -> Result { + ProtoSchema::::deserialize_from_vec(buf) + .map_err(|e| SerializationError::Deserialize(e.to_string()).into()) 
+ } +} + +impl TryFrom<&WitnessStack> for Vec { type Error = WitnessStackError; - fn try_from(val: WitnessStack) -> Result { - let buf = bincode::serialize(&val).unwrap(); + fn try_from(val: &WitnessStack) -> Result { + let buf = val.bincode_serialize()?; let mut deflater = GzEncoder::new(buf.as_slice(), Compression::best()); let mut buf_c = Vec::new(); deflater.read_to_end(&mut buf_c).map_err(|err| WitnessStackError(err.into()))?; @@ -69,14 +106,22 @@ impl TryFrom> for Vec { } } -impl Deserialize<'a>> TryFrom<&[u8]> for WitnessStack { +impl TryFrom> for Vec { + type Error = WitnessStackError; + + fn try_from(val: WitnessStack) -> Result { + Self::try_from(&val) + } +} + +impl Deserialize<'a>> TryFrom<&[u8]> for WitnessStack { type Error = WitnessStackError; fn try_from(bytes: &[u8]) -> Result { let mut deflater = GzDecoder::new(bytes); - let mut buf_d = Vec::new(); - deflater.read_to_end(&mut buf_d).map_err(|err| WitnessStackError(err.into()))?; - let witness_stack = bincode::deserialize(&buf_d).unwrap(); + let mut buf = Vec::new(); + deflater.read_to_end(&mut buf).map_err(|err| WitnessStackError(err.into()))?; + let witness_stack = Self::bincode_deserialize(&buf)?; Ok(witness_stack) } } diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/acir/circuit.proto b/noir/noir-repo/acvm-repo/acir/src/proto/acir/circuit.proto new file mode 100644 index 00000000000..c0981d6e30c --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/acir/circuit.proto @@ -0,0 +1,255 @@ +syntax = "proto3"; + +package acvm.acir.circuit; + +import "acir/native.proto"; + +message Circuit { + uint32 current_witness_index = 1; + repeated Opcode opcodes = 2; + ExpressionWidth expression_width = 3; + repeated native.Witness private_parameters = 4; + repeated native.Witness public_parameters = 5; + repeated native.Witness return_values = 6; + repeated AssertMessage assert_messages = 7; +} + +message ExpressionWidth { + oneof value { + Unbounded unbounded = 1; + Bounded bounded = 2; + } + message Unbounded {} + message Bounded { uint64 width = 1; } +} + +message AssertMessage { + OpcodeLocation location = 1; + AssertionPayload payload = 2; +} + +message OpcodeLocation { + oneof value { + uint64 acir = 1; + BrilligLocation brillig = 2; + } + message BrilligLocation { + uint64 acir_index = 1; + uint64 brillig_index = 2; + } +} + +message AssertionPayload { + uint64 error_selector = 1; + repeated ExpressionOrMemory payload = 2; +} + +message ExpressionOrMemory { + oneof value { + native.Expression expression = 1; + uint32 memory = 2; + } +} + +message Opcode { + oneof value { + native.Expression assert_zero = 1; + BlackBoxFuncCall blackbox_func_call = 2; + MemoryOp memory_op = 3; + MemoryInit memory_init = 4; + BrilligCall brillig_call = 5; + Call call = 6; + } + message MemoryOp { + uint32 block_id = 1; + MemOp op = 2; + optional native.Expression predicate = 3; + } + message MemoryInit { + uint32 block_id = 1; + repeated native.Witness init = 2; + BlockType block_type = 3; + } + message BrilligCall { + uint32 id = 1; + repeated BrilligInputs inputs = 2; + repeated BrilligOutputs outputs = 3; + optional native.Expression predicate = 4; + } + message Call { + uint32 id = 1; + repeated native.Witness inputs = 2; + repeated native.Witness outputs = 3; + optional native.Expression predicate = 4; + } +} + +message BlackBoxFuncCall { + oneof value { + AES128Encrypt aes128_encrypt = 1; + AND and = 2; + XOR xor = 3; + RANGE range = 4; + Blake2s blake2s = 5; + Blake3 blake3 = 6; + EcdsaSecp256k1 ecdsa_secp256k1 = 7; + 
EcdsaSecp256r1 ecdsa_secp256r1 = 8; + MultiScalarMul multi_scalar_mul = 9; + EmbeddedCurveAdd embedded_curve_add = 10; + Keccakf1600 keccak_f1600 = 11; + RecursiveAggregation recursive_aggregation = 12; + BigIntAdd big_int_add = 13; + BigIntSub big_int_sub = 14; + BigIntMul big_int_mul = 15; + BigIntDiv big_int_div = 16; + BigIntFromLeBytes big_int_from_le_bytes = 17; + BigIntToLeBytes big_int_to_le_bytes = 18; + Poseidon2Permutation poseidon2_permutation = 19; + Sha256Compression sha256_compression = 20; + } + message AES128Encrypt { + repeated FunctionInput inputs = 1; + repeated FunctionInput iv = 2; + repeated FunctionInput key = 3; + repeated native.Witness outputs = 4; + } + message AND { + FunctionInput lhs = 1; + FunctionInput rhs = 2; + native.Witness output = 3; + } + message XOR { + FunctionInput lhs = 1; + FunctionInput rhs = 2; + native.Witness output = 3; + } + message RANGE { FunctionInput input = 1; } + message Blake2s { + repeated FunctionInput inputs = 1; + repeated native.Witness outputs = 2; + } + message Blake3 { + repeated FunctionInput inputs = 1; + repeated native.Witness outputs = 2; + } + message EcdsaSecp256k1 { + repeated FunctionInput public_key_x = 1; + repeated FunctionInput public_key_y = 2; + repeated FunctionInput signature = 3; + repeated FunctionInput hashed_message = 4; + native.Witness output = 5; + } + message EcdsaSecp256r1 { + repeated FunctionInput public_key_x = 1; + repeated FunctionInput public_key_y = 2; + repeated FunctionInput signature = 3; + repeated FunctionInput hashed_message = 4; + native.Witness output = 5; + } + message MultiScalarMul { + repeated FunctionInput points = 1; + repeated FunctionInput scalars = 2; + repeated native.Witness outputs = 3; + } + message EmbeddedCurveAdd { + repeated FunctionInput input1 = 1; + repeated FunctionInput input2 = 2; + repeated native.Witness outputs = 3; + } + message Keccakf1600 { + repeated FunctionInput inputs = 1; + repeated native.Witness outputs = 2; + } + message RecursiveAggregation { + repeated FunctionInput verification_key = 1; + repeated FunctionInput proof = 2; + repeated FunctionInput public_inputs = 3; + FunctionInput key_hash = 4; + uint32 proof_type = 5; + } + message BigIntAdd { + uint32 lhs = 1; + uint32 rhs = 2; + uint32 output = 3; + } + message BigIntSub { + uint32 lhs = 1; + uint32 rhs = 2; + uint32 output = 3; + } + message BigIntMul { + uint32 lhs = 1; + uint32 rhs = 2; + uint32 output = 3; + } + message BigIntDiv { + uint32 lhs = 1; + uint32 rhs = 2; + uint32 output = 3; + } + message BigIntFromLeBytes { + repeated FunctionInput inputs = 1; + bytes modulus = 2; + uint32 output = 3; + } + message BigIntToLeBytes { + uint32 input = 1; + repeated native.Witness outputs = 2; + } + message Poseidon2Permutation { + repeated FunctionInput inputs = 1; + repeated native.Witness outputs = 2; + uint32 len = 3; + } + message Sha256Compression { + repeated FunctionInput inputs = 1; + repeated FunctionInput hash_values = 2; + repeated native.Witness outputs = 3; + } +} + +message FunctionInput { + ConstantOrWitnessEnum input = 1; + uint32 num_bits = 2; +} + +message ConstantOrWitnessEnum { + oneof value { + native.Field constant = 1; + native.Witness witness = 2; + } +} + +message MemOp { + native.Expression operation = 1; + native.Expression index = 2; + native.Expression value = 3; +} + +message BlockType { + oneof value { + Memory memory = 1; + CallData call_data = 2; + ReturnData return_data = 3; + } + message Memory {} + message CallData { uint32 value = 1; } + message ReturnData 
{}
+}
+
+message BrilligInputs {
+  oneof value {
+    native.Expression single = 1;
+    Array array = 2;
+    uint32 memory_array = 3;
+  }
+  message Array { repeated native.Expression values = 2; }
+}
+
+message BrilligOutputs {
+  oneof value {
+    native.Witness simple = 1;
+    Array array = 2;
+  }
+  message Array { repeated native.Witness values = 1; }
+}
diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/acir/native.proto b/noir/noir-repo/acvm-repo/acir/src/proto/acir/native.proto
new file mode 100644
index 00000000000..561a79c5701
--- /dev/null
+++ b/noir/noir-repo/acvm-repo/acir/src/proto/acir/native.proto
@@ -0,0 +1,24 @@
+syntax = "proto3";
+
+package acvm.acir.native;
+
+message Field { bytes value = 1; }
+
+message Witness { uint32 index = 1; }
+
+message Expression {
+  repeated MulTerm mul_terms = 1;
+  repeated LinearCombination linear_combinations = 2;
+  Field q_c = 3;
+
+  message MulTerm {
+    Field q_m = 1;
+    Witness witness_left = 2;
+    Witness witness_right = 3;
+  }
+
+  message LinearCombination {
+    Field q_l = 1;
+    Witness witness = 2;
+  }
+}
\ No newline at end of file
diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/acir/program.proto b/noir/noir-repo/acvm-repo/acir/src/proto/acir/program.proto
new file mode 100644
index 00000000000..0032966d0cb
--- /dev/null
+++ b/noir/noir-repo/acvm-repo/acir/src/proto/acir/program.proto
@@ -0,0 +1,17 @@
+syntax = "proto3";
+
+package acvm.acir.program;
+
+// Only including the ACIR types for Barretenberg, not Brillig.
+import "acir/circuit.proto";
+
+// Same as the top level `program.proto` but ignores the
+// `unconstrained_functions` field, so that Barretenberg doesn't need to
+// deserialize it.
+message Program {
+  // ACIR circuits
+  repeated acvm.acir.circuit.Circuit functions = 1;
+
+  reserved 2;
+  reserved "unconstrained_functions";
+}
\ No newline at end of file
diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/acir/witness.proto b/noir/noir-repo/acvm-repo/acir/src/proto/acir/witness.proto
new file mode 100644
index 00000000000..bb6a29341ee
--- /dev/null
+++ b/noir/noir-repo/acvm-repo/acir/src/proto/acir/witness.proto
@@ -0,0 +1,23 @@
+syntax = "proto3";
+
+package acvm.acir.witness;
+
+import "acir/native.proto";
+
+message WitnessMap {
+  repeated WitnessValue values = 1;
+
+  message WitnessValue {
+    native.Witness witness = 1;
+    native.Field field = 2;
+  }
+}
+
+message WitnessStack {
+  repeated StackItem stack = 1;
+
+  message StackItem {
+    uint32 index = 1;
+    WitnessMap witness = 2;
+  }
+}
\ No newline at end of file
diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/brillig.proto b/noir/noir-repo/acvm-repo/acir/src/proto/brillig.proto
new file mode 100644
index 00000000000..cba8c797889
--- /dev/null
+++ b/noir/noir-repo/acvm-repo/acir/src/proto/brillig.proto
@@ -0,0 +1,308 @@
+syntax = "proto3";
+
+package acvm.brillig;
+
+import "acir/native.proto";
+
+message BrilligBytecode { repeated BrilligOpcode bytecode = 1; }
+
+message BrilligOpcode {
+  oneof value {
+    BinaryFieldOp binary_field_op = 1;
+    BinaryIntOp binary_int_op = 2;
+    Not not = 3;
+    Cast cast = 4;
+    JumpIfNot jump_if_not = 5;
+    JumpIf jump_if = 6;
+    Jump jump = 7;
+    CalldataCopy calldata_copy = 8;
+    Call call = 9;
+    Const const = 10;
+    IndirectConst indirect_const = 11;
+    Return return = 12;
+    ForeignCall foreign_call = 13;
+    Mov mov = 14;
+    ConditionalMov conditional_mov = 15;
+    Load load = 16;
+    Store store = 17;
+    BlackBox black_box = 18;
+    Trap trap = 19;
+    Stop stop = 20;
+  }
+  message BinaryFieldOp {
+    MemoryAddress destination = 1;
+    BinaryFieldOpKind op =
2; + MemoryAddress lhs = 3; + MemoryAddress rhs = 4; + } + message BinaryIntOp { + MemoryAddress destination = 1; + BinaryIntOpKind op = 2; + IntegerBitSize bit_size = 3; + MemoryAddress lhs = 4; + MemoryAddress rhs = 5; + } + message Not { + MemoryAddress destination = 1; + MemoryAddress source = 2; + IntegerBitSize bit_size = 3; + } + message Cast { + MemoryAddress destination = 1; + MemoryAddress source = 2; + BitSize bit_size = 3; + } + message JumpIfNot { + MemoryAddress condition = 1; + uint64 location = 2; + } + message JumpIf { + MemoryAddress condition = 1; + uint64 location = 2; + } + message Jump { uint64 location = 1; } + message CalldataCopy { + MemoryAddress destination_address = 1; + MemoryAddress size_address = 2; + MemoryAddress offset_address = 3; + } + message Call { uint64 location = 1; } + message Const { + MemoryAddress destination = 1; + BitSize bit_size = 2; + acir.native.Field value = 3; + } + message IndirectConst { + MemoryAddress destination_pointer = 1; + BitSize bit_size = 2; + acir.native.Field value = 3; + } + message Return {} + message ForeignCall { + string function = 1; + repeated ValueOrArray destinations = 2; + repeated HeapValueType destination_value_types = 3; + repeated ValueOrArray inputs = 4; + repeated HeapValueType input_value_types = 5; + } + message Mov { + MemoryAddress destination = 1; + MemoryAddress source = 2; + } + message ConditionalMov { + MemoryAddress destination = 1; + MemoryAddress source_a = 2; + MemoryAddress source_b = 3; + MemoryAddress condition = 4; + } + message Load { + MemoryAddress destination = 1; + MemoryAddress source_pointer = 2; + } + message Store { + MemoryAddress destination_pointer = 1; + MemoryAddress source = 2; + } + message BlackBox { BlackBoxOp op = 1; } + message Trap { HeapVector revert_data = 1; } + message Stop { HeapVector return_data = 1; } +} + +message MemoryAddress { + oneof value { + uint64 direct = 1; + uint64 relative = 2; + } +} + +message ValueOrArray { + oneof value { + MemoryAddress memory_address = 1; + HeapArray heap_array = 2; + HeapVector heap_vector = 3; + } +} + +message HeapArray { + MemoryAddress pointer = 1; + uint64 size = 2; +} + +message HeapVector { + MemoryAddress pointer = 1; + MemoryAddress size = 2; +} + +message HeapValueType { + oneof value { + BitSize simple = 1; + Array array = 2; + Vector vector = 3; + } + message Array { + repeated HeapValueType value_types = 1; + uint64 size = 2; + } + message Vector { repeated HeapValueType value_types = 1; } +} + +enum BinaryFieldOpKind { + BFO_UNSPECIFIED = 0; + BFO_ADD = 1; + BFO_SUB = 2; + BFO_MUL = 3; + BFO_DIV = 4; + BFO_INTEGER_DIV = 5; + BFO_EQUALS = 6; + BFO_LESS_THAN = 7; + BFO_LESS_THAN_EQUALS = 8; +} + +enum BinaryIntOpKind { + BIO_UNSPECIFIED = 0; + BIO_ADD = 1; + BIO_SUB = 2; + BIO_MUL = 3; + BIO_DIV = 4; + BIO_EQUALS = 5; + BIO_LESS_THAN = 6; + BIO_LESS_THAN_EQUALS = 7; + BIO_AND = 8; + BIO_OR = 9; + BIO_XOR = 10; + BIO_SHL = 11; + BIO_SHR = 12; +} + +enum IntegerBitSize { + IBS_UNSPECIFIED = 0; + IBS_U1 = 1; + IBS_U8 = 8; + IBS_U16 = 16; + IBS_U32 = 32; + IBS_U64 = 64; + IBS_U128 = 128; +} + +message BitSize { + oneof value { + Field field = 1; + IntegerBitSize integer = 2; + } + message Field {} +} + +message BlackBoxOp { + oneof value { + AES128Encrypt aes128_encrypt = 1; + Blake2s blake2s = 2; + Blake3 blake3 = 3; + Keccakf1600 keccak_f1600 = 4; + EcdsaSecp256k1 ecdsa_secp256k1 = 5; + EcdsaSecp256r1 ecdsa_secp256r1 = 6; + MultiScalarMul multi_scalar_mul = 7; + EmbeddedCurveAdd embedded_curve_add = 8; + BigIntAdd 
big_int_add = 9; + BigIntSub big_int_sub = 10; + BigIntMul big_int_mul = 11; + BigIntDiv big_int_div = 12; + BigIntFromLeBytes big_int_from_le_bytes = 13; + BigIntToLeBytes big_int_to_le_bytes = 14; + Poseidon2Permutation poseidon2_permutation = 15; + Sha256Compression sha256_compression = 16; + ToRadix to_radix = 17; + } + message AES128Encrypt { + HeapVector inputs = 1; + HeapArray iv = 2; + HeapArray key = 3; + HeapVector outputs = 4; + } + message Blake2s { + HeapVector message = 1; + HeapArray output = 2; + } + message Blake3 { + HeapVector message = 1; + HeapArray output = 2; + } + message Keccakf1600 { + HeapArray input = 1; + HeapArray output = 2; + } + message EcdsaSecp256k1 { + HeapVector hashed_msg = 1; + HeapArray public_key_x = 2; + HeapArray public_key_y = 3; + HeapArray signature = 4; + MemoryAddress result = 5; + } + message EcdsaSecp256r1 { + HeapVector hashed_msg = 1; + HeapArray public_key_x = 2; + HeapArray public_key_y = 3; + HeapArray signature = 4; + MemoryAddress result = 5; + } + + message MultiScalarMul { + HeapVector points = 1; + HeapVector scalars = 2; + HeapArray outputs = 3; + } + message EmbeddedCurveAdd { + MemoryAddress input1_x = 1; + MemoryAddress input1_y = 2; + MemoryAddress input1_infinite = 3; + MemoryAddress input2_x = 4; + MemoryAddress input2_y = 5; + MemoryAddress input2_infinite = 6; + HeapArray result = 7; + } + message BigIntAdd { + MemoryAddress lhs = 1; + MemoryAddress rhs = 2; + MemoryAddress output = 3; + } + message BigIntSub { + MemoryAddress lhs = 1; + MemoryAddress rhs = 2; + MemoryAddress output = 3; + } + message BigIntMul { + MemoryAddress lhs = 1; + MemoryAddress rhs = 2; + MemoryAddress output = 3; + } + message BigIntDiv { + MemoryAddress lhs = 1; + MemoryAddress rhs = 2; + MemoryAddress output = 3; + } + message BigIntFromLeBytes { + HeapVector inputs = 1; + HeapVector modulus = 2; + MemoryAddress output = 3; + } + message BigIntToLeBytes { + MemoryAddress input = 1; + HeapVector output = 2; + } + message Poseidon2Permutation { + HeapVector message = 1; + HeapArray output = 2; + MemoryAddress len = 3; + } + message Sha256Compression { + HeapArray input = 1; + HeapArray hash_values = 2; + HeapArray output = 3; + } + message ToRadix { + MemoryAddress input = 1; + MemoryAddress radix = 2; + MemoryAddress output_pointer = 3; + MemoryAddress num_limbs = 4; + MemoryAddress output_bits = 5; + } +} \ No newline at end of file diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/convert/acir.rs b/noir/noir-repo/acvm-repo/acir/src/proto/convert/acir.rs new file mode 100644 index 00000000000..1a6d9169b30 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/convert/acir.rs @@ -0,0 +1,682 @@ +use crate::{ + circuit::{ + self, + brillig::BrilligFunctionId, + opcodes::{self, AcirFunctionId, BlockId}, + }, + proto::acir::circuit::{ + AssertMessage, AssertionPayload, BlackBoxFuncCall, BlockType, BrilligInputs, + BrilligOutputs, Circuit, ConstantOrWitnessEnum, ExpressionOrMemory, ExpressionWidth, + FunctionInput, MemOp, Opcode, OpcodeLocation, + }, +}; +use acir_field::AcirField; +use color_eyre::eyre::{self, Context}; +use noir_protobuf::{ProtoCodec, decode_oneof_map}; + +use super::ProtoSchema; + +impl ProtoCodec, Circuit> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &circuit::Circuit) -> Circuit { + Circuit { + current_witness_index: value.current_witness_index, + opcodes: Self::encode_vec(&value.opcodes), + expression_width: Self::encode_some(&value.expression_width), + private_parameters: 
Self::encode_vec(value.private_parameters.iter()), + public_parameters: Self::encode_vec(value.public_parameters.0.iter()), + return_values: Self::encode_vec(value.return_values.0.iter()), + assert_messages: Self::encode_vec(&value.assert_messages), + } + } + + fn decode(value: &Circuit) -> eyre::Result> { + Ok(circuit::Circuit { + current_witness_index: value.current_witness_index, + opcodes: Self::decode_vec_wrap(&value.opcodes, "opcodes")?, + expression_width: Self::decode_some_wrap(&value.expression_width, "expression_width")?, + private_parameters: Self::decode_vec_wrap( + &value.private_parameters, + "private_parameters", + )? + .into_iter() + .collect(), + public_parameters: circuit::PublicInputs( + Self::decode_vec_wrap(&value.public_parameters, "public_parameters")? + .into_iter() + .collect(), + ), + return_values: circuit::PublicInputs( + Self::decode_vec_wrap(&value.return_values, "return_values")?.into_iter().collect(), + ), + assert_messages: Self::decode_vec_wrap(&value.assert_messages, "assert_messages")?, + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &circuit::ExpressionWidth) -> ExpressionWidth { + use crate::proto::acir::circuit::expression_width::*; + let value = match value { + circuit::ExpressionWidth::Unbounded => Value::Unbounded(Unbounded {}), + circuit::ExpressionWidth::Bounded { width } => { + Value::Bounded(Bounded { width: Self::encode(width) }) + } + }; + ExpressionWidth { value: Some(value) } + } + + fn decode(value: &ExpressionWidth) -> eyre::Result { + use crate::proto::acir::circuit::expression_width::*; + decode_oneof_map(&value.value, |value| match value { + Value::Unbounded(_) => Ok(circuit::ExpressionWidth::Unbounded), + Value::Bounded(v) => Ok(circuit::ExpressionWidth::Bounded { + width: Self::decode_wrap(&v.width, "width")?, + }), + }) + } +} + +impl ProtoCodec<(circuit::OpcodeLocation, circuit::AssertionPayload), AssertMessage> + for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &(circuit::OpcodeLocation, circuit::AssertionPayload)) -> AssertMessage { + AssertMessage { + location: Self::encode_some(&value.0), + payload: Self::encode_some(&value.1), + } + } + + fn decode( + value: &AssertMessage, + ) -> eyre::Result<(circuit::OpcodeLocation, circuit::AssertionPayload)> { + let location = Self::decode_some_wrap(&value.location, "location")?; + let payload = Self::decode_some_wrap(&value.payload, "payload")?; + Ok((location, payload)) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &circuit::OpcodeLocation) -> OpcodeLocation { + use crate::proto::acir::circuit::opcode_location::*; + let value = match value { + circuit::OpcodeLocation::Acir(size) => Value::Acir(Self::encode(size)), + circuit::OpcodeLocation::Brillig { acir_index, brillig_index } => { + Value::Brillig(BrilligLocation { + acir_index: Self::encode(acir_index), + brillig_index: Self::encode(brillig_index), + }) + } + }; + OpcodeLocation { value: Some(value) } + } + + fn decode(value: &OpcodeLocation) -> eyre::Result { + use crate::proto::acir::circuit::opcode_location::*; + decode_oneof_map(&value.value, |value| match value { + Value::Acir(location) => { + Ok(circuit::OpcodeLocation::Acir(Self::decode_wrap(location, "location")?)) + } + Value::Brillig(location) => Ok(circuit::OpcodeLocation::Brillig { + acir_index: Self::decode_wrap(&location.acir_index, "acir_index")?, + brillig_index: Self::decode_wrap(&location.brillig_index, "brillig_index")?, + }), + }) + } +} + +impl ProtoCodec, AssertionPayload> for ProtoSchema +where + F: AcirField, +{ 
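+    // `error_selector` is a plain `u64` and crosses the schema boundary unchanged;
+    // only the payload expressions need field-element conversion.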
+ fn encode(value: &circuit::AssertionPayload) -> AssertionPayload { + AssertionPayload { + error_selector: value.error_selector, + payload: Self::encode_vec(&value.payload), + } + } + + fn decode(value: &AssertionPayload) -> eyre::Result> { + Ok(circuit::AssertionPayload { + error_selector: value.error_selector, + payload: Self::decode_vec_wrap(&value.payload, "payload")?, + }) + } +} + +impl ProtoCodec, ExpressionOrMemory> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &circuit::ExpressionOrMemory) -> ExpressionOrMemory { + use crate::proto::acir::circuit::expression_or_memory::*; + let value = match value { + circuit::ExpressionOrMemory::Expression(expression) => { + Value::Expression(Self::encode(expression)) + } + circuit::ExpressionOrMemory::Memory(block_id) => Value::Memory(block_id.0), + }; + ExpressionOrMemory { value: Some(value) } + } + + fn decode(value: &ExpressionOrMemory) -> eyre::Result> { + use crate::proto::acir::circuit::expression_or_memory::*; + decode_oneof_map(&value.value, |value| match value { + Value::Expression(expression) => Ok(circuit::ExpressionOrMemory::Expression( + Self::decode_wrap(expression, "expression")?, + )), + Value::Memory(id) => Ok(circuit::ExpressionOrMemory::Memory(BlockId(*id))), + }) + } +} + +impl ProtoCodec, Opcode> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &circuit::Opcode) -> Opcode { + use crate::proto::acir::circuit::opcode::*; + let value = match value { + circuit::Opcode::AssertZero(expression) => Value::AssertZero(Self::encode(expression)), + circuit::Opcode::BlackBoxFuncCall(black_box_func_call) => { + Value::BlackboxFuncCall(Self::encode(black_box_func_call)) + } + circuit::Opcode::MemoryOp { block_id, op, predicate } => Value::MemoryOp(MemoryOp { + block_id: block_id.0, + op: Self::encode_some(op), + predicate: predicate.as_ref().map(Self::encode), + }), + circuit::Opcode::MemoryInit { block_id, init, block_type } => { + Value::MemoryInit(MemoryInit { + block_id: block_id.0, + init: Self::encode_vec(init), + block_type: Self::encode_some(block_type), + }) + } + circuit::Opcode::BrilligCall { id, inputs, outputs, predicate } => { + Value::BrilligCall(BrilligCall { + id: id.0, + inputs: Self::encode_vec(inputs), + outputs: Self::encode_vec(outputs), + predicate: predicate.as_ref().map(Self::encode), + }) + } + circuit::Opcode::Call { id, inputs, outputs, predicate } => Value::Call(Call { + id: id.0, + inputs: Self::encode_vec(inputs), + outputs: Self::encode_vec(outputs), + predicate: predicate.as_ref().map(Self::encode), + }), + }; + Opcode { value: Some(value) } + } + + fn decode(value: &Opcode) -> eyre::Result> { + use crate::proto::acir::circuit::opcode::*; + decode_oneof_map(&value.value, |value| match value { + Value::AssertZero(expression) => { + Ok(circuit::Opcode::AssertZero(Self::decode_wrap(expression, "assert_zero")?)) + } + Value::BlackboxFuncCall(black_box_func_call) => Ok(circuit::Opcode::BlackBoxFuncCall( + Self::decode_wrap(black_box_func_call, "blackbox_func_call")?, + )), + Value::MemoryOp(memory_op) => Ok(circuit::Opcode::MemoryOp { + block_id: BlockId(memory_op.block_id), + op: Self::decode_some_wrap(&memory_op.op, "op")?, + predicate: Self::decode_opt_wrap(&memory_op.predicate, "predicate")?, + }), + Value::MemoryInit(memory_init) => Ok(circuit::Opcode::MemoryInit { + block_id: BlockId(memory_init.block_id), + init: Self::decode_vec_wrap(&memory_init.init, "init")?, + block_type: Self::decode_some_wrap(&memory_init.block_type, "block_type")?, + }), + 
Value::BrilligCall(brillig_call) => Ok(circuit::Opcode::BrilligCall { + id: BrilligFunctionId(brillig_call.id), + inputs: Self::decode_vec_wrap(&brillig_call.inputs, "inputs")?, + outputs: Self::decode_vec_wrap(&brillig_call.outputs, "outputs")?, + predicate: Self::decode_opt_wrap(&brillig_call.predicate, "predicate")?, + }), + Value::Call(call) => Ok(circuit::Opcode::Call { + id: AcirFunctionId(call.id), + inputs: Self::decode_vec_wrap(&call.inputs, "inputs")?, + outputs: Self::decode_vec_wrap(&call.outputs, "outputs")?, + predicate: Self::decode_opt_wrap(&call.predicate, "predicate")?, + }), + }) + } +} + +impl ProtoCodec, MemOp> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &opcodes::MemOp) -> MemOp { + MemOp { + operation: Self::encode_some(&value.operation), + index: Self::encode_some(&value.index), + value: Self::encode_some(&value.value), + } + } + + fn decode(value: &MemOp) -> eyre::Result> { + Ok(opcodes::MemOp { + operation: Self::decode_some_wrap(&value.operation, "operation")?, + index: Self::decode_some_wrap(&value.index, "index")?, + value: Self::decode_some_wrap(&value.value, "value")?, + }) + } +} + +impl ProtoCodec, BlackBoxFuncCall> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &opcodes::BlackBoxFuncCall) -> BlackBoxFuncCall { + use crate::proto::acir::circuit::black_box_func_call::*; + let value = match value { + opcodes::BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } => { + Value::Aes128Encrypt(Aes128Encrypt { + inputs: Self::encode_vec(inputs), + iv: Self::encode_vec(iv.as_ref()), + key: Self::encode_vec(key.as_ref()), + outputs: Self::encode_vec(outputs), + }) + } + opcodes::BlackBoxFuncCall::AND { lhs, rhs, output } => Value::And(And { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + opcodes::BlackBoxFuncCall::XOR { lhs, rhs, output } => Value::Xor(Xor { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + opcodes::BlackBoxFuncCall::RANGE { input } => { + Value::Range(Range { input: Self::encode_some(input) }) + } + opcodes::BlackBoxFuncCall::Blake2s { inputs, outputs } => Value::Blake2s(Blake2s { + inputs: Self::encode_vec(inputs), + outputs: Self::encode_vec(outputs.as_ref()), + }), + opcodes::BlackBoxFuncCall::Blake3 { inputs, outputs } => Value::Blake3(Blake3 { + inputs: Self::encode_vec(inputs), + outputs: Self::encode_vec(outputs.as_ref()), + }), + opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => Value::EcdsaSecp256k1(EcdsaSecp256k1 { + public_key_x: Self::encode_vec(public_key_x.as_ref()), + public_key_y: Self::encode_vec(public_key_y.as_ref()), + signature: Self::encode_vec(signature.as_ref()), + hashed_message: Self::encode_vec(hashed_message.as_ref()), + output: Self::encode_some(output), + }), + opcodes::BlackBoxFuncCall::EcdsaSecp256r1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => Value::EcdsaSecp256r1(EcdsaSecp256r1 { + public_key_x: Self::encode_vec(public_key_x.as_ref()), + public_key_y: Self::encode_vec(public_key_y.as_ref()), + signature: Self::encode_vec(signature.as_ref()), + hashed_message: Self::encode_vec(hashed_message.as_ref()), + output: Self::encode_some(output), + }), + opcodes::BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => { + let (w1, w2, w3) = outputs; + Value::MultiScalarMul(MultiScalarMul { + points: Self::encode_vec(points), + scalars: 
Self::encode_vec(scalars), + outputs: Self::encode_vec([w1, w2, w3]), + }) + } + opcodes::BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs } => { + let (w1, w2, w3) = outputs; + Value::EmbeddedCurveAdd(EmbeddedCurveAdd { + input1: Self::encode_vec(input1.as_ref()), + input2: Self::encode_vec(input2.as_ref()), + outputs: Self::encode_vec([w1, w2, w3]), + }) + } + opcodes::BlackBoxFuncCall::Keccakf1600 { inputs, outputs } => { + Value::KeccakF1600(Keccakf1600 { + inputs: Self::encode_vec(inputs.as_ref()), + outputs: Self::encode_vec(outputs.as_ref()), + }) + } + opcodes::BlackBoxFuncCall::RecursiveAggregation { + verification_key, + proof, + public_inputs, + key_hash, + proof_type, + } => Value::RecursiveAggregation(RecursiveAggregation { + verification_key: Self::encode_vec(verification_key), + proof: Self::encode_vec(proof), + public_inputs: Self::encode_vec(public_inputs), + key_hash: Self::encode_some(key_hash), + proof_type: *proof_type, + }), + opcodes::BlackBoxFuncCall::BigIntAdd { lhs, rhs, output } => { + Value::BigIntAdd(BigIntAdd { lhs: *lhs, rhs: *rhs, output: *output }) + } + opcodes::BlackBoxFuncCall::BigIntSub { lhs, rhs, output } => { + Value::BigIntSub(BigIntSub { lhs: *lhs, rhs: *rhs, output: *output }) + } + opcodes::BlackBoxFuncCall::BigIntMul { lhs, rhs, output } => { + Value::BigIntMul(BigIntMul { lhs: *lhs, rhs: *rhs, output: *output }) + } + opcodes::BlackBoxFuncCall::BigIntDiv { lhs, rhs, output } => { + Value::BigIntDiv(BigIntDiv { lhs: *lhs, rhs: *rhs, output: *output }) + } + opcodes::BlackBoxFuncCall::BigIntFromLeBytes { inputs, modulus, output } => { + Value::BigIntFromLeBytes(BigIntFromLeBytes { + inputs: Self::encode_vec(inputs), + modulus: modulus.clone(), + output: *output, + }) + } + opcodes::BlackBoxFuncCall::BigIntToLeBytes { input, outputs } => { + Value::BigIntToLeBytes(BigIntToLeBytes { + input: *input, + outputs: Self::encode_vec(outputs), + }) + } + opcodes::BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len } => { + Value::Poseidon2Permutation(Poseidon2Permutation { + inputs: Self::encode_vec(inputs), + outputs: Self::encode_vec(outputs), + len: *len, + }) + } + opcodes::BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { + Value::Sha256Compression(Sha256Compression { + inputs: Self::encode_vec(inputs.as_ref()), + hash_values: Self::encode_vec(hash_values.as_ref()), + outputs: Self::encode_vec(outputs.as_ref()), + }) + } + }; + BlackBoxFuncCall { value: Some(value) } + } + + fn decode(value: &BlackBoxFuncCall) -> eyre::Result> { + use crate::proto::acir::circuit::black_box_func_call::*; + decode_oneof_map( + &value.value, + |value| -> Result, eyre::Error> { + match value { + Value::Aes128Encrypt(v) => Ok(opcodes::BlackBoxFuncCall::AES128Encrypt { + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + iv: Self::decode_box_arr_wrap(&v.iv, "iv")?, + key: Self::decode_box_arr_wrap(&v.key, "key")?, + outputs: Self::decode_vec_wrap(&v.outputs, "witness")?, + }), + Value::And(v) => Ok(opcodes::BlackBoxFuncCall::AND { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::Xor(v) => Ok(opcodes::BlackBoxFuncCall::XOR { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::Range(v) => Ok(opcodes::BlackBoxFuncCall::RANGE { + input: Self::decode_some_wrap(&v.input, "input")?, + }), + 
Value::Blake2s(v) => Ok(opcodes::BlackBoxFuncCall::Blake2s { + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + outputs: Self::decode_box_arr_wrap(&v.outputs, "outputs")?, + }), + Value::Blake3(v) => Ok(opcodes::BlackBoxFuncCall::Blake3 { + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + outputs: Self::decode_box_arr_wrap(&v.outputs, "outputs")?, + }), + Value::EcdsaSecp256k1(v) => Ok(opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { + public_key_x: Self::decode_box_arr_wrap(&v.public_key_x, "public_key_x")?, + public_key_y: Self::decode_box_arr_wrap(&v.public_key_y, "public_key_y")?, + signature: Self::decode_box_arr_wrap(&v.signature, "signature")?, + hashed_message: Self::decode_box_arr_wrap( + &v.hashed_message, + "hashed_message", + )?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::EcdsaSecp256r1(v) => Ok(opcodes::BlackBoxFuncCall::EcdsaSecp256r1 { + public_key_x: Self::decode_box_arr_wrap(&v.public_key_x, "public_key_x")?, + public_key_y: Self::decode_box_arr_wrap(&v.public_key_y, "public_key_y")?, + signature: Self::decode_box_arr_wrap(&v.signature, "signature")?, + hashed_message: Self::decode_box_arr_wrap( + &v.hashed_message, + "hashed_message", + )?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::MultiScalarMul(v) => Ok(opcodes::BlackBoxFuncCall::MultiScalarMul { + points: Self::decode_vec_wrap(&v.points, "points")?, + scalars: Self::decode_vec_wrap(&v.scalars, "scalars")?, + outputs: Self::decode_arr_wrap(&v.outputs, "outputs") + .map(|[w1, w2, w3]| (w1, w2, w3))?, + }), + Value::EmbeddedCurveAdd(v) => Ok(opcodes::BlackBoxFuncCall::EmbeddedCurveAdd { + input1: Self::decode_box_arr_wrap(&v.input1, "input1")?, + input2: Self::decode_box_arr_wrap(&v.input2, "input2")?, + outputs: Self::decode_arr_wrap(&v.outputs, "outputs") + .map(|[w1, w2, w3]| (w1, w2, w3))?, + }), + Value::KeccakF1600(v) => Ok(opcodes::BlackBoxFuncCall::Keccakf1600 { + inputs: Self::decode_box_arr_wrap(&v.inputs, "inputs")?, + outputs: Self::decode_box_arr_wrap(&v.outputs, "outputs")?, + }), + Value::RecursiveAggregation(v) => { + Ok(opcodes::BlackBoxFuncCall::RecursiveAggregation { + verification_key: Self::decode_vec_wrap( + &v.verification_key, + "verification_key", + )?, + proof: Self::decode_vec_wrap(&v.proof, "proof")?, + public_inputs: Self::decode_vec_wrap( + &v.public_inputs, + "public_inputs", + )?, + key_hash: Self::decode_some_wrap(&v.key_hash, "key_hash")?, + proof_type: v.proof_type, + }) + } + Value::BigIntAdd(v) => Ok(opcodes::BlackBoxFuncCall::BigIntAdd { + lhs: v.lhs, + rhs: v.rhs, + output: v.output, + }), + Value::BigIntSub(v) => Ok(opcodes::BlackBoxFuncCall::BigIntSub { + lhs: v.lhs, + rhs: v.rhs, + output: v.output, + }), + Value::BigIntMul(v) => Ok(opcodes::BlackBoxFuncCall::BigIntMul { + lhs: v.lhs, + rhs: v.rhs, + output: v.output, + }), + Value::BigIntDiv(v) => Ok(opcodes::BlackBoxFuncCall::BigIntDiv { + lhs: v.lhs, + rhs: v.rhs, + output: v.output, + }), + Value::BigIntFromLeBytes(v) => { + Ok(opcodes::BlackBoxFuncCall::BigIntFromLeBytes { + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + modulus: v.modulus.clone(), + output: v.output, + }) + } + Value::BigIntToLeBytes(v) => Ok(opcodes::BlackBoxFuncCall::BigIntToLeBytes { + input: v.input, + outputs: Self::decode_vec_wrap(&v.outputs, "outputs")?, + }), + Value::Poseidon2Permutation(v) => { + Ok(opcodes::BlackBoxFuncCall::Poseidon2Permutation { + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + outputs: Self::decode_vec_wrap(&v.outputs, "outputs")?, + len: v.len, + }) 
+                    }
+                    Value::Sha256Compression(v) => {
+                        Ok(opcodes::BlackBoxFuncCall::Sha256Compression {
+                            inputs: Self::decode_box_arr_wrap(&v.inputs, "inputs")?,
+                            hash_values: Self::decode_box_arr_wrap(&v.hash_values, "hash_values")?,
+                            outputs: Self::decode_box_arr_wrap(&v.outputs, "outputs")?,
+                        })
+                    }
+                }
+            },
+        )
+    }
+}
+
+impl<F> ProtoCodec<opcodes::FunctionInput<F>, FunctionInput> for ProtoSchema<F>
+where
+    F: AcirField,
+{
+    fn encode(value: &opcodes::FunctionInput<F>) -> FunctionInput {
+        FunctionInput { input: Self::encode_some(value.input_ref()), num_bits: value.num_bits() }
+    }
+
+    fn decode(value: &FunctionInput) -> eyre::Result<opcodes::FunctionInput<F>> {
+        let input = Self::decode_some_wrap(&value.input, "input")?;
+
+        match input {
+            opcodes::ConstantOrWitnessEnum::Constant(c) => {
+                opcodes::FunctionInput::constant(c, value.num_bits).wrap_err("constant")
+            }
+            opcodes::ConstantOrWitnessEnum::Witness(w) => {
+                Ok(opcodes::FunctionInput::witness(w, value.num_bits))
+            }
+        }
+    }
+}
+
+impl<F> ProtoCodec<opcodes::ConstantOrWitnessEnum<F>, ConstantOrWitnessEnum> for ProtoSchema<F>
+where
+    F: AcirField,
+{
+    fn encode(value: &opcodes::ConstantOrWitnessEnum<F>) -> ConstantOrWitnessEnum {
+        use crate::proto::acir::circuit::constant_or_witness_enum::*;
+        let value = match value {
+            opcodes::ConstantOrWitnessEnum::Constant(field) => Value::Constant(Self::encode(field)),
+            opcodes::ConstantOrWitnessEnum::Witness(witness) => {
+                Value::Witness(Self::encode(witness))
+            }
+        };
+        ConstantOrWitnessEnum { value: Some(value) }
+    }
+
+    fn decode(value: &ConstantOrWitnessEnum) -> eyre::Result<opcodes::ConstantOrWitnessEnum<F>> {
+        use crate::proto::acir::circuit::constant_or_witness_enum::*;
+        decode_oneof_map(&value.value, |value| match value {
+            Value::Constant(field) => {
+                Ok(opcodes::ConstantOrWitnessEnum::Constant(Self::decode_wrap(field, "constant")?))
+            }
+            Value::Witness(witness) => {
+                Ok(opcodes::ConstantOrWitnessEnum::Witness(Self::decode_wrap(witness, "witness")?))
+            }
+        })
+    }
+}
+
+impl<F: AcirField> ProtoCodec<opcodes::BlockType, BlockType> for ProtoSchema<F> {
+    fn encode(value: &opcodes::BlockType) -> BlockType {
+        use crate::proto::acir::circuit::block_type::*;
+        let value = match value {
+            opcodes::BlockType::Memory => Value::Memory(Memory {}),
+            opcodes::BlockType::CallData(value) => Value::CallData(CallData { value: *value }),
+            opcodes::BlockType::ReturnData => Value::ReturnData(ReturnData {}),
+        };
+        BlockType { value: Some(value) }
+    }
+
+    fn decode(value: &BlockType) -> eyre::Result<opcodes::BlockType> {
+        use crate::proto::acir::circuit::block_type::*;
+        decode_oneof_map(&value.value, |value| match value {
+            Value::Memory(_) => Ok(opcodes::BlockType::Memory),
+            Value::CallData(v) => Ok(opcodes::BlockType::CallData(v.value)),
+            Value::ReturnData(_) => Ok(opcodes::BlockType::ReturnData),
+        })
+    }
+}
+
+impl<F> ProtoCodec<circuit::brillig::BrilligInputs<F>, BrilligInputs> for ProtoSchema<F>
+where
+    F: AcirField,
+{
+    fn encode(value: &circuit::brillig::BrilligInputs<F>) -> BrilligInputs {
+        use crate::proto::acir::circuit::brillig_inputs::*;
+        let value = match value {
+            circuit::brillig::BrilligInputs::Single(expression) => {
+                Value::Single(Self::encode(expression))
+            }
+            circuit::brillig::BrilligInputs::Array(expressions) => {
+                Value::Array(Array { values: Self::encode_vec(expressions) })
+            }
+            circuit::brillig::BrilligInputs::MemoryArray(block_id) => {
+                Value::MemoryArray(block_id.0)
+            }
+        };
+        BrilligInputs { value: Some(value) }
+    }
+
+    fn decode(value: &BrilligInputs) -> eyre::Result<circuit::brillig::BrilligInputs<F>> {
+        use crate::proto::acir::circuit::brillig_inputs::*;
+        decode_oneof_map(&value.value, |value| match value {
+            Value::Single(expression) => Ok(circuit::brillig::BrilligInputs::Single(
+                Self::decode_wrap(expression, "single")?,
)), + Value::Array(array) => Ok(circuit::brillig::BrilligInputs::Array( + Self::decode_vec_wrap(&array.values, "array")?, + )), + Value::MemoryArray(id) => { + Ok(circuit::brillig::BrilligInputs::MemoryArray(BlockId(*id))) + } + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &circuit::brillig::BrilligOutputs) -> BrilligOutputs { + use crate::proto::acir::circuit::brillig_outputs::*; + let value = match value { + circuit::brillig::BrilligOutputs::Simple(witness) => { + Value::Simple(Self::encode(witness)) + } + circuit::brillig::BrilligOutputs::Array(witnesses) => { + Value::Array(Array { values: Self::encode_vec(witnesses) }) + } + }; + BrilligOutputs { value: Some(value) } + } + + fn decode(value: &BrilligOutputs) -> eyre::Result { + use crate::proto::acir::circuit::brillig_outputs::*; + + decode_oneof_map(&value.value, |value| match value { + Value::Simple(witness) => { + Ok(circuit::brillig::BrilligOutputs::Simple(Self::decode_wrap(witness, "simple")?)) + } + Value::Array(array) => Ok(circuit::brillig::BrilligOutputs::Array( + Self::decode_vec_wrap(&array.values, "array")?, + )), + }) + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/convert/brillig.rs b/noir/noir-repo/acvm-repo/acir/src/proto/convert/brillig.rs new file mode 100644 index 00000000000..d07ba462782 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/convert/brillig.rs @@ -0,0 +1,721 @@ +use crate::{ + circuit, + proto::brillig::{BitSize, BlackBoxOp, HeapArray, HeapValueType, HeapVector, ValueOrArray}, +}; +use acir_field::AcirField; +use color_eyre::eyre::{self, bail}; +use noir_protobuf::{ProtoCodec, decode_oneof_map}; + +use crate::proto::brillig::{ + BinaryFieldOpKind, BinaryIntOpKind, BrilligBytecode, BrilligOpcode, IntegerBitSize, + MemoryAddress, brillig_opcode, +}; + +use super::ProtoSchema; + +impl ProtoCodec, BrilligBytecode> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &circuit::brillig::BrilligBytecode) -> BrilligBytecode { + BrilligBytecode { bytecode: Self::encode_vec(&value.bytecode) } + } + + fn decode(value: &BrilligBytecode) -> eyre::Result> { + Ok(circuit::brillig::BrilligBytecode { + bytecode: Self::decode_vec_wrap(&value.bytecode, "bytecode")?, + }) + } +} + +impl ProtoCodec, BrilligOpcode> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &brillig::Opcode) -> BrilligOpcode { + use brillig_opcode::*; + + let value = match value { + brillig::Opcode::BinaryFieldOp { destination, op, lhs, rhs } => { + Value::BinaryFieldOp(BinaryFieldOp { + destination: Self::encode_some(destination), + op: Self::encode_enum(op), + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + }) + } + brillig::Opcode::BinaryIntOp { destination, op, bit_size, lhs, rhs } => { + Value::BinaryIntOp(BinaryIntOp { + destination: Self::encode_some(destination), + op: Self::encode_enum(op), + bit_size: Self::encode_enum(bit_size), + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + }) + } + brillig::Opcode::Not { destination, source, bit_size } => Value::Not(Not { + destination: Self::encode_some(destination), + source: Self::encode_some(source), + bit_size: Self::encode_enum(bit_size), + }), + brillig::Opcode::Cast { destination, source, bit_size } => Value::Cast(Cast { + destination: Self::encode_some(destination), + source: Self::encode_some(source), + bit_size: Self::encode_some(bit_size), + }), + brillig::Opcode::JumpIfNot { condition, location } => Value::JumpIfNot(JumpIfNot { + condition: Self::encode_some(condition), + location: 
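+                // The jump target is a `Label` (a plain usize), so it goes
+                // through the usize<->u64 codec instead of a nested message: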
Self::encode(location), + }), + brillig::Opcode::JumpIf { condition, location } => Value::JumpIf(JumpIf { + condition: Self::encode_some(condition), + location: Self::encode(location), + }), + brillig::Opcode::Jump { location } => { + Value::Jump(Jump { location: Self::encode(location) }) + } + brillig::Opcode::CalldataCopy { destination_address, size_address, offset_address } => { + Value::CalldataCopy(CalldataCopy { + destination_address: Self::encode_some(destination_address), + size_address: Self::encode_some(size_address), + offset_address: Self::encode_some(offset_address), + }) + } + brillig::Opcode::Call { location } => { + Value::Call(Call { location: Self::encode(location) }) + } + brillig::Opcode::Const { destination, bit_size, value } => Value::Const(Const { + destination: Self::encode_some(destination), + bit_size: Self::encode_some(bit_size), + value: Self::encode_some(value), + }), + brillig::Opcode::IndirectConst { destination_pointer, bit_size, value } => { + Value::IndirectConst(IndirectConst { + destination_pointer: Self::encode_some(destination_pointer), + bit_size: Self::encode_some(bit_size), + value: Self::encode_some(value), + }) + } + brillig::Opcode::Return => Value::Return(Return {}), + brillig::Opcode::ForeignCall { + function, + destinations, + destination_value_types, + inputs, + input_value_types, + } => Value::ForeignCall(ForeignCall { + function: function.to_string(), + destinations: Self::encode_vec(destinations), + destination_value_types: Self::encode_vec(destination_value_types), + inputs: Self::encode_vec(inputs), + input_value_types: Self::encode_vec(input_value_types), + }), + brillig::Opcode::Mov { destination, source } => Value::Mov(Mov { + destination: Self::encode_some(destination), + source: Self::encode_some(source), + }), + brillig::Opcode::ConditionalMov { destination, source_a, source_b, condition } => { + Value::ConditionalMov(ConditionalMov { + destination: Self::encode_some(destination), + source_a: Self::encode_some(source_a), + source_b: Self::encode_some(source_b), + condition: Self::encode_some(condition), + }) + } + brillig::Opcode::Load { destination, source_pointer } => Value::Load(Load { + destination: Self::encode_some(destination), + source_pointer: Self::encode_some(source_pointer), + }), + brillig::Opcode::Store { destination_pointer, source } => Value::Store(Store { + destination_pointer: Self::encode_some(destination_pointer), + source: Self::encode_some(source), + }), + brillig::Opcode::BlackBox(black_box_op) => { + Value::BlackBox(BlackBox { op: Self::encode_some(black_box_op) }) + } + brillig::Opcode::Trap { revert_data } => { + Value::Trap(Trap { revert_data: Self::encode_some(revert_data) }) + } + brillig::Opcode::Stop { return_data } => { + Value::Stop(Stop { return_data: Self::encode_some(return_data) }) + } + }; + BrilligOpcode { value: Some(value) } + } + + fn decode(value: &BrilligOpcode) -> eyre::Result> { + use brillig_opcode::*; + + decode_oneof_map(&value.value, |value| match value { + Value::BinaryFieldOp(v) => Ok(brillig::Opcode::BinaryFieldOp { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + op: Self::decode_enum_wrap(v.op, "op")?, + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + }), + Value::BinaryIntOp(v) => Ok(brillig::Opcode::BinaryIntOp { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + op: Self::decode_enum_wrap(v.op, "op")?, + bit_size: Self::decode_enum_wrap(v.bit_size, "bit_size")?, + lhs: 
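+                // The string literals passed to the `*_wrap` helpers are error
+                // labels: they name the operand that was missing or invalid: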
Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + }), + Value::Not(v) => Ok(brillig::Opcode::Not { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + source: Self::decode_some_wrap(&v.source, "source")?, + bit_size: Self::decode_enum_wrap(v.bit_size, "bit_size")?, + }), + Value::Cast(v) => Ok(brillig::Opcode::Cast { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + source: Self::decode_some_wrap(&v.source, "source")?, + bit_size: Self::decode_some_wrap(&v.bit_size, "bit_size")?, + }), + Value::JumpIfNot(v) => Ok(brillig::Opcode::JumpIfNot { + condition: Self::decode_some_wrap(&v.condition, "condition")?, + location: Self::decode_wrap(&v.location, "location")?, + }), + Value::JumpIf(v) => Ok(brillig::Opcode::JumpIf { + condition: Self::decode_some_wrap(&v.condition, "condition")?, + location: Self::decode_wrap(&v.location, "location")?, + }), + Value::Jump(v) => { + Ok(brillig::Opcode::Jump { location: Self::decode_wrap(&v.location, "location")? }) + } + Value::CalldataCopy(v) => Ok(brillig::Opcode::CalldataCopy { + destination_address: Self::decode_some_wrap( + &v.destination_address, + "destination_address", + )?, + size_address: Self::decode_some_wrap(&v.size_address, "size_address")?, + offset_address: Self::decode_some_wrap(&v.offset_address, "offset_address")?, + }), + Value::Call(v) => { + Ok(brillig::Opcode::Call { location: Self::decode_wrap(&v.location, "location")? }) + } + Value::Const(v) => Ok(brillig::Opcode::Const { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + bit_size: Self::decode_some_wrap(&v.bit_size, "bit_size")?, + value: Self::decode_some_wrap(&v.value, "value")?, + }), + Value::IndirectConst(v) => Ok(brillig::Opcode::IndirectConst { + destination_pointer: Self::decode_some_wrap( + &v.destination_pointer, + "destination_pointer", + )?, + bit_size: Self::decode_some_wrap(&v.bit_size, "bit_size")?, + value: Self::decode_some_wrap(&v.value, "value")?, + }), + Value::Return(_) => Ok(brillig::Opcode::Return), + Value::ForeignCall(v) => Ok(brillig::Opcode::ForeignCall { + function: v.function.clone(), + destinations: Self::decode_vec_wrap(&v.destinations, "destinations")?, + destination_value_types: Self::decode_vec_wrap( + &v.destination_value_types, + "destination_value_types", + )?, + inputs: Self::decode_vec_wrap(&v.inputs, "inputs")?, + input_value_types: Self::decode_vec_wrap( + &v.input_value_types, + "input_value_types", + )?, + }), + Value::Mov(v) => Ok(brillig::Opcode::Mov { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + source: Self::decode_some_wrap(&v.source, "source")?, + }), + Value::ConditionalMov(v) => Ok(brillig::Opcode::ConditionalMov { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + source_a: Self::decode_some_wrap(&v.source_a, "source_a")?, + source_b: Self::decode_some_wrap(&v.source_b, "source_b")?, + condition: Self::decode_some_wrap(&v.condition, "condition")?, + }), + Value::Load(v) => Ok(brillig::Opcode::Load { + destination: Self::decode_some_wrap(&v.destination, "destination")?, + source_pointer: Self::decode_some_wrap(&v.source_pointer, "source_pointer")?, + }), + Value::Store(v) => Ok(brillig::Opcode::Store { + destination_pointer: Self::decode_some_wrap( + &v.destination_pointer, + "destination_pointer", + )?, + source: Self::decode_some_wrap(&v.source, "source")?, + }), + Value::BlackBox(v) => { + Ok(brillig::Opcode::BlackBox(Self::decode_some_wrap(&v.op, "black_box")?)) + } + 
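+            // Trap aborts execution with `revert_data`, while Stop halts it
+            // with `return_data`; both decode via decode_some_wrap, so an
+            // absent field is reported as an error rather than defaulted.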
Value::Trap(v) => Ok(brillig::Opcode::Trap { + revert_data: Self::decode_some_wrap(&v.revert_data, "revert_data")?, + }), + Value::Stop(v) => Ok(brillig::Opcode::Stop { + return_data: Self::decode_some_wrap(&v.return_data, "return_data")?, + }), + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::MemoryAddress) -> MemoryAddress { + use crate::proto::brillig::memory_address::*; + let value = match value { + brillig::MemoryAddress::Direct(addr) => Value::Direct(Self::encode(addr)), + brillig::MemoryAddress::Relative(addr) => Value::Relative(Self::encode(addr)), + }; + MemoryAddress { value: Some(value) } + } + + fn decode(value: &MemoryAddress) -> eyre::Result { + use crate::proto::brillig::memory_address::*; + decode_oneof_map(&value.value, |value| match value { + Value::Direct(v) => Self::decode_wrap(v, "direct").map(brillig::MemoryAddress::Direct), + Value::Relative(v) => { + Self::decode_wrap(v, "relative").map(brillig::MemoryAddress::Relative) + } + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::BinaryFieldOp) -> BinaryFieldOpKind { + match value { + brillig::BinaryFieldOp::Add => BinaryFieldOpKind::BfoAdd, + brillig::BinaryFieldOp::Sub => BinaryFieldOpKind::BfoSub, + brillig::BinaryFieldOp::Mul => BinaryFieldOpKind::BfoMul, + brillig::BinaryFieldOp::Div => BinaryFieldOpKind::BfoDiv, + brillig::BinaryFieldOp::IntegerDiv => BinaryFieldOpKind::BfoIntegerDiv, + brillig::BinaryFieldOp::Equals => BinaryFieldOpKind::BfoEquals, + brillig::BinaryFieldOp::LessThan => BinaryFieldOpKind::BfoLessThan, + brillig::BinaryFieldOp::LessThanEquals => BinaryFieldOpKind::BfoLessThanEquals, + } + } + + fn decode(value: &BinaryFieldOpKind) -> eyre::Result { + match value { + BinaryFieldOpKind::BfoUnspecified => bail!("unspecified BinaryFieldOp"), + BinaryFieldOpKind::BfoAdd => Ok(brillig::BinaryFieldOp::Add), + BinaryFieldOpKind::BfoSub => Ok(brillig::BinaryFieldOp::Sub), + BinaryFieldOpKind::BfoMul => Ok(brillig::BinaryFieldOp::Mul), + BinaryFieldOpKind::BfoDiv => Ok(brillig::BinaryFieldOp::Div), + BinaryFieldOpKind::BfoIntegerDiv => Ok(brillig::BinaryFieldOp::IntegerDiv), + BinaryFieldOpKind::BfoEquals => Ok(brillig::BinaryFieldOp::Equals), + BinaryFieldOpKind::BfoLessThan => Ok(brillig::BinaryFieldOp::LessThan), + BinaryFieldOpKind::BfoLessThanEquals => Ok(brillig::BinaryFieldOp::LessThanEquals), + } + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::BinaryIntOp) -> BinaryIntOpKind { + match value { + brillig::BinaryIntOp::Add => BinaryIntOpKind::BioAdd, + brillig::BinaryIntOp::Sub => BinaryIntOpKind::BioSub, + brillig::BinaryIntOp::Mul => BinaryIntOpKind::BioMul, + brillig::BinaryIntOp::Div => BinaryIntOpKind::BioDiv, + brillig::BinaryIntOp::Equals => BinaryIntOpKind::BioEquals, + brillig::BinaryIntOp::LessThan => BinaryIntOpKind::BioLessThan, + brillig::BinaryIntOp::LessThanEquals => BinaryIntOpKind::BioLessThanEquals, + brillig::BinaryIntOp::And => BinaryIntOpKind::BioAnd, + brillig::BinaryIntOp::Or => BinaryIntOpKind::BioOr, + brillig::BinaryIntOp::Xor => BinaryIntOpKind::BioXor, + brillig::BinaryIntOp::Shl => BinaryIntOpKind::BioShl, + brillig::BinaryIntOp::Shr => BinaryIntOpKind::BioShr, + } + } + + fn decode(value: &BinaryIntOpKind) -> eyre::Result { + match value { + BinaryIntOpKind::BioUnspecified => bail!("unspecified BinaryIntOp"), + BinaryIntOpKind::BioAdd => Ok(brillig::BinaryIntOp::Add), + BinaryIntOpKind::BioSub => Ok(brillig::BinaryIntOp::Sub), + BinaryIntOpKind::BioMul => Ok(brillig::BinaryIntOp::Mul), + 
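+            // The `Bio` prefix exists only on the protobuf side, presumably
+            // because proto3 enum values share a single namespace per package
+            // and would otherwise clash with `BinaryFieldOpKind` variants.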
BinaryIntOpKind::BioDiv => Ok(brillig::BinaryIntOp::Div), + BinaryIntOpKind::BioEquals => Ok(brillig::BinaryIntOp::Equals), + BinaryIntOpKind::BioLessThan => Ok(brillig::BinaryIntOp::LessThan), + BinaryIntOpKind::BioLessThanEquals => Ok(brillig::BinaryIntOp::LessThanEquals), + BinaryIntOpKind::BioAnd => Ok(brillig::BinaryIntOp::And), + BinaryIntOpKind::BioOr => Ok(brillig::BinaryIntOp::Or), + BinaryIntOpKind::BioXor => Ok(brillig::BinaryIntOp::Xor), + BinaryIntOpKind::BioShl => Ok(brillig::BinaryIntOp::Shl), + BinaryIntOpKind::BioShr => Ok(brillig::BinaryIntOp::Shr), + } + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::IntegerBitSize) -> IntegerBitSize { + match value { + brillig::IntegerBitSize::U1 => IntegerBitSize::IbsU1, + brillig::IntegerBitSize::U8 => IntegerBitSize::IbsU8, + brillig::IntegerBitSize::U16 => IntegerBitSize::IbsU16, + brillig::IntegerBitSize::U32 => IntegerBitSize::IbsU32, + brillig::IntegerBitSize::U64 => IntegerBitSize::IbsU64, + brillig::IntegerBitSize::U128 => IntegerBitSize::IbsU128, + } + } + + fn decode(value: &IntegerBitSize) -> eyre::Result { + match value { + IntegerBitSize::IbsUnspecified => bail!("unspecified IntegerBitSize"), + IntegerBitSize::IbsU1 => Ok(brillig::IntegerBitSize::U1), + IntegerBitSize::IbsU8 => Ok(brillig::IntegerBitSize::U8), + IntegerBitSize::IbsU16 => Ok(brillig::IntegerBitSize::U16), + IntegerBitSize::IbsU32 => Ok(brillig::IntegerBitSize::U32), + IntegerBitSize::IbsU64 => Ok(brillig::IntegerBitSize::U64), + IntegerBitSize::IbsU128 => Ok(brillig::IntegerBitSize::U128), + } + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::BitSize) -> BitSize { + use crate::proto::brillig::bit_size::*; + let value = match value { + brillig::BitSize::Field => Value::Field(Field {}), + brillig::BitSize::Integer(integer_bit_size) => { + Value::Integer(Self::encode_enum(integer_bit_size)) + } + }; + BitSize { value: Some(value) } + } + + fn decode(value: &BitSize) -> eyre::Result { + use crate::proto::brillig::bit_size::*; + decode_oneof_map(&value.value, |value| match value { + Value::Field(_) => Ok(brillig::BitSize::Field), + Value::Integer(size) => { + Ok(brillig::BitSize::Integer(Self::decode_enum_wrap(*size, "size")?)) + } + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::ValueOrArray) -> ValueOrArray { + use crate::proto::brillig::value_or_array::*; + let value = match value { + brillig::ValueOrArray::MemoryAddress(memory_address) => { + Value::MemoryAddress(Self::encode(memory_address)) + } + brillig::ValueOrArray::HeapArray(heap_array) => { + Value::HeapArray(Self::encode(heap_array)) + } + brillig::ValueOrArray::HeapVector(heap_vector) => { + Value::HeapVector(Self::encode(heap_vector)) + } + }; + ValueOrArray { value: Some(value) } + } + + fn decode(value: &ValueOrArray) -> eyre::Result { + use crate::proto::brillig::value_or_array::*; + decode_oneof_map(&value.value, |value| match value { + Value::MemoryAddress(v) => { + Ok(brillig::ValueOrArray::MemoryAddress(Self::decode_wrap(v, "memory_address")?)) + } + Value::HeapArray(v) => { + Ok(brillig::ValueOrArray::HeapArray(Self::decode_wrap(v, "heap_array")?)) + } + Value::HeapVector(v) => { + Ok(brillig::ValueOrArray::HeapVector(Self::decode_wrap(v, "heap_vector")?)) + } + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::HeapValueType) -> HeapValueType { + use crate::proto::brillig::heap_value_type::*; + let value = match value { + brillig::HeapValueType::Simple(bit_size) => 
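+            // HeapValueType is recursive (Array and Vector nest value_types),
+            // which is also why the proptest strategy added later in this diff
+            // has to build it with `prop_recursive`: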
Value::Simple(Self::encode(bit_size)), + brillig::HeapValueType::Array { value_types, size } => Value::Array(Array { + value_types: Self::encode_vec(value_types), + size: *size as u64, + }), + brillig::HeapValueType::Vector { value_types } => { + Value::Vector(Vector { value_types: Self::encode_vec(value_types) }) + } + }; + HeapValueType { value: Some(value) } + } + + fn decode(value: &HeapValueType) -> eyre::Result { + use crate::proto::brillig::heap_value_type::*; + decode_oneof_map(&value.value, |value| match value { + Value::Simple(bit_size) => { + Ok(brillig::HeapValueType::Simple(Self::decode_wrap(bit_size, "simple")?)) + } + Value::Array(v) => Ok(brillig::HeapValueType::Array { + value_types: Self::decode_vec_wrap(&v.value_types, "value_types")?, + size: Self::decode_wrap(&v.size, "size")?, + }), + Value::Vector(v) => Ok(brillig::HeapValueType::Vector { + value_types: Self::decode_vec_wrap(&v.value_types, "value_types")?, + }), + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::HeapArray) -> HeapArray { + HeapArray { pointer: Self::encode_some(&value.pointer), size: Self::encode(&value.size) } + } + + fn decode(value: &HeapArray) -> eyre::Result { + Ok(brillig::HeapArray { + pointer: Self::decode_some_wrap(&value.pointer, "pointer")?, + size: Self::decode_wrap(&value.size, "size")?, + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::HeapVector) -> HeapVector { + HeapVector { + pointer: Self::encode_some(&value.pointer), + size: Self::encode_some(&value.size), + } + } + + fn decode(value: &HeapVector) -> eyre::Result { + Ok(brillig::HeapVector { + pointer: Self::decode_some_wrap(&value.pointer, "pointer")?, + size: Self::decode_some_wrap(&value.size, "size")?, + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &brillig::BlackBoxOp) -> BlackBoxOp { + use crate::proto::brillig::black_box_op::*; + let value = match value { + brillig::BlackBoxOp::AES128Encrypt { inputs, iv, key, outputs } => { + Value::Aes128Encrypt(Aes128Encrypt { + inputs: Self::encode_some(inputs), + iv: Self::encode_some(iv), + key: Self::encode_some(key), + outputs: Self::encode_some(outputs), + }) + } + brillig::BlackBoxOp::Blake2s { message, output } => Value::Blake2s(Blake2s { + message: Self::encode_some(message), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::Blake3 { message, output } => Value::Blake3(Blake3 { + message: Self::encode_some(message), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::Keccakf1600 { input, output } => Value::KeccakF1600(Keccakf1600 { + input: Self::encode_some(input), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::EcdsaSecp256k1 { + hashed_msg, + public_key_x, + public_key_y, + signature, + result, + } => Value::EcdsaSecp256k1(EcdsaSecp256k1 { + hashed_msg: Self::encode_some(hashed_msg), + public_key_x: Self::encode_some(public_key_x), + public_key_y: Self::encode_some(public_key_y), + signature: Self::encode_some(signature), + result: Self::encode_some(result), + }), + brillig::BlackBoxOp::EcdsaSecp256r1 { + hashed_msg, + public_key_x, + public_key_y, + signature, + result, + } => Value::EcdsaSecp256r1(EcdsaSecp256r1 { + hashed_msg: Self::encode_some(hashed_msg), + public_key_x: Self::encode_some(public_key_x), + public_key_y: Self::encode_some(public_key_y), + signature: Self::encode_some(signature), + result: Self::encode_some(result), + }), + brillig::BlackBoxOp::MultiScalarMul { points, scalars, outputs } => { + Value::MultiScalarMul(MultiScalarMul { + 
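+                    // points, scalars and outputs describe VM heap regions
+                    // (pointer plus size), not the scalar values themselves: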
points: Self::encode_some(points), + scalars: Self::encode_some(scalars), + outputs: Self::encode_some(outputs), + }) + } + brillig::BlackBoxOp::EmbeddedCurveAdd { + input1_x, + input1_y, + input1_infinite, + input2_x, + input2_y, + input2_infinite, + result, + } => Value::EmbeddedCurveAdd(EmbeddedCurveAdd { + input1_x: Self::encode_some(input1_x), + input1_y: Self::encode_some(input1_y), + input1_infinite: Self::encode_some(input1_infinite), + input2_x: Self::encode_some(input2_x), + input2_y: Self::encode_some(input2_y), + input2_infinite: Self::encode_some(input2_infinite), + result: Self::encode_some(result), + }), + brillig::BlackBoxOp::BigIntAdd { lhs, rhs, output } => Value::BigIntAdd(BigIntAdd { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::BigIntSub { lhs, rhs, output } => Value::BigIntSub(BigIntSub { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::BigIntMul { lhs, rhs, output } => Value::BigIntMul(BigIntMul { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::BigIntDiv { lhs, rhs, output } => Value::BigIntDiv(BigIntDiv { + lhs: Self::encode_some(lhs), + rhs: Self::encode_some(rhs), + output: Self::encode_some(output), + }), + brillig::BlackBoxOp::BigIntFromLeBytes { inputs, modulus, output } => { + Value::BigIntFromLeBytes(BigIntFromLeBytes { + inputs: Self::encode_some(inputs), + modulus: Self::encode_some(modulus), + output: Self::encode_some(output), + }) + } + brillig::BlackBoxOp::BigIntToLeBytes { input, output } => { + Value::BigIntToLeBytes(BigIntToLeBytes { + input: Self::encode_some(input), + output: Self::encode_some(output), + }) + } + brillig::BlackBoxOp::Poseidon2Permutation { message, output, len } => { + Value::Poseidon2Permutation(Poseidon2Permutation { + message: Self::encode_some(message), + output: Self::encode_some(output), + len: Self::encode_some(len), + }) + } + brillig::BlackBoxOp::Sha256Compression { input, hash_values, output } => { + Value::Sha256Compression(Sha256Compression { + input: Self::encode_some(input), + hash_values: Self::encode_some(hash_values), + output: Self::encode_some(output), + }) + } + brillig::BlackBoxOp::ToRadix { + input, + radix, + output_pointer, + num_limbs, + output_bits, + } => Value::ToRadix(ToRadix { + input: Self::encode_some(input), + radix: Self::encode_some(radix), + output_pointer: Self::encode_some(output_pointer), + num_limbs: Self::encode_some(num_limbs), + output_bits: Self::encode_some(output_bits), + }), + }; + BlackBoxOp { value: Some(value) } + } + + fn decode(value: &BlackBoxOp) -> eyre::Result { + use crate::proto::brillig::black_box_op::*; + decode_oneof_map(&value.value, |value| match value { + Value::Aes128Encrypt(v) => Ok(brillig::BlackBoxOp::AES128Encrypt { + inputs: Self::decode_some_wrap(&v.inputs, "inputs")?, + iv: Self::decode_some_wrap(&v.iv, "iv")?, + key: Self::decode_some_wrap(&v.key, "key")?, + outputs: Self::decode_some_wrap(&v.outputs, "outputs")?, + }), + Value::Blake2s(v) => Ok(brillig::BlackBoxOp::Blake2s { + message: Self::decode_some_wrap(&v.message, "message")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::Blake3(v) => Ok(brillig::BlackBoxOp::Blake3 { + message: Self::decode_some_wrap(&v.message, "message")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::KeccakF1600(v) => 
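+            // Note the casing mismatch: the proto variant is `KeccakF1600`
+            // while the Brillig opcode is spelled `Keccakf1600`.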
Ok(brillig::BlackBoxOp::Keccakf1600 { + input: Self::decode_some_wrap(&v.input, "input")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::EcdsaSecp256k1(v) => Ok(brillig::BlackBoxOp::EcdsaSecp256k1 { + hashed_msg: Self::decode_some_wrap(&v.hashed_msg, "hashed_msg")?, + public_key_x: Self::decode_some_wrap(&v.public_key_x, "public_key_x")?, + public_key_y: Self::decode_some_wrap(&v.public_key_y, "public_key_y")?, + signature: Self::decode_some_wrap(&v.signature, "signature")?, + result: Self::decode_some_wrap(&v.result, "result")?, + }), + Value::EcdsaSecp256r1(v) => Ok(brillig::BlackBoxOp::EcdsaSecp256r1 { + hashed_msg: Self::decode_some_wrap(&v.hashed_msg, "hashed_msg")?, + public_key_x: Self::decode_some_wrap(&v.public_key_x, "public_key_x")?, + public_key_y: Self::decode_some_wrap(&v.public_key_y, "public_key_y")?, + signature: Self::decode_some_wrap(&v.signature, "signature")?, + result: Self::decode_some_wrap(&v.result, "result")?, + }), + Value::MultiScalarMul(v) => Ok(brillig::BlackBoxOp::MultiScalarMul { + points: Self::decode_some_wrap(&v.points, "points")?, + scalars: Self::decode_some_wrap(&v.scalars, "scalars")?, + outputs: Self::decode_some_wrap(&v.outputs, "outputs")?, + }), + Value::EmbeddedCurveAdd(v) => Ok(brillig::BlackBoxOp::EmbeddedCurveAdd { + input1_x: Self::decode_some_wrap(&v.input1_x, "input1_x")?, + input1_y: Self::decode_some_wrap(&v.input1_y, "input1_y")?, + input1_infinite: Self::decode_some_wrap(&v.input1_infinite, "input1_infinite")?, + input2_x: Self::decode_some_wrap(&v.input2_x, "input2_x")?, + input2_y: Self::decode_some_wrap(&v.input2_y, "input2_y")?, + input2_infinite: Self::decode_some_wrap(&v.input2_infinite, "input2_infinite")?, + result: Self::decode_some_wrap(&v.result, "result")?, + }), + Value::BigIntAdd(v) => Ok(brillig::BlackBoxOp::BigIntAdd { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::BigIntSub(v) => Ok(brillig::BlackBoxOp::BigIntSub { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::BigIntMul(v) => Ok(brillig::BlackBoxOp::BigIntMul { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::BigIntDiv(v) => Ok(brillig::BlackBoxOp::BigIntDiv { + lhs: Self::decode_some_wrap(&v.lhs, "lhs")?, + rhs: Self::decode_some_wrap(&v.rhs, "rhs")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::BigIntFromLeBytes(v) => Ok(brillig::BlackBoxOp::BigIntFromLeBytes { + inputs: Self::decode_some_wrap(&v.inputs, "inputs")?, + modulus: Self::decode_some_wrap(&v.modulus, "modulus")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::BigIntToLeBytes(v) => Ok(brillig::BlackBoxOp::BigIntToLeBytes { + input: Self::decode_some_wrap(&v.input, "input")?, + output: Self::decode_some_wrap(&v.output, "output")?, + }), + Value::Poseidon2Permutation(v) => Ok(brillig::BlackBoxOp::Poseidon2Permutation { + message: Self::decode_some_wrap(&v.message, "message")?, + output: Self::decode_some_wrap(&v.output, "output")?, + len: Self::decode_some_wrap(&v.len, "len")?, + }), + Value::Sha256Compression(v) => Ok(brillig::BlackBoxOp::Sha256Compression { + input: Self::decode_some_wrap(&v.input, "input")?, + hash_values: Self::decode_some_wrap(&v.hash_values, "hash_values")?, + output: 
Self::decode_some_wrap(&v.output, "output")?, + }), + Value::ToRadix(v) => Ok(brillig::BlackBoxOp::ToRadix { + input: Self::decode_some_wrap(&v.input, "input")?, + radix: Self::decode_some_wrap(&v.radix, "radix")?, + output_pointer: Self::decode_some_wrap(&v.output_pointer, "output_pointer")?, + num_limbs: Self::decode_some_wrap(&v.num_limbs, "num_limbs")?, + output_bits: Self::decode_some_wrap(&v.output_bits, "output_bits")?, + }), + }) + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/convert/mod.rs b/noir/noir-repo/acvm-repo/acir/src/proto/convert/mod.rs new file mode 100644 index 00000000000..9d68ba665e1 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/convert/mod.rs @@ -0,0 +1,49 @@ +use std::marker::PhantomData; + +use acir_field::AcirField; +use color_eyre::eyre::{self, Context}; +use noir_protobuf::ProtoCodec; + +use crate::circuit; +use crate::proto::program::Program; + +mod acir; +mod brillig; +mod native; +mod witness; + +pub(crate) struct ProtoSchema { + field: PhantomData, +} + +impl ProtoCodec, Program> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &circuit::Program) -> Program { + Program { + functions: Self::encode_vec(&value.functions), + unconstrained_functions: Self::encode_vec(&value.unconstrained_functions), + } + } + + fn decode(value: &Program) -> eyre::Result> { + Ok(circuit::Program { + functions: Self::decode_vec_wrap(&value.functions, "functions")?, + unconstrained_functions: Self::decode_vec_wrap( + &value.unconstrained_functions, + "unconstrained_functions", + )?, + }) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &usize) -> u64 { + *value as u64 + } + + fn decode(value: &u64) -> eyre::Result { + (*value).try_into().wrap_err("failed to convert u64 to usize") + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/convert/native.rs b/noir/noir-repo/acvm-repo/acir/src/proto/convert/native.rs new file mode 100644 index 00000000000..5d34ecf14f9 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/convert/native.rs @@ -0,0 +1,80 @@ +use acir_field::AcirField; +use color_eyre::eyre; +use noir_protobuf::{ProtoCodec, decode_vec_map_wrap}; + +use crate::{ + native_types, + proto::acir::native::{Expression, Field, Witness}, +}; + +use super::ProtoSchema; + +impl ProtoCodec for ProtoSchema { + fn encode(value: &F) -> Field { + Field { value: value.to_le_bytes() } + } + + fn decode(value: &Field) -> eyre::Result { + Ok(F::from_le_bytes_reduce(&value.value)) + } +} + +impl ProtoCodec for ProtoSchema { + fn encode(value: &native_types::Witness) -> Witness { + Witness { index: value.0 } + } + + fn decode(value: &Witness) -> eyre::Result { + Ok(native_types::Witness(value.index)) + } +} + +impl ProtoCodec, Expression> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &native_types::Expression) -> Expression { + use crate::proto::acir::native::expression::*; + Expression { + mul_terms: value + .mul_terms + .iter() + .map(|(q_m, wl, wr)| MulTerm { + q_m: Self::encode_some(q_m), + witness_left: Self::encode_some(wl), + witness_right: Self::encode_some(wr), + }) + .collect(), + linear_combinations: value + .linear_combinations + .iter() + .map(|(q_l, w)| LinearCombination { + q_l: Self::encode_some(q_l), + witness: Self::encode_some(w), + }) + .collect(), + q_c: Self::encode_some(&value.q_c), + } + } + + fn decode(value: &Expression) -> eyre::Result> { + Ok(native_types::Expression { + mul_terms: decode_vec_map_wrap(&value.mul_terms, "mul_terms", |mt| { + let q_m = Self::decode_some_wrap(&mt.q_m, "q_m")?; + 
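+                // Hypothetical example: the ACIR mul term (2, w0, w1), i.e. the
+                // product 2*w0*w1, arrives as a MulTerm carrying q_m = 2 plus
+                // the two witness indices, and is rebuilt as the same triple.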
let wl = Self::decode_some_wrap(&mt.witness_left, "witness_left")?; + let wr = Self::decode_some_wrap(&mt.witness_right, "witness_right")?; + Ok((q_m, wl, wr)) + })?, + linear_combinations: decode_vec_map_wrap( + &value.linear_combinations, + "linear_combinations", + |lc| { + let q_l = Self::decode_some_wrap(&lc.q_l, "q_l")?; + let w = Self::decode_some_wrap(&lc.witness, "witness")?; + Ok((q_l, w)) + }, + )?, + q_c: Self::decode_some_wrap(&value.q_c, "q_c")?, + }) + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/convert/witness.rs b/noir/noir-repo/acvm-repo/acir/src/proto/convert/witness.rs new file mode 100644 index 00000000000..e926b9ea00b --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/convert/witness.rs @@ -0,0 +1,64 @@ +use acir_field::AcirField; +use noir_protobuf::ProtoCodec; + +use crate::native_types; +use crate::proto::acir::witness::{WitnessMap, WitnessStack}; + +use super::ProtoSchema; + +impl ProtoCodec, WitnessMap> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &native_types::WitnessMap) -> WitnessMap { + use crate::proto::acir::witness::witness_map::*; + + let values = value + .clone() + .into_iter() + .map(|(w, f)| WitnessValue { + witness: Self::encode_some(&w), + field: Self::encode_some(&f), + }) + .collect(); + + WitnessMap { values } + } + + fn decode(value: &WitnessMap) -> color_eyre::eyre::Result> { + let mut wm = native_types::WitnessMap::default(); + for wv in &value.values { + wm.insert( + Self::decode_some_wrap(&wv.witness, "witness")?, + Self::decode_some_wrap(&wv.field, "field")?, + ); + } + Ok(wm) + } +} + +impl ProtoCodec, WitnessStack> for ProtoSchema +where + F: AcirField, +{ + fn encode(value: &native_types::WitnessStack) -> WitnessStack { + use crate::proto::acir::witness::witness_stack::*; + + let mut value = value.clone(); + let mut stack = Vec::new(); + while let Some(item) = value.pop() { + stack.push(StackItem { index: item.index, witness: Self::encode_some(&item.witness) }); + } + stack.reverse(); + + WitnessStack { stack } + } + + fn decode(value: &WitnessStack) -> color_eyre::eyre::Result> { + let mut ws = native_types::WitnessStack::default(); + for item in &value.stack { + ws.push(item.index, Self::decode_some_wrap(&item.witness, "witness")?); + } + Ok(ws) + } +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/mod.rs b/noir/noir-repo/acvm-repo/acir/src/proto/mod.rs new file mode 100644 index 00000000000..ba5bbc5108c --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/mod.rs @@ -0,0 +1,29 @@ +pub(crate) mod convert; + +pub(crate) mod acir { + + #[allow(unreachable_pub)] + pub(crate) mod native { + include!(concat!(env!("OUT_DIR"), "/acvm.acir.native.rs")); + } + + #[allow(unreachable_pub)] + pub(crate) mod witness { + include!(concat!(env!("OUT_DIR"), "/acvm.acir.witness.rs")); + } + + #[allow(unreachable_pub)] + pub(crate) mod circuit { + include!(concat!(env!("OUT_DIR"), "/acvm.acir.circuit.rs")); + } +} + +#[allow(unreachable_pub, clippy::enum_variant_names)] +pub(crate) mod brillig { + include!(concat!(env!("OUT_DIR"), "/acvm.brillig.rs")); +} + +#[allow(unreachable_pub)] +pub(crate) mod program { + include!(concat!(env!("OUT_DIR"), "/acvm.program.rs")); +} diff --git a/noir/noir-repo/acvm-repo/acir/src/proto/program.proto b/noir/noir-repo/acvm-repo/acir/src/proto/program.proto new file mode 100644 index 00000000000..d54d0aff5a9 --- /dev/null +++ b/noir/noir-repo/acvm-repo/acir/src/proto/program.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; + +package acvm.program; + +import public 
"acir/circuit.proto"; +import public "brillig.proto"; + +// A program represents an entire circuit with ACIR and Brillig functions and +// potentially multiple endpoints. +message Program { + // ACIR circuits + repeated acvm.acir.circuit.Circuit functions = 1; + // Brillig functions + repeated acvm.brillig.BrilligBytecode unconstrained_functions = 2; +} diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index 4ff571106a1..4f9a5fb76c5 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -56,7 +56,10 @@ fn addition_circuit() { 135, 223, 13, 27, 135, 121, 106, 119, 3, 58, 173, 124, 163, 140, 1, 0, 0, ]; - assert_eq!(bytes, expected_serialization) + assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } #[test] @@ -99,7 +102,10 @@ fn multi_scalar_mul_circuit() { 179, 90, 23, 212, 196, 199, 187, 192, 0, 0, 0, ]; - assert_eq!(bytes, expected_serialization) + assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } #[test] @@ -173,7 +179,10 @@ fn simple_brillig_foreign_call() { 191, 40, 237, 37, 127, 1, 190, 36, 121, 0, 128, 254, 118, 42, 127, 2, 0, 0, ]; - assert_eq!(bytes, expected_serialization) + assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } #[test] @@ -323,7 +332,10 @@ fn complex_brillig_foreign_call() { 250, 76, 4, 233, 188, 7, 0, 0, ]; - assert_eq!(bytes, expected_serialization) + assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } #[test] @@ -365,7 +377,10 @@ fn memory_op_circuit() { 0, 0, ]; - assert_eq!(bytes, expected_serialization) + assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } #[test] @@ -472,4 +487,7 @@ fn nested_acir_call_circuit() { 253, 11, 4, 0, 0, ]; assert_eq!(bytes, expected_serialization); + + let program_de = Program::deserialize_program(&bytes).unwrap(); + assert_eq!(program_de, program); } diff --git a/noir/noir-repo/acvm-repo/acir_field/src/generic_ark.rs b/noir/noir-repo/acvm-repo/acir_field/src/generic_ark.rs index 04761dd1ed0..3f2b27b864c 100644 --- a/noir/noir-repo/acvm-repo/acir_field/src/generic_ark.rs +++ b/noir/noir-repo/acvm-repo/acir_field/src/generic_ark.rs @@ -25,6 +25,7 @@ pub trait AcirField: + From + std::hash::Hash + Eq + + 'static { fn one() -> Self; fn zero() -> Self; @@ -85,3 +86,198 @@ pub trait AcirField: /// This method truncates fn fetch_nearest_bytes(&self, num_bits: usize) -> Vec; } + +/// Define a _newtype_ wrapper around an `AcirField` by implementing all the +/// boilerplate for forwarding the field operations. +/// +/// This allows the wrapper to implement traits such as `Arbitrary`, and then +/// be used by code that is generic in `F: AcirField`. +/// +/// # Example +/// ```ignore +/// field_wrapper!(TestField, FieldElement); +/// ``` +#[macro_export] +macro_rules! 
field_wrapper { + ($wrapper:ident, $field:ident) => { + #[derive( + Clone, + Debug, + PartialEq, + Eq, + Hash, + PartialOrd, + Copy, + Default, + serde::Serialize, + serde::Deserialize, + )] + struct $wrapper(pub $field); + + impl $crate::AcirField for $wrapper { + fn one() -> Self { + Self($field::one()) + } + + fn zero() -> Self { + Self($field::zero()) + } + + fn is_zero(&self) -> bool { + self.0.is_zero() + } + + fn is_one(&self) -> bool { + self.0.is_one() + } + + fn pow(&self, exponent: &Self) -> Self { + Self(self.0.pow(&exponent.0)) + } + + fn max_num_bits() -> u32 { + $field::max_num_bits() + } + + fn max_num_bytes() -> u32 { + $field::max_num_bytes() + } + + fn modulus() -> num_bigint::BigUint { + $field::modulus() + } + + fn num_bits(&self) -> u32 { + self.0.num_bits() + } + + fn to_u128(self) -> u128 { + self.0.to_u128() + } + + fn try_into_u128(self) -> Option { + self.0.try_into_u128() + } + + fn to_i128(self) -> i128 { + self.0.to_i128() + } + + fn try_to_u64(&self) -> Option { + self.0.try_to_u64() + } + + fn try_to_u32(&self) -> Option { + self.0.try_to_u32() + } + + fn inverse(&self) -> Self { + Self(self.0.inverse()) + } + + fn to_hex(self) -> String { + self.0.to_hex() + } + + fn from_hex(hex_str: &str) -> Option { + $field::from_hex(hex_str).map(Self) + } + + fn to_be_bytes(self) -> Vec { + self.0.to_be_bytes() + } + + fn from_be_bytes_reduce(bytes: &[u8]) -> Self { + Self($field::from_be_bytes_reduce(bytes)) + } + + fn from_le_bytes_reduce(bytes: &[u8]) -> Self { + Self($field::from_le_bytes_reduce(bytes)) + } + + fn to_le_bytes(self) -> Vec { + self.0.to_le_bytes() + } + + fn fetch_nearest_bytes(&self, num_bits: usize) -> Vec { + self.0.fetch_nearest_bytes(num_bits) + } + } + + impl From for $wrapper { + fn from(value: bool) -> Self { + Self($field::from(value)) + } + } + + impl From for $wrapper { + fn from(value: u128) -> Self { + Self($field::from(value)) + } + } + + impl From for $wrapper { + fn from(value: usize) -> Self { + Self($field::from(value)) + } + } + + impl std::ops::SubAssign<$wrapper> for $wrapper { + fn sub_assign(&mut self, rhs: $wrapper) { + self.0.sub_assign(rhs.0); + } + } + + impl std::ops::AddAssign<$wrapper> for $wrapper { + fn add_assign(&mut self, rhs: $wrapper) { + self.0.add_assign(rhs.0); + } + } + + impl std::ops::Add<$wrapper> for $wrapper { + type Output = Self; + + fn add(self, rhs: $wrapper) -> Self::Output { + Self(self.0.add(rhs.0)) + } + } + + impl std::ops::Sub<$wrapper> for $wrapper { + type Output = Self; + + fn sub(self, rhs: $wrapper) -> Self::Output { + Self(self.0.sub(rhs.0)) + } + } + + impl std::ops::Mul<$wrapper> for $wrapper { + type Output = Self; + + fn mul(self, rhs: $wrapper) -> Self::Output { + Self(self.0.mul(rhs.0)) + } + } + + impl std::ops::Div<$wrapper> for $wrapper { + type Output = Self; + + fn div(self, rhs: $wrapper) -> Self::Output { + Self(self.0.div(rhs.0)) + } + } + + impl std::ops::Neg for $wrapper { + type Output = Self; + + fn neg(self) -> Self::Output { + Self(self.0.neg()) + } + } + + impl std::fmt::Display for $wrapper { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } + } + }; +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs index 2590c5f208a..0e0d22edcd8 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs @@ -12,7 +12,7 
@@ use acir::{
 
 use crate::compiler::CircuitSimulator;
 
-pub(crate) struct MergeExpressionsOptimizer<F> {
+pub(crate) struct MergeExpressionsOptimizer<F: AcirField> {
     resolved_blocks: HashMap<BlockId, BTreeSet<Witness>>,
     modified_gates: HashMap<usize, Opcode<F>>,
     deleted_gates: BTreeSet<usize>,
diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs
index 67dce75411e..120a963192e 100644
--- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs
+++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs
@@ -27,7 +27,7 @@ use std::collections::{BTreeMap, HashSet};
 ///
 /// This optimization pass will keep the 16-bit range constraint
 /// and remove the 32-bit range constraint opcode.
-pub(crate) struct RangeOptimizer<F> {
+pub(crate) struct RangeOptimizer<F: AcirField> {
     /// Maps witnesses to their lowest known bit sizes.
     lists: BTreeMap<Witness, u32>,
     circuit: Circuit<F>,
diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs
index 8b7e52d66f2..3a256aafe63 100644
--- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs
+++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs
@@ -1,13 +1,16 @@
-use acir::circuit::{Circuit, Opcode, brillig::BrilligInputs, opcodes::BlockId};
+use acir::{
+    AcirField,
+    circuit::{Circuit, Opcode, brillig::BrilligInputs, opcodes::BlockId},
+};
 use std::collections::HashSet;
 
 /// `UnusedMemoryOptimizer` will remove initializations of memory blocks which are unused.
-pub(crate) struct UnusedMemoryOptimizer<F> {
+pub(crate) struct UnusedMemoryOptimizer<F: AcirField> {
     unused_memory_initializations: HashSet<BlockId>,
     circuit: Circuit<F>,
 }
 
-impl<F> UnusedMemoryOptimizer<F> {
+impl<F: AcirField> UnusedMemoryOptimizer<F> {
     /// Creates a new `UnusedMemoryOptimizer` by collecting unused memory init
     /// opcodes from `Circuit`.
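+    /// A block counts as used if a later opcode reads from it, including a
+    /// Brillig call taking it as a `BrilligInputs::MemoryArray` input.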
pub(crate) fn new(circuit: Circuit) -> Self { diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/arithmetic.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/arithmetic.rs index 78da63ddc0f..a2921bcbc9b 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/arithmetic.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/arithmetic.rs @@ -254,7 +254,52 @@ mod tests { use acir::FieldElement; #[test] - fn expression_solver_smoke_test() { + fn solves_simple_assignment() { + let a = Witness(0); + + // a - 1 == 0; + let opcode_a = Expression { + mul_terms: vec![], + linear_combinations: vec![(FieldElement::one(), a)], + q_c: -FieldElement::one(), + }; + + let mut values = WitnessMap::new(); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_a), Ok(())); + + assert_eq!(values.get(&a).unwrap(), &FieldElement::from(1_i128)); + } + + #[test] + fn solves_unknown_in_mul_term() { + let a = Witness(0); + let b = Witness(1); + let c = Witness(2); + let d = Witness(3); + + // a * b - b - c - d == 0; + let opcode_a = Expression { + mul_terms: vec![(FieldElement::one(), a, b)], + linear_combinations: vec![ + (-FieldElement::one(), b), + (-FieldElement::one(), c), + (-FieldElement::one(), d), + ], + q_c: FieldElement::zero(), + }; + + let mut values = WitnessMap::new(); + values.insert(b, FieldElement::from(2_i128)); + values.insert(c, FieldElement::from(1_i128)); + values.insert(d, FieldElement::from(1_i128)); + + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_a), Ok(())); + + assert_eq!(values.get(&a).unwrap(), &FieldElement::from(2_i128)); + } + + #[test] + fn solves_unknown_in_linear_term() { let a = Witness(0); let b = Witness(1); let c = Witness(2); diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 93f9ea410bc..a4af9de55cf 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -136,11 +136,10 @@ pub(crate) fn solve_poseidon2_permutation_opcode( } // Read witness assignments - let mut state = Vec::new(); - for input in inputs.iter() { - let witness_assignment = input_to_value(initial_witness, *input, false)?; - state.push(witness_assignment); - } + let state: Vec = inputs + .iter() + .map(|input| input_to_value(initial_witness, *input, false)) + .collect::>()?; let state = backend.poseidon2_permutation(&state, len)?; diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/range.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/range.rs index 927706f4671..039a04b9063 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/range.rs @@ -21,3 +21,36 @@ pub(crate) fn solve_range_opcode( } Ok(()) } + +#[cfg(test)] +mod tests { + use std::collections::BTreeMap; + + use acir::{ + FieldElement, + circuit::opcodes::FunctionInput, + native_types::{Witness, WitnessMap}, + }; + + use crate::pwg::blackbox::solve_range_opcode; + + #[test] + fn rejects_too_large_inputs() { + let witness_map = + WitnessMap::from(BTreeMap::from([(Witness(0), FieldElement::from(256u32))])); + let input: FunctionInput = FunctionInput::witness(Witness(0), 8); + assert!(solve_range_opcode(&witness_map, &input, false).is_err()); + } + + #[test] + fn accepts_valid_inputs() { + let values: [u32; 4] = [0, 1, 8, 255]; + + for value in values { + let witness_map = + WitnessMap::from(BTreeMap::from([(Witness(0), FieldElement::from(value))])); + let input: FunctionInput = FunctionInput::witness(Witness(0), 8); + 
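+            // 0, 1, 8 and 255 all fit within 8 bits, so every iteration of
+            // this loop should satisfy the range constraint.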
assert!(solve_range_opcode(&witness_map, &input, false).is_ok()); + } + } +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index f5d56df17c1..3c66b08eb4c 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -178,7 +178,7 @@ pub struct ProfilingSample { pub brillig_function_id: Option, } -pub struct ACVM<'a, F, B: BlackBoxFunctionSolver> { +pub struct ACVM<'a, F: AcirField, B: BlackBoxFunctionSolver> { status: ACVMStatus, backend: &'a B, diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 37fe5d05363..af0104b54f0 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -25,8 +25,8 @@ pub trait BlackBoxFunctionSolver { ) -> Result<(F, F, F), BlackBoxResolutionError>; fn poseidon2_permutation( &self, - _inputs: &[F], - _len: u32, + inputs: &[F], + len: u32, ) -> Result, BlackBoxResolutionError>; } diff --git a/noir/noir-repo/acvm-repo/brillig/Cargo.toml b/noir/noir-repo/acvm-repo/brillig/Cargo.toml index 5a9720238ac..0c052d8f700 100644 --- a/noir/noir-repo/acvm-repo/brillig/Cargo.toml +++ b/noir/noir-repo/acvm-repo/brillig/Cargo.toml @@ -18,7 +18,11 @@ workspace = true [dependencies] acir_field.workspace = true serde.workspace = true +proptest = { workspace = true, optional = true } +proptest-derive = { workspace = true, optional = true } [features] +default = [] bn254 = ["acir_field/bn254"] bls12_381 = ["acir_field/bls12_381"] +arb = ["proptest", "proptest-derive"] diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index eb496d0f826..67ddf21589f 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; /// These opcodes provide an equivalent of ACIR blackbox functions. /// They are implemented as native functions in the VM. #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BlackBoxOp { /// Encrypts a message using AES128. AES128Encrypt { diff --git a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs index 1cb31ca3d0a..bd083b914d4 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs @@ -5,6 +5,7 @@ use serde::{Deserialize, Serialize}; pub type Label = usize; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum MemoryAddress { Direct(usize), Relative(usize), @@ -82,6 +83,7 @@ impl HeapValueType { /// A fixed-sized array starting from a Brillig memory location. 
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct HeapArray { pub pointer: MemoryAddress, pub size: usize, @@ -95,12 +97,14 @@ impl Default for HeapArray { /// A memory-sized vector passed starting from a Brillig memory location and with a memory-held size #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub struct HeapVector { pub pointer: MemoryAddress, pub size: MemoryAddress, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum IntegerBitSize { U1, U8, @@ -153,6 +157,7 @@ impl std::fmt::Display for IntegerBitSize { } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BitSize { Field, Integer(IntegerBitSize), @@ -182,6 +187,7 @@ impl BitSize { /// this needs to be encoded somehow when dealing with an external system. /// For simplicity, the extra type information is given right in the ForeignCall instructions. #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum ValueOrArray { /// A single value passed to or from an external call /// It is an 'immediate' value - used without dereferencing. @@ -199,6 +205,7 @@ pub enum ValueOrArray { } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BrilligOpcode { /// Takes the fields in addresses `lhs` and `rhs` /// Performs the specified binary operation @@ -315,6 +322,7 @@ pub enum BrilligOpcode { /// Binary fixed-length field expressions #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BinaryFieldOp { Add, Sub, @@ -333,6 +341,7 @@ pub enum BinaryFieldOp { /// Binary fixed-length integer expressions #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[cfg_attr(feature = "arb", derive(proptest_derive::Arbitrary))] pub enum BinaryIntOp { Add, Sub, @@ -355,3 +364,31 @@ pub enum BinaryIntOp { /// (>>) Shift right Shr, } + +#[cfg(feature = "arb")] +mod tests { + use proptest::arbitrary::Arbitrary; + use proptest::prelude::*; + + use super::{BitSize, HeapValueType}; + + // Need to define recursive strategy for `HeapValueType` + impl Arbitrary for HeapValueType { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + let leaf = any::().prop_map(HeapValueType::Simple); + leaf.prop_recursive(2, 3, 2, |inner| { + prop_oneof![ + (prop::collection::vec(inner.clone(), 1..3), any::()).prop_map( + |(value_types, size)| { HeapValueType::Array { value_types, size } } + ), + (prop::collection::vec(inner.clone(), 1..3)) + .prop_map(|value_types| { HeapValueType::Vector { value_types } }), + ] + }) + .boxed() + } + } +} diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/lib.rs b/noir/noir-repo/acvm-repo/brillig_vm/src/lib.rs index 935c296d5ae..a2c52eaa429 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/lib.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/lib.rs @@ -1737,7 +1737,7 @@ mod tests { ) -> VM<'a, F, StubbedBlackBoxSolver> { let mut vm = 
VM::new(calldata, opcodes, solver, false); brillig_execute(&mut vm); - assert_eq!(vm.call_stack, vec![]); + assert!(vm.call_stack.is_empty()); vm } diff --git a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh index de1f71a4cc0..f1fc321d1ff 100755 --- a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -17,19 +17,19 @@ KEYS=$(mktemp -d) # Codegen verifier contract for 1_mul mul_dir=$repo_root/test_programs/execution_success/1_mul nargo --program-dir $mul_dir compile -$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul -$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol +$NARGO_BACKEND_PATH OLD_API write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul +$NARGO_BACKEND_PATH OLD_API contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol # Codegen verifier contract for assert_statement assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement nargo --program-dir $assert_statement_dir compile -$NARGO_BACKEND_PATH write_vk -b $assert_statement_dir/target/assert_statement.json -o $KEYS/assert_statement -$NARGO_BACKEND_PATH contract -k $KEYS/assert_statement -o $contracts_dir/assert_statement.sol +$NARGO_BACKEND_PATH OLD_API write_vk -b $assert_statement_dir/target/assert_statement.json -o $KEYS/assert_statement +$NARGO_BACKEND_PATH OLD_API contract -k $KEYS/assert_statement -o $contracts_dir/assert_statement.sol # Codegen verifier contract for recursion recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion nargo --program-dir $recursion_dir compile -$NARGO_BACKEND_PATH write_vk -b $recursion_dir/target/recursion.json -o $KEYS/recursion -$NARGO_BACKEND_PATH contract -k $KEYS/recursion ./ -o $contracts_dir/recursion.sol +$NARGO_BACKEND_PATH OLD_API write_vk -b $recursion_dir/target/recursion.json -o $KEYS/recursion +$NARGO_BACKEND_PATH OLD_API contract -k $KEYS/recursion -o $contracts_dir/recursion.sol -rm -rf $KEYS \ No newline at end of file +rm -rf $KEYS diff --git a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs index 3bbe2181798..25d79bf16a2 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs @@ -136,7 +136,7 @@ pub(super) fn abi_type_from_hir_type(context: &Context, typ: &Type) -> AbiType { | Type::Slice(_) | Type::Function(_, _, _, _) => unreachable!("{typ} cannot be used in the abi"), Type::FmtString(_, _) => unreachable!("format strings cannot be used in the abi"), - Type::MutableReference(_) => unreachable!("&mut cannot be used in the abi"), + Type::Reference(..) 
=> unreachable!("references cannot be used in the abi"), } } diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index a70d9fc72b6..3712634d707 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -10,7 +10,7 @@ use clap::Args; use fm::{FileId, FileManager}; use iter_extended::vecmap; use noirc_abi::{AbiParameter, AbiType, AbiValue}; -use noirc_errors::{CustomDiagnostic, DiagnosticKind, FileDiagnostic}; +use noirc_errors::{CustomDiagnostic, DiagnosticKind}; use noirc_evaluator::brillig::BrilligOptions; use noirc_evaluator::create_program; use noirc_evaluator::errors::RuntimeError; @@ -133,20 +133,16 @@ pub struct CompileOptions { #[arg(long)] pub skip_underconstrained_check: bool, - /// Flag to turn on the compiler check for missing Brillig call constraints. - /// Warning: This can degrade compilation speed but will also find some correctness errors. + /// Flag to turn off the compiler check for missing Brillig call constraints. + /// Warning: This can improve compilation speed but can also lead to correctness errors. /// This check should always be run on production code. #[arg(long)] - pub enable_brillig_constraints_check: bool, + pub skip_brillig_constraints_check: bool, /// Flag to turn on extra Brillig bytecode to be generated to guard against invalid states in testing. #[arg(long, hide = true)] pub enable_brillig_debug_assertions: bool, - /// Hidden Brillig call check flag to maintain CI compatibility (currently ignored) - #[arg(long, hide = true)] - pub skip_brillig_constraints_check: bool, - /// Flag to turn on the lookback feature of the Brillig call constraints /// check, allowing tracking argument values before the call happens preventing /// certain rare false positives (leads to a slowdown on large rollout functions) @@ -227,8 +223,8 @@ impl From for CompileError { } } -impl From for FileDiagnostic { - fn from(error: CompileError) -> FileDiagnostic { +impl From for CustomDiagnostic { + fn from(error: CompileError) -> CustomDiagnostic { match error { CompileError::RuntimeError(err) => err.into(), CompileError::MonomorphizationError(err) => err.into(), @@ -237,10 +233,10 @@ impl From for FileDiagnostic { } /// Helper type used to signify where only warnings are expected in file diagnostics -pub type Warnings = Vec; +pub type Warnings = Vec; /// Helper type used to signify where errors or warnings are expected in file diagnostics -pub type ErrorsAndWarnings = Vec; +pub type ErrorsAndWarnings = Vec; /// Helper type for connecting a compilation artifact to the errors or warnings which were produced during compilation. pub type CompilationResult = Result<(T, Warnings), ErrorsAndWarnings>; @@ -350,20 +346,16 @@ pub fn check_crate( ) -> CompilationResult<()> { let diagnostics = CrateDefMap::collect_defs(crate_id, context, options.frontend_options()); let crate_files = context.crate_files(&crate_id); - let warnings_and_errors: Vec = diagnostics - .into_iter() - .map(|error| { - let location = error.location(); - let diagnostic = CustomDiagnostic::from(&error); - diagnostic.in_file(location.file) - }) + let warnings_and_errors: Vec = diagnostics + .iter() + .map(CustomDiagnostic::from) .filter(|diagnostic| { // We filter out any warnings if they're going to be ignored later on to free up memory. 
- !options.silence_warnings || diagnostic.diagnostic.kind != DiagnosticKind::Warning + !options.silence_warnings || diagnostic.kind != DiagnosticKind::Warning }) .filter(|error| { // Only keep warnings from the crate we are checking - if error.diagnostic.is_warning() { crate_files.contains(&error.file_id) } else { true } + if error.is_warning() { crate_files.contains(&error.file) } else { true } }) .collect(); @@ -401,16 +393,16 @@ pub fn compile_main( // TODO(#2155): This error might be a better to exist in Nargo let err = CustomDiagnostic::from_message( "cannot compile crate into a program as it does not contain a `main` function", - ) - .in_file(FileId::default()); + FileId::default(), + ); vec![err] })?; let compiled_program = compile_no_check(context, options, main, cached_program, options.force_compile) - .map_err(FileDiagnostic::from)?; + .map_err(|error| vec![CustomDiagnostic::from(error)])?; - let compilation_warnings = vecmap(compiled_program.warnings.clone(), FileDiagnostic::from); + let compilation_warnings = vecmap(compiled_program.warnings.clone(), CustomDiagnostic::from); if options.deny_warnings && !compilation_warnings.is_empty() { return Err(compilation_warnings); } @@ -439,14 +431,16 @@ pub fn compile_contract( let mut errors = warnings; if contracts.len() > 1 { - let err = CustomDiagnostic::from_message("Packages are limited to a single contract") - .in_file(FileId::default()); + let err = CustomDiagnostic::from_message( + "Packages are limited to a single contract", + FileId::default(), + ); return Err(vec![err]); } else if contracts.is_empty() { let err = CustomDiagnostic::from_message( "cannot compile crate into a contract as it does not contain any contracts", - ) - .in_file(FileId::default()); + FileId::default(), + ); return Err(vec![err]); }; @@ -483,12 +477,8 @@ pub fn compile_contract( } /// True if there are (non-warning) errors present and we should halt compilation -fn has_errors(errors: &[FileDiagnostic], deny_warnings: bool) -> bool { - if deny_warnings { - !errors.is_empty() - } else { - errors.iter().any(|error| error.diagnostic.is_error()) - } +fn has_errors(errors: &[CustomDiagnostic], deny_warnings: bool) -> bool { + if deny_warnings { !errors.is_empty() } else { errors.iter().any(|error| error.is_error()) } } /// Compile all of the functions associated with a Noir contract. 
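The net effect of the hunks above: the two-level `FileDiagnostic { file_id, diagnostic }` wrapper disappears and `CustomDiagnostic` carries its `FileId` directly, so constructors take the file up front. A small before/after sketch using only names from this diff:

```rust
use fm::FileId;
use noirc_errors::CustomDiagnostic;

// Before this change, the file id was bolted on after construction:
//     CustomDiagnostic::from_message("Packages are limited to a single contract")
//         .in_file(FileId::default())
// After it, the file id is a constructor argument and a public field:
fn single_contract_error(file: FileId) -> Vec<CustomDiagnostic> {
    vec![CustomDiagnostic::from_message("Packages are limited to a single contract", file)]
}
```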
@@ -525,7 +515,7 @@ fn compile_contract_inner( let function = match compile_no_check(context, &options, function_id, None, true) { Ok(function) => function, Err(new_error) => { - errors.push(FileDiagnostic::from(new_error)); + errors.push(new_error.into()); continue; } }; @@ -704,7 +694,7 @@ pub fn compile_no_check( skip_underconstrained_check: options.skip_underconstrained_check, enable_brillig_constraints_check_lookback: options .enable_brillig_constraints_check_lookback, - enable_brillig_constraints_check: options.enable_brillig_constraints_check, + skip_brillig_constraints_check: options.skip_brillig_constraints_check, inliner_aggressiveness: options.inliner_aggressiveness, max_bytecode_increase_percent: options.max_bytecode_increase_percent, }; diff --git a/noir/noir-repo/compiler/noirc_driver/tests/contracts.rs b/noir/noir-repo/compiler/noirc_driver/tests/contracts.rs index ea42cb23376..0732a7728ca 100644 --- a/noir/noir-repo/compiler/noirc_driver/tests/contracts.rs +++ b/noir/noir-repo/compiler/noirc_driver/tests/contracts.rs @@ -33,10 +33,10 @@ contract Bar {}"; assert_eq!( errors, - vec![ - CustomDiagnostic::from_message("Packages are limited to a single contract") - .in_file(FileId::default()) - ], + vec![CustomDiagnostic::from_message( + "Packages are limited to a single contract", + FileId::default() + )], "stdlib is producing warnings" ); diff --git a/noir/noir-repo/compiler/noirc_errors/src/lib.rs b/noir/noir-repo/compiler/noirc_errors/src/lib.rs index 146217f91a0..91d121603ba 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/lib.rs @@ -8,21 +8,3 @@ mod position; pub mod reporter; pub use position::{Located, Location, Position, Span, Spanned}; pub use reporter::{CustomDiagnostic, DiagnosticKind}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FileDiagnostic { - pub file_id: fm::FileId, - pub diagnostic: CustomDiagnostic, -} - -impl FileDiagnostic { - pub fn new(file_id: fm::FileId, diagnostic: CustomDiagnostic) -> FileDiagnostic { - FileDiagnostic { file_id, diagnostic } - } -} - -impl From for Vec { - fn from(value: FileDiagnostic) -> Self { - vec![value] - } -} diff --git a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs index e516f690ddc..d406e897d65 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs @@ -1,6 +1,6 @@ use std::io::IsTerminal; -use crate::{FileDiagnostic, Location, Span}; +use crate::{Location, Span}; use codespan_reporting::diagnostic::{Diagnostic, Label}; use codespan_reporting::files::Files; use codespan_reporting::term; @@ -8,6 +8,7 @@ use codespan_reporting::term::termcolor::{ColorChoice, StandardStream}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct CustomDiagnostic { + pub file: fm::FileId, pub message: String, pub secondaries: Vec, pub notes: Vec, @@ -35,8 +36,9 @@ pub struct ReportedErrors { } impl CustomDiagnostic { - pub fn from_message(msg: &str) -> CustomDiagnostic { + pub fn from_message(msg: &str, file: fm::FileId) -> CustomDiagnostic { Self { + file, message: msg.to_owned(), secondaries: Vec::new(), notes: Vec::new(), @@ -54,6 +56,7 @@ impl CustomDiagnostic { kind: DiagnosticKind, ) -> CustomDiagnostic { CustomDiagnostic { + file: secondary_location.file, message: primary_message, secondaries: vec![CustomLabel::new(secondary_message, secondary_location)], notes: Vec::new(), @@ -109,6 +112,7 @@ impl CustomDiagnostic { secondary_location: 
Location, ) -> CustomDiagnostic { CustomDiagnostic { + file: secondary_location.file, message: primary_message, secondaries: vec![CustomLabel::new(secondary_message, secondary_location)], notes: Vec::new(), @@ -119,10 +123,6 @@ impl CustomDiagnostic { } } - pub fn in_file(self, file_id: fm::FileId) -> FileDiagnostic { - FileDiagnostic::new(file_id, self) - } - pub fn with_call_stack(mut self, call_stack: Vec) -> Self { self.call_stack = call_stack; self @@ -185,16 +185,16 @@ impl CustomLabel { /// of diagnostics that were errors. pub fn report_all<'files>( files: &'files impl Files<'files, FileId = fm::FileId>, - diagnostics: &[FileDiagnostic], + diagnostics: &[CustomDiagnostic], deny_warnings: bool, silence_warnings: bool, ) -> ReportedErrors { // Report warnings before any errors let (warnings_and_bugs, mut errors): (Vec<_>, _) = - diagnostics.iter().partition(|item| !item.diagnostic.is_error()); + diagnostics.iter().partition(|item| !item.is_error()); let (warnings, mut bugs): (Vec<_>, _) = - warnings_and_bugs.iter().partition(|item| item.diagnostic.is_warning()); + warnings_and_bugs.iter().partition(|item| item.is_warning()); let mut diagnostics = if silence_warnings { Vec::new() } else { warnings }; diagnostics.append(&mut bugs); diagnostics.append(&mut errors); @@ -205,14 +205,14 @@ pub fn report_all<'files>( ReportedErrors { error_count } } -impl FileDiagnostic { +impl CustomDiagnostic { /// Print the report; return true if it was an error. pub fn report<'files>( &self, files: &'files impl Files<'files, FileId = fm::FileId>, deny_warnings: bool, ) -> bool { - report(files, &self.diagnostic, deny_warnings) + report(files, self, deny_warnings) } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 742ccb4c9de..d8f1f9d0997 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,6 +1,6 @@ use crate::brillig::brillig_ir::artifact::Label; use crate::brillig::brillig_ir::brillig_variable::{ - BrilligArray, BrilligVariable, SingleAddrVariable, type_to_heap_value_type, + BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, type_to_heap_value_type, }; use crate::brillig::brillig_ir::registers::RegisterAllocator; @@ -935,8 +935,64 @@ impl<'block, Registers: RegisterAllocator> BrilligBlock<'block, Registers> { Instruction::EnableSideEffectsIf { .. } => { unreachable!("enable_side_effects not supported by brillig") } - Instruction::IfElse { .. 
} => { - unreachable!("IfElse instructions should not be possible in brillig") + Instruction::IfElse { then_condition, then_value, else_condition: _, else_value } => { + let then_condition = self.convert_ssa_single_addr_value(*then_condition, dfg); + let then_value = self.convert_ssa_value(*then_value, dfg); + let else_value = self.convert_ssa_value(*else_value, dfg); + let result = self.variables.define_variable( + self.function_context, + self.brillig_context, + dfg.instruction_results(instruction_id)[0], + dfg, + ); + match (then_value, else_value) { + ( + BrilligVariable::SingleAddr(then_address), + BrilligVariable::SingleAddr(else_address), + ) => { + self.brillig_context.conditional_move_instruction( + then_condition, + then_address, + else_address, + result.extract_single_addr(), + ); + } + ( + BrilligVariable::BrilligArray(then_array), + BrilligVariable::BrilligArray(else_array), + ) => { + // Pointer to the array which result from the if-else + let pointer = self.brillig_context.allocate_register(); + self.brillig_context.conditional_move_instruction( + then_condition, + SingleAddrVariable::new_usize(then_array.pointer), + SingleAddrVariable::new_usize(else_array.pointer), + SingleAddrVariable::new_usize(pointer), + ); + let if_else_array = BrilligArray { pointer, size: then_array.size }; + // Copy the if-else array to the result + self.brillig_context + .call_array_copy_procedure(if_else_array, result.extract_array()); + } + ( + BrilligVariable::BrilligVector(then_vector), + BrilligVariable::BrilligVector(else_vector), + ) => { + // Pointer to the vector which result from the if-else + let pointer = self.brillig_context.allocate_register(); + self.brillig_context.conditional_move_instruction( + then_condition, + SingleAddrVariable::new_usize(then_vector.pointer), + SingleAddrVariable::new_usize(else_vector.pointer), + SingleAddrVariable::new_usize(pointer), + ); + let if_else_vector = BrilligVector { pointer }; + // Copy the if-else vector to the result + self.brillig_context + .call_vector_copy_procedure(if_else_vector, result.extract_vector()); + } + _ => unreachable!("ICE - then and else values must have the same type"), + } } Instruction::MakeArray { elements: array, typ } => { let value_id = dfg.instruction_results(instruction_id)[0]; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index b547e56abef..c0b6492618c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -126,6 +126,24 @@ impl DebugShow { debug_println!(self.enable_debug_trace, " MOV {}, {}", destination, source); } + /// Emits a `conditional mov` instruction. + pub(crate) fn conditional_mov_instruction( + &self, + destination: MemoryAddress, + source_then: MemoryAddress, + source_else: MemoryAddress, + condition: MemoryAddress, + ) { + debug_println!( + self.enable_debug_trace, + " {} = MOV if {} then {}, else {}", + destination, + condition, + source_then, + source_else + ); + } + /// Emits a `cast` instruction. 
pub(crate) fn cast_instruction( &self, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs index 6481972d707..fad9892cfb1 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/instructions.rs @@ -75,6 +75,28 @@ impl BrilligContext< ); } + /// Insert a conditional move instruction + pub(crate) fn conditional_move_instruction( + &mut self, + condition: SingleAddrVariable, + then_address: SingleAddrVariable, + else_address: SingleAddrVariable, + destination: SingleAddrVariable, + ) { + self.debug_show.conditional_mov_instruction( + destination.address, + then_address.address, + else_address.address, + condition.address, + ); + self.push_opcode(BrilligOpcode::ConditionalMov { + destination: destination.address, + source_a: then_address.address, + source_b: else_address.address, + condition: condition.address, + }); + } + fn binary( &mut self, lhs: SingleAddrVariable, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 202124f7931..deaefd40ae3 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -8,7 +8,7 @@ //! //! An Error of the latter is an error in the implementation of the compiler use iter_extended::vecmap; -use noirc_errors::{CustomDiagnostic as Diagnostic, FileDiagnostic, Location}; +use noirc_errors::{CustomDiagnostic, Location}; use noirc_frontend::signed_field::SignedField; use thiserror::Error; @@ -73,8 +73,8 @@ pub enum SsaReport { Bug(InternalBug), } -impl From for FileDiagnostic { - fn from(error: SsaReport) -> FileDiagnostic { +impl From for CustomDiagnostic { + fn from(error: SsaReport) -> CustomDiagnostic { match error { SsaReport::Warning(warning) => { let message = warning.to_string(); @@ -87,10 +87,10 @@ impl From for FileDiagnostic { }, }; let call_stack = vecmap(call_stack, |location| location); - let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); let location = call_stack.last().expect("Expected RuntimeError to have a location"); - let diagnostic = Diagnostic::simple_warning(message, secondary_message, *location); - diagnostic.with_call_stack(call_stack).in_file(file_id) + let diagnostic = + CustomDiagnostic::simple_warning(message, secondary_message, *location); + diagnostic.with_call_stack(call_stack) } SsaReport::Bug(bug) => { let message = bug.to_string(); @@ -104,10 +104,10 @@ impl From for FileDiagnostic { InternalBug::AssertFailed { call_stack } => ("As a result, the compiled circuit is ensured to fail. 
Other assertions may also fail during execution".to_string(), call_stack) }; let call_stack = vecmap(call_stack, |location| location); - let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); let location = call_stack.last().expect("Expected RuntimeError to have a location"); - let diagnostic = Diagnostic::simple_bug(message, secondary_message, *location); - diagnostic.with_call_stack(call_stack).in_file(file_id) + let diagnostic = + CustomDiagnostic::simple_bug(message, secondary_message, *location); + diagnostic.with_call_stack(call_stack) } } } @@ -181,20 +181,19 @@ impl RuntimeError { } } -impl From for FileDiagnostic { - fn from(error: RuntimeError) -> FileDiagnostic { +impl From for CustomDiagnostic { + fn from(error: RuntimeError) -> CustomDiagnostic { let call_stack = vecmap(error.call_stack(), |location| *location); - let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); let diagnostic = error.into_diagnostic(); - diagnostic.with_call_stack(call_stack).in_file(file_id) + diagnostic.with_call_stack(call_stack) } } impl RuntimeError { - fn into_diagnostic(self) -> Diagnostic { + fn into_diagnostic(self) -> CustomDiagnostic { match self { RuntimeError::InternalError(cause) => { - Diagnostic::simple_error( + CustomDiagnostic::simple_error( "Internal Consistency Evaluators Errors: \n This is likely a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), cause.to_string(), @@ -206,7 +205,7 @@ impl RuntimeError { let location = self.call_stack().last().expect("Expected RuntimeError to have a location"); - Diagnostic::simple_error( + CustomDiagnostic::simple_error( primary_message, "If attempting to fetch the length of a slice, try converting to an array. Slices only use dynamic lengths.".to_string(), *location, @@ -217,7 +216,7 @@ impl RuntimeError { let location = self.call_stack().last().unwrap_or_else(|| panic!("Expected RuntimeError to have a location. 
Error message: {message}"));
-            Diagnostic::simple_error(message, String::new(), *location)
+            CustomDiagnostic::simple_error(message, String::new(), *location)
         }
     }
 }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs
index 75ea557d3de..5c212e8f38a 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs
@@ -20,10 +20,20 @@ pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String {
     while first_line.is_empty() {
         first_line = lines.next().unwrap();
     }
+    let first_line_original_length = first_line.len();
     let mut result = first_line.trim_start().to_string();
+    let first_line_trimmed_length = result.len();
+
+    // Try to see how many spaces we chopped off the first line
+    let difference = first_line_original_length - first_line_trimmed_length;
     for line in lines {
         result.push('\n');
-        result.push_str(line.trim_start());
+        // Try to remove just `difference` spaces to preserve indents
+        if line.len() - line.trim_start().len() >= difference {
+            result.push_str(&line.chars().skip(difference).collect::<String>());
+        } else {
+            result.push_str(line.trim_start());
+        }
     }
     result
 }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs
index b4219ce278f..935918c6b7e 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs
@@ -72,8 +72,8 @@ pub struct SsaEvaluatorOptions {
     /// Skip the check for under constrained values
     pub skip_underconstrained_check: bool,
 
-    /// Enable the missing Brillig call constraints check
-    pub enable_brillig_constraints_check: bool,
+    /// Skip the missing Brillig call constraints check
+    pub skip_brillig_constraints_check: bool,
 
     /// Enable the lookback feature of the Brillig call constraints
     /// check (prevents some rare false positives, leads to a slowdown
@@ -143,7 +143,7 @@ pub(crate) fn optimize_into_acir(
         ));
     }
 
-    if options.enable_brillig_constraints_check {
+    if !options.skip_brillig_constraints_check {
         ssa_level_warnings.extend(time(
             "After Check for Missing Brillig Call Constraints",
             options.print_codegen_timings,
@@ -211,6 +211,7 @@ fn optimize_all(builder: SsaBuilder, options: &SsaEvaluatorOptions) -> Result<Ssa, RuntimeError> {
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs
     // Map of argument value ids to the Brillig call ids employing them
     call_arguments: HashMap<ValueId, Vec<InstructionId>>,
-    // Maintains count of calls being tracked
-    tracking_count: usize,
+    // The set of calls currently being tracked
+    tracking: HashSet<InstructionId>,
     // Opt-in to use the lookback feature (tracking the argument values
     // of a Brillig call before the call happens if their usage precedes
     // it).
Can prevent certain false positives, at the cost of @@ -138,8 +138,6 @@ struct BrilligTaintedIds { array_elements: HashMap>, // Initial result value ids, along with element ids for arrays root_results: HashSet, - // The flag signaling that the call should be now tracked - tracking: bool, } #[derive(Clone, Debug)] @@ -195,7 +193,6 @@ impl BrilligTaintedIds { results: results_status, array_elements, root_results: HashSet::from_iter(results.iter().copied()), - tracking: false, } } @@ -394,23 +391,19 @@ impl DependencyContext { for argument in &arguments { if let Some(calls) = self.call_arguments.get(argument) { for call in calls { - if let Some(tainted_ids) = self.tainted.get_mut(call) { - tainted_ids.tracking = true; - self.tracking_count += 1; + if self.tainted.contains_key(call) { + self.tracking.insert(*call); } } } } } - if let Some(tainted_ids) = self.tainted.get_mut(instruction) { - if !tainted_ids.tracking { - tainted_ids.tracking = true; - self.tracking_count += 1; - } + if self.tainted.contains_key(instruction) { + self.tracking.insert(*instruction); } // We can skip over instructions while nothing is being tracked - if self.tracking_count > 0 { + if !self.tracking.is_empty() { let mut results = Vec::new(); // Collect non-constant instruction results @@ -524,7 +517,7 @@ impl DependencyContext { // results involving the array in question, to properly // populate the array element tainted sets Instruction::ArrayGet { array, index } => { - self.process_array_get(function, *array, *index, &results); + self.process_array_get(*array, *index, &results, function); // Record all the used arguments as parents of the results self.update_children(&arguments, &results); } @@ -563,7 +556,10 @@ impl DependencyContext { .tainted .keys() .map(|brillig_call| { - trace!("tainted structure for {}: {:?}", brillig_call, self.tainted[brillig_call]); + trace!( + "tainted structure for {:?}: {:?}", + brillig_call, self.tainted[brillig_call] + ); SsaReport::Bug(InternalBug::UncheckedBrilligCall { call_stack: function.dfg.get_instruction_call_stack(*brillig_call), }) @@ -587,8 +583,8 @@ impl DependencyContext { self.side_effects_condition.map(|v| parents.insert(v)); // Don't update sets for the calls not yet being tracked - for (_, tainted_ids) in self.tainted.iter_mut() { - if tainted_ids.tracking { + for call in &self.tracking { + if let Some(tainted_ids) = self.tainted.get_mut(call) { tainted_ids.update_children(&parents, children); } } @@ -605,15 +601,15 @@ impl DependencyContext { .collect(); // Skip untracked calls - for (_, tainted_ids) in self.tainted.iter_mut() { - if tainted_ids.tracking { + for call in &self.tracking { + if let Some(tainted_ids) = self.tainted.get_mut(call) { tainted_ids.store_partial_constraints(&constrained_values); } } - self.tainted.retain(|_, tainted_ids| { + self.tainted.retain(|call, tainted_ids| { if tainted_ids.check_constrained() { - self.tracking_count -= 1; + self.tracking.remove(call); false } else { true @@ -624,10 +620,10 @@ impl DependencyContext { /// Process ArrayGet instruction for tracked Brillig calls fn process_array_get( &mut self, - function: &Function, array: ValueId, index: ValueId, element_results: &[ValueId], + function: &Function, ) { use acvm::acir::AcirField; @@ -635,8 +631,8 @@ impl DependencyContext { if let Some(value) = function.dfg.get_numeric_constant(index) { if let Some(index) = value.try_to_u32() { // Skip untracked calls - for (_, tainted_ids) in self.tainted.iter_mut() { - if tainted_ids.tracking { + for call in &self.tracking { + if let 
Some(tainted_ids) = self.tainted.get_mut(call) { tainted_ids.process_array_get(array, index as usize, element_results); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 6b9ed3932e3..132985830eb 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -239,10 +239,9 @@ impl DataFlowGraph { instruction, Instruction::IncrementRc { .. } | Instruction::DecrementRc { .. } ), - RuntimeType::Brillig(_) => !matches!( - instruction, - Instruction::EnableSideEffectsIf { .. } | Instruction::IfElse { .. } - ), + RuntimeType::Brillig(_) => { + !matches!(instruction, Instruction::EnableSideEffectsIf { .. }) + } } } @@ -377,6 +376,11 @@ impl DataFlowGraph { } } + /// Replace an existing instruction with a new one. + pub(crate) fn set_instruction(&mut self, id: InstructionId, instruction: Instruction) { + self.instructions[id] = instruction; + } + /// Set the value of value_to_replace to refer to the value referred to by new_value. /// /// This is the preferred method to call for optimizations simplifying diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index 9e4557e06a6..13b5ead5eb6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -79,6 +79,13 @@ impl<'f> FunctionInserter<'f> { (instruction, self.function.dfg.get_instruction_call_stack_id(id)) } + /// Get an instruction, map all its values, and replace it with the resolved instruction. + pub(crate) fn map_instruction_in_place(&mut self, id: InstructionId) { + let mut instruction = self.function.dfg[id].clone(); + instruction.map_values_mut(|id| self.resolve(id)); + self.function.dfg.set_instruction(id, instruction); + } + /// Maps a terminator in place, replacing any ValueId in the terminator with the /// resolved version of that value id from this FunctionInserter's internal value mapping. 
pub(crate) fn map_terminator_in_place(&mut self, block: BasicBlockId) { @@ -251,4 +258,22 @@ impl<'f> FunctionInserter<'f> { self.values.entry(*param).or_insert(*new_param); } } + + /// Merge the internal mapping into the given mapping + /// The merge is guaranteed to be coherent because ambiguous cases are prevented + pub(crate) fn extract_mapping(&self, mapping: &mut HashMap) { + for (k, v) in &self.values { + if mapping.contains_key(k) { + unreachable!("cannot merge key"); + } + if mapping.contains_key(v) { + unreachable!("cannot merge value"); + } + mapping.insert(*k, *v); + } + } + + pub(crate) fn set_mapping(&mut self, mapping: HashMap) { + self.values = mapping; + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 73d6d9c2a6a..d32a562a037 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -610,7 +610,9 @@ fn simplify_black_box_func( "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" ) } - BlackBoxFunc::Sha256Compression => SimplifyResult::None, //TODO(Guillaume) + BlackBoxFunc::Sha256Compression => { + blackbox::simplify_sha256_compression(dfg, arguments, block, call_stack) + } BlackBoxFunc::AES128Encrypt => SimplifyResult::None, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs index ea9daf5e4c5..b3696610b17 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use acvm::blackbox_solver::sha256_compression; use acvm::{BlackBoxFunctionSolver, BlackBoxResolutionError, FieldElement, acir::AcirField}; use crate::ssa::ir::call_stack::CallStackId; @@ -233,6 +234,55 @@ pub(super) fn simplify_poseidon2_permutation( } } +pub(super) fn simplify_sha256_compression( + dfg: &mut DataFlowGraph, + arguments: &[ValueId], + block: BasicBlockId, + call_stack: CallStackId, +) -> SimplifyResult { + match (dfg.get_array_constant(arguments[0]), dfg.get_array_constant(arguments[1])) { + (Some((state, _)), Some((msg_blocks, _))) + if array_is_constant(dfg, &state) && array_is_constant(dfg, &msg_blocks) => + { + let state: Option> = state + .iter() + .map(|id| { + dfg.get_numeric_constant(*id) + .expect("value id from array should point at constant") + .try_to_u32() + }) + .collect(); + + let Some(mut state) = state.and_then(|vec| <[u32; 8]>::try_from(vec).ok()) else { + return SimplifyResult::None; + }; + + let msg_blocks: Option> = msg_blocks + .iter() + .map(|id| { + dfg.get_numeric_constant(*id) + .expect("value id from array should point at constant") + .try_to_u32() + }) + .collect(); + + let Some(msg_blocks) = msg_blocks.and_then(|vec| <[u32; 16]>::try_from(vec).ok()) + else { + return SimplifyResult::None; + }; + + sha256_compression(&mut state, &msg_blocks); + + let new_state = state.into_iter().map(FieldElement::from); + let typ = NumericType::Unsigned { bit_size: 32 }; + let result_array = make_constant_array(dfg, new_state, typ, block, call_stack); + + SimplifyResult::SimplifiedTo(result_array) + } + _ => SimplifyResult::None, + } +} + pub(super) fn simplify_hash( dfg: &mut DataFlowGraph, arguments: &[ValueId], @@ -308,7 +358,7 @@ pub(super) fn 
simplify_signature( #[cfg(feature = "bn254")] #[cfg(test)] -mod test { +mod multi_scalar_mul { use crate::ssa::Ssa; use crate::ssa::opt::assert_normalized_ssa_equals; @@ -317,7 +367,7 @@ mod test { fn full_constant_folding() { let src = r#" acir(inline) fn main f0 { - b0(): + b0(): v0 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] v1 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] v2 = call multi_scalar_mul (v1, v0) -> [Field; 3] @@ -327,7 +377,7 @@ mod test { let expected_src = r#" acir(inline) fn main f0 { - b0(): + b0(): v3 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] v10 = make_array [Field 1478523918288173385110236399861791147958001875200066088686689589556927843200, Field 700144278551281040379388961242974992655630750193306467120985766322057145630, Field 0] : [Field; 3] @@ -342,7 +392,7 @@ mod test { fn simplify_zero() { let src = r#" acir(inline) fn main f0 { - b0(v0: Field, v1: Field): + b0(v0: Field, v1: Field): v2 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] v3 = make_array [ Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] @@ -355,7 +405,7 @@ mod test { //First point is zero, second scalar is zero, so we should be left with the scalar mul of the last point. let expected_src = r#" acir(inline) fn main f0 { - b0(v0: Field, v1: Field): + b0(v0: Field, v1: Field): v3 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] v5 = make_array [Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] v6 = make_array [v0, Field 0] : [Field; 2] @@ -372,7 +422,7 @@ mod test { fn partial_constant_folding() { let src = r#" acir(inline) fn main f0 { - b0(v0: Field, v1: Field): + b0(v0: Field, v1: Field): v2 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] v3 = make_array [ Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] @@ -383,7 +433,7 @@ mod test { //First and last scalar/point are constant, so we should be left with the msm of the middle point and the folded constant point let expected_src = r#" acir(inline) fn main f0 { - b0(v0: Field, v1: Field): + b0(v0: Field, v1: Field): v5 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] v8 = make_array [v0, Field 0, Field 1, Field 0] : [Field; 4] @@ -395,3 +445,32 @@ mod test { assert_normalized_ssa_equals(ssa, expected_src); } } + +#[cfg(test)] +mod sha256_compression { + use crate::ssa::Ssa; + use crate::ssa::opt::assert_normalized_ssa_equals; + + #[test] + fn is_optimized_out_with_constant_arguments() { + let src = r#" + acir(inline) fn main f0 { + b0(): + v0 = make_array [u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0] : [u32; 8] + v1 = make_array [u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0] : 
[u32; 16] + v2 = call sha256_compression(v0, v1) -> [u32; 8] + return v2 + }"#; + let ssa = Ssa::from_str_simplifying(src).unwrap(); + let expected_src = r#" + acir(inline) fn main f0 { + b0(): + v1 = make_array [u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0] : [u32; 8] + v2 = make_array [u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0, u32 0] : [u32; 16] + v11 = make_array [u32 2091193876, u32 1113340840, u32 3461668143, u32 3254913767, u32 3068490961, u32 2551409935, u32 2927503052, u32 3205228454] : [u32; 8] + return v11 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs index 48587cb4b7b..6f26fef071e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs @@ -210,7 +210,7 @@ mod tests { // Regression test for https://github.com/noir-lang/noir/issues/7451 let src = " acir(inline) predicate_pure fn main f0 { - b0(v0: u8): + b0(v0: u8): v1 = and u8 255, v0 return v1 } @@ -220,7 +220,7 @@ mod tests { let expected = " acir(inline) fn main f0 { - b0(v0: u8): + b0(v0: u8): return v0 } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/basic_conditional.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/basic_conditional.rs new file mode 100644 index 00000000000..669a3dd7783 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/basic_conditional.rs @@ -0,0 +1,526 @@ +use std::collections::HashSet; + +use acvm::AcirField; +use fxhash::FxHashMap as HashMap; +use iter_extended::vecmap; + +use crate::ssa::{ + Ssa, + ir::{ + basic_block::BasicBlockId, + cfg::ControlFlowGraph, + dfg::DataFlowGraph, + function::{Function, FunctionId}, + function_inserter::FunctionInserter, + instruction::{BinaryOp, Instruction, TerminatorInstruction}, + post_order::PostOrder, + value::ValueId, + }, +}; + +use super::flatten_cfg::Context; +#[derive(Debug, Clone)] +struct BasicConditional { + block_entry: BasicBlockId, + block_then: Option, + block_else: Option, + block_exit: BasicBlockId, +} + +impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] + /// This pass flatten simple IF-THEN-ELSE statements + /// This optimization pass identifies simple conditional control flow patterns in unconstrained code + /// and flattens them to reduce the number of basic blocks and improve performance. + /// + /// e.g: if c {a} else {b} would be flattened to c*(a-b)+b + /// A simple conditional pattern is defined as an IF-THEN (with optional ELSE) statement, with no nested conditional nor loop statements + /// Performance improvement is based on a simple execution cost metric + pub(crate) fn flatten_basic_conditionals(mut self) -> Ssa { + // Retrieve the 'no_predicates' attribute of the functions in a map, to avoid problems with borrowing + let mut no_predicates = HashMap::default(); + for function in self.functions.values() { + no_predicates.insert(function.id(), function.is_no_predicates()); + } + for function in self.functions.values_mut() { + flatten_function(function, &mut no_predicates); + } + self + } +} + +/// Returns the blocks of the simple conditional sub-graph whose input block is the entry. +/// Returns None if the input block is not the entry block of a simple conditional. 
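The identity quoted in the pass doc comment, `if c {a} else {b} == c*(a-b)+b`, is what the flattening emits via `cast`/`unchecked_mul`/`unchecked_add`, in the overflow-safe form `c*a + (1-c)*b` (see the `basic_jmpif` test later in this file). A standalone sanity check of that identity, assuming `c` is a boolean already encoded as 0 or 1:

```rust
// Select without branching: valid whenever c is 0 or 1.
fn select(c: u32, a: u32, b: u32) -> u32 {
    c * a + (1 - c) * b
}

fn main() {
    assert_eq!(select(1, 3, 5), 3); // condition true  -> "then" value
    assert_eq!(select(0, 3, 5), 5); // condition false -> "else" value
}
```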
+fn is_conditional( + block: BasicBlockId, + cfg: &ControlFlowGraph, + function: &Function, +) -> Option { + // jump overhead is the cost for doing the conditional and jump around the blocks + // We use 10 as a rough estimate, the real cost is less. + let jump_overhead = 10; + let mut successors = cfg.successors(block); + let mut result = None; + // a conditional must have 2 branches + if successors.len() != 2 { + return None; + } + let left = successors.next().unwrap(); + let right = successors.next().unwrap(); + let mut left_successors = cfg.successors(left); + let mut right_successors = cfg.successors(right); + let left_successors_len = left_successors.len(); + let right_successors_len = right_successors.len(); + let next_left = left_successors.next(); + let next_right = right_successors.next(); + if next_left == Some(block) || next_right == Some(block) { + // this is a loop, not a conditional + return None; + } + if left_successors_len == 1 && right_successors_len == 1 && next_left == next_right { + // The branches join on one block so it is a non-nested conditional + let cost_left = block_cost(left, &function.dfg); + let cost_right = block_cost(right, &function.dfg); + // For the flattening to be valuable, we compare the cost of the flattened code with the average cost of the 2 branches, + // including an overhead to take into account the jumps between the blocks. + let cost = cost_right.saturating_add(cost_left); + if cost < cost / 2 + jump_overhead { + if let Some(TerminatorInstruction::JmpIf { + condition: _, + then_destination, + else_destination, + call_stack: _, + }) = function.dfg[block].terminator() + { + result = Some(BasicConditional { + block_entry: block, + block_then: Some(*then_destination), + block_else: Some(*else_destination), + block_exit: next_left.unwrap(), + }); + } + } + } else if left_successors_len == 1 && next_left == Some(right) { + // Left branch joins the right branch, e.g if/then statement with no else + // This case may not happen (i.e not generated), but it is safer to handle it (e.g in case it happens due to some optimizations) + let cost = block_cost(left, &function.dfg); + if cost < cost / 2 + jump_overhead { + if let Some(TerminatorInstruction::JmpIf { + condition: _, + then_destination, + else_destination, + call_stack: _, + }) = function.dfg[block].terminator() + { + let (block_then, block_else) = if left == *then_destination { + (Some(left), None) + } else if left == *else_destination { + (None, Some(left)) + } else { + return None; + }; + + result = Some(BasicConditional { + block_entry: block, + block_then, + block_else, + block_exit: right, + }); + } + } + } else if right_successors_len == 1 && next_right == Some(left) { + // Right branch joins the left branch, e.g if/else statement with no then + // This case may not happen (i.e not generated), but it is safer to handle it (e.g in case it happens due to some optimizations) + let cost = block_cost(right, &function.dfg); + if cost < cost / 2 + jump_overhead { + if let Some(TerminatorInstruction::JmpIf { + condition: _, + then_destination, + else_destination, + call_stack: _, + }) = function.dfg[block].terminator() + { + let (block_then, block_else) = if right == *then_destination { + (Some(right), None) + } else if right == *else_destination { + (None, Some(right)) + } else { + return None; + }; + result = Some(BasicConditional { + block_entry: block, + block_then, + block_else, + block_exit: right, + }); + } + } + } + // A conditional exit would have exactly 2 predecessors + 
result.filter(|result| cfg.predecessors(result.block_exit).len() == 2) +} + +/// Computes a cost estimate of a basic block +/// returns u32::MAX if the block has side-effect instructions +/// WARNING: these are estimates of the runtime cost of each instruction, +/// 1 being the cost of the simplest instruction. These numbers can be improved. +fn block_cost(block: BasicBlockId, dfg: &DataFlowGraph) -> u32 { + let mut cost: u32 = 0; + for instruction in dfg[block].instructions() { + let instruction_cost = match &dfg[*instruction] { + Instruction::Binary(binary) => { + match binary.operator { + BinaryOp::Add { unchecked } + | BinaryOp::Sub { unchecked } + | BinaryOp::Mul { unchecked } => if unchecked { 3 } else { return u32::MAX }, + BinaryOp::Div + | BinaryOp::Mod => return u32::MAX, + BinaryOp::Eq => 1, + BinaryOp::Lt => 5, + BinaryOp::And + | BinaryOp::Or + | BinaryOp::Xor => 1, + BinaryOp::Shl + | BinaryOp::Shr => return u32::MAX, + } + }, + // A Cast can be either simplified, or lead to a truncate + Instruction::Cast(_, _) => 3, + Instruction::Not(_) => 1, + Instruction::Truncate { .. } => 7, + + Instruction::Constrain(_,_,_) + | Instruction::ConstrainNotEqual(_,_,_) + | Instruction::RangeCheck { .. } + // Calls with no-predicate set to true could be supported, but + // they are likely to be too costly anyways. Simple calls would + // have been inlined already. + | Instruction::Call { .. } + | Instruction::Load { .. } + | Instruction::Store { .. } + | Instruction::ArraySet { .. } => return u32::MAX, + + Instruction::ArrayGet { array, index } => { + // A get can fail because of out-of-bound index + let mut in_bound = false; + // check if index is in bound + if let (Some(index), Some(len)) = (dfg.get_numeric_constant(*index), dfg.try_get_array_length(*array)) { + // The index is in-bounds + if index.to_u128() < len as u128 { + in_bound = true; + } + } + if !in_bound { + return u32::MAX; + } + 1 + }, + // if less than 10 elements, it is translated into a store for each element + // if more than 10, it is a loop, so 20 should be a good estimate, worst case being 10 stores and ~10 index increments + Instruction::MakeArray { .. } => 20, + + Instruction::Allocate + | Instruction::EnableSideEffectsIf { .. } + | Instruction::IncrementRc { .. } + | Instruction::DecrementRc { .. } + | Instruction::Noop => 0, + Instruction::IfElse { .. } => 1, + }; + cost += instruction_cost; + } + cost +} + +/// Identifies all simple conditionals in the function and flattens them +fn flatten_function(function: &mut Function, no_predicates: &mut HashMap) { + // This pass is dedicated to brillig functions + if !function.runtime().is_brillig() { + return; + } + let cfg = ControlFlowGraph::with_function(function); + let mut stack = vec![function.entry_block()]; + let mut processed = HashSet::new(); + let mut conditionals = Vec::new(); + + // 1. Process all blocks of the cfg, starting from the root and following the successors + while let Some(block) = stack.pop() { + // Avoid cycles + if processed.contains(&block) { + continue; + } + processed.insert(block); + + // Identify the simple conditionals + if let Some(conditional) = is_conditional(block, &cfg, function) { + // no need to check the branches, process the join block directly + stack.push(conditional.block_exit); + conditionals.push(conditional); + } else { + stack.extend(cfg.successors(block)); + } + } + + // 2. 
Flatten all simple conditionals + // process basic conditionals in reverse order so that + // a conditional does not impact the previous ones + conditionals.reverse(); + flatten_multiple(&conditionals, function, no_predicates); +} + +fn flatten_multiple( + conditionals: &Vec, + function: &mut Function, + no_predicates: &mut HashMap, +) { + // 1. process each basic conditional, using a new context per conditional + let post_order = PostOrder::with_function(function); + + let mut mapping = HashMap::default(); + for conditional in conditionals { + let cfg = ControlFlowGraph::with_function(function); + let cfg_root = function.entry_block(); + let mut branch_ends = HashMap::default(); + branch_ends.insert(conditional.block_entry, conditional.block_exit); + let mut context = Context::new(function, cfg, branch_ends, cfg_root); + context.flatten_single_conditional(conditional, no_predicates); + // extract the mapping into 'mapping + context.inserter.extract_mapping(&mut mapping); + } + // 2. re-map the full program for values that may been simplified. + if !mapping.is_empty() { + for block in post_order.as_slice() { + Context::map_block_with_mapping(mapping.clone(), function, *block); + } + } +} + +impl Context<'_> { + fn flatten_single_conditional( + &mut self, + conditional: &BasicConditional, + no_predicates: &mut HashMap, + ) { + // Manually inline 'then', 'else' and 'exit' into the entry block + //0. initialize the context for flattening a 'single conditional' + let old_target = self.target_block; + let old_no_predicate = self.no_predicate; + let mut queue = vec![]; + self.target_block = conditional.block_entry; + self.no_predicate = true; + //1. process 'then' branch + self.inline_block(conditional.block_entry, no_predicates); + let to_process = self.handle_terminator(conditional.block_entry, &queue); + queue.extend(to_process); + if let Some(then) = conditional.block_then { + assert_eq!(queue.pop(), conditional.block_then); + self.inline_block(then, no_predicates); + let to_process = self.handle_terminator(then, &queue); + + for incoming_block in to_process { + if !queue.contains(&incoming_block) { + queue.push(incoming_block); + } + } + } + + //2. process 'else' branch, in case there is no 'then' + let next = queue.pop(); + if next == conditional.block_else { + let next = next.unwrap(); + self.inline_block(next, no_predicates); + let _ = self.handle_terminator(next, &queue); + } else { + assert_eq!(next, Some(conditional.block_exit)); + } + + //3. 
process 'exit' block + self.inline_block(conditional.block_exit, no_predicates); + // Manually set the terminator of the entry block to the one of the exit block + let terminator = + self.inserter.function.dfg[conditional.block_exit].terminator().unwrap().clone(); + let new_terminator = match terminator { + TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + call_stack, + } => { + let condition = self.inserter.resolve(condition); + TerminatorInstruction::JmpIf { + condition, + then_destination, + else_destination, + call_stack, + } + } + TerminatorInstruction::Jmp { destination, arguments, call_stack } => { + let arguments = vecmap(arguments, |value| self.inserter.resolve(value)); + TerminatorInstruction::Jmp { destination, arguments, call_stack } + } + TerminatorInstruction::Return { return_values, call_stack } => { + let return_values = vecmap(return_values, |value| self.inserter.resolve(value)); + TerminatorInstruction::Return { return_values, call_stack } + } + }; + self.inserter.function.dfg.set_block_terminator(conditional.block_entry, new_terminator); + self.inserter.map_data_bus_in_place(); + //4. restore the context, in case it is re-used. + self.target_block = old_target; + self.no_predicate = old_no_predicate; + } + + fn map_block_with_mapping( + mapping: HashMap, + func: &mut Function, + block: BasicBlockId, + ) { + // Map all instructions in the block + let mut inserter = FunctionInserter::new(func); + inserter.set_mapping(mapping); + let instructions = inserter.function.dfg[block].instructions().to_vec(); + for instruction in instructions { + inserter.map_instruction_in_place(instruction); + } + inserter.map_terminator_in_place(block); + } +} + +#[cfg(test)] +mod test { + use crate::ssa::{Ssa, opt::assert_normalized_ssa_equals}; + + #[test] + fn basic_jmpif() { + let src = " + brillig(inline) fn foo f0 { + b0(v0: u32): + v3 = eq v0, u32 0 + jmpif v3 then: b2, else: b1 + b1(): + jmp b3(u32 5) + b2(): + jmp b3(u32 3) + b3(v1: u32): + return v1 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + assert_eq!(ssa.main().reachable_blocks().len(), 4); + + let expected = " + brillig(inline) fn foo f0 { + b0(v0: u32): + v2 = eq v0, u32 0 + v3 = not v2 + v4 = cast v2 as u32 + v5 = cast v3 as u32 + v7 = unchecked_mul v4, u32 3 + v9 = unchecked_mul v5, u32 5 + v10 = unchecked_add v7, v9 + return v10 + } + "; + + let ssa = ssa.flatten_basic_conditionals(); + assert_normalized_ssa_equals(ssa, expected); + } + + #[test] + fn array_jmpif() { + let src = r#" + brillig(inline) fn foo f0 { + b0(v0: u32): + v3 = eq v0, u32 5 + jmpif v3 then: b2, else: b1 + b1(): + v6 = make_array b"foo" + jmp b3(v6) + b2(): + v10 = make_array b"bar" + jmp b3(v10) + b3(v1: [u8; 3]): + return v1 + } + "#; + let ssa = Ssa::from_str(src).unwrap(); + assert_eq!(ssa.main().reachable_blocks().len(), 4); + let ssa = ssa.flatten_basic_conditionals(); + // make_array is not simplified + assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn nested_jmpifs() { + let src = " + brillig(inline) fn foo f0 { + b0(v0: u32): + v5 = eq v0, u32 5 + v6 = not v5 + jmpif v5 then: b5, else: b1 + b1(): + v8 = lt v0, u32 3 + jmpif v8 then: b3, else: b2 + b2(): + v9 = truncate v0 to 2 bits, max_bit_size: 32 + jmp b4(v9) + b3(): + v10 = truncate v0 to 1 bits, max_bit_size: 32 + jmp b4(v10) + b4(v1: u32): + jmp b9(v1) + b5(): + v12 = lt u32 2, v0 + jmpif v12 then: b7, else: b6 + b6(): + v13 = truncate v0 to 3 bits, max_bit_size: 32 + jmp b8(v13) + b7(): + v14 = and v0, u32 2 + jmp b8(v14) + b8(v2: u32): 
+ jmp b9(v2) + b9(v3: u32): + return v3 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + assert_eq!(ssa.main().reachable_blocks().len(), 10); + + let expected = " + brillig(inline) fn foo f0 { + b0(v0: u32): + v3 = eq v0, u32 5 + v4 = not v3 + jmpif v3 then: b2, else: b1 + b1(): + v6 = lt v0, u32 3 + v7 = truncate v0 to 1 bits, max_bit_size: 32 + v8 = not v6 + v9 = truncate v0 to 2 bits, max_bit_size: 32 + v10 = cast v6 as u32 + v11 = cast v8 as u32 + v12 = unchecked_mul v10, v7 + v13 = unchecked_mul v11, v9 + v14 = unchecked_add v12, v13 + jmp b3(v14) + b2(): + v16 = lt u32 2, v0 + v17 = and v0, u32 2 + v18 = not v16 + v19 = truncate v0 to 3 bits, max_bit_size: 32 + v20 = cast v16 as u32 + v21 = cast v18 as u32 + v22 = unchecked_mul v20, v17 + v23 = unchecked_mul v21, v19 + v24 = unchecked_add v22, v23 + jmp b3(v24) + b3(v1: u32): + return v1 + } + "; + + let ssa = ssa.flatten_basic_conditionals(); + assert_eq!(ssa.main().reachable_blocks().len(), 4); + assert_normalized_ssa_equals(ssa, expected); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/brillig_entry_points.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/brillig_entry_points.rs index 380daabee6c..a2844a59997 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/brillig_entry_points.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/brillig_entry_points.rs @@ -13,7 +13,7 @@ //! generated for different entry points can conflict. //! //! To provide a more concrete example, let's take this program: -//! ``` +//! ```noir //! global ONE: Field = 1; //! global TWO: Field = 2; //! global THREE: Field = 3; @@ -40,7 +40,7 @@ //! } //! ``` //! The two entry points will have different global allocation maps: -//! ``` +//! ```noir //! GlobalInit(Id(1)): //! CONST M32835 = 1 //! CONST M32836 = 2 diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index 51d3f0f0105..e742ad4aa5d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -135,9 +135,8 @@ struct Context { /// them just yet. flattened: bool, - // When tracking mutations we consider arrays with the same type as all being possibly mutated. - // This we consider to span all blocks of the functions. - mutated_array_types: HashSet, + /// Track IncrementRc instructions per block to determine whether they are useless. + rc_tracker: RcTracker, } impl Context { @@ -167,10 +166,8 @@ impl Context { let block = &function.dfg[block_id]; self.mark_terminator_values_as_used(function, block); - // Lend the shared array type to the tracker. 
- let mut mutated_array_types = std::mem::take(&mut self.mutated_array_types); - let mut rc_tracker = RcTracker::new(&mut mutated_array_types); - rc_tracker.mark_terminator_arrays_as_used(function, block); + self.rc_tracker.new_block(); + self.rc_tracker.mark_terminator_arrays_as_used(function, block); let instructions_len = block.instructions().len(); @@ -203,12 +200,11 @@ impl Context { } } - rc_tracker.track_inc_rcs_to_remove(*instruction_id, function); + self.rc_tracker.track_inc_rcs_to_remove(*instruction_id, function); } - self.instructions_to_remove.extend(rc_tracker.get_non_mutated_arrays(&function.dfg)); - self.instructions_to_remove.extend(rc_tracker.rc_pairs_to_remove); - + self.instructions_to_remove.extend(self.rc_tracker.get_non_mutated_arrays(&function.dfg)); + self.instructions_to_remove.extend(self.rc_tracker.rc_pairs_to_remove.drain()); // If there are some instructions that might trigger an out of bounds error, // first add constrain checks. Then run the DIE pass again, which will remove those // but leave the constraints (and any value needed by those constraints) @@ -228,9 +224,6 @@ impl Context { .instructions_mut() .retain(|instruction| !self.instructions_to_remove.contains(instruction)); - // Take the mutated array back. - self.mutated_array_types = mutated_array_types; - false } @@ -279,11 +272,15 @@ impl Context { let typ = typ.get_contained_array(); // Want to store the array type which is being referenced, // because it's the underlying array that the `inc_rc` is associated with. - self.mutated_array_types.insert(typ.clone()); + self.add_mutated_array_type(typ.clone()); } } } + fn add_mutated_array_type(&mut self, typ: Type) { + self.rc_tracker.mutated_array_types.insert(typ.get_contained_array().clone()); + } + /// Go through the RC instructions collected when we figured out which values were unused; /// for each RC that refers to an unused value, remove the RC as well. fn remove_rc_instructions(&self, dfg: &mut DataFlowGraph) { @@ -615,8 +612,9 @@ fn apply_side_effects( (lhs, rhs) } +#[derive(Default)] /// Per block RC tracker. -struct RcTracker<'a> { +struct RcTracker { // We can track IncrementRc instructions per block to determine whether they are useless. // IncrementRc and DecrementRc instructions are normally side effectual instructions, but we remove // them if their value is not used anywhere in the function. However, even when their value is used, their existence @@ -631,7 +629,8 @@ struct RcTracker { // If an array is the same type as one of those non-mutated array types, we can safely remove all IncrementRc instructions on that array. inc_rcs: HashMap<ValueId, HashSet<InstructionId>>, // Mutated arrays shared across the blocks of the function. - mutated_array_types: &'a mut HashSet<Type>, + // When tracking mutations we consider arrays with the same type as all being possibly mutated. + mutated_array_types: HashSet<Type>, // The SSA often creates patterns where after simplifications we end up with repeat // IncrementRc instructions on the same value. We track whether the previous instruction was an IncrementRc, // and if the current instruction is also an IncrementRc on the same value we remove the current instruction.
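The refactor above turns `RcTracker` from a per-block temporary holding a borrowed `&'a mut HashSet<Type>` into state owned by the DIE `Context`, reset between blocks while `mutated_array_types` accumulates for the whole function. A minimal sketch of that partial-reset pattern; the `u32` ids and `String` type names are illustrative stand-ins for the real `ValueId`/`InstructionId`/`Type`:

```rust
use std::collections::{HashMap, HashSet};

#[derive(Default)]
struct Tracker {
    // Per-block state: cleared at the start of every block.
    inc_rcs: HashMap<u32, HashSet<u32>>,
    rc_pairs_to_remove: HashSet<u32>,
    // Function-wide state: deliberately *not* cleared, because a mutation
    // in one block must keep inc_rc instructions alive in every other block.
    mutated_array_types: HashSet<String>,
}

impl Tracker {
    fn new_block(&mut self) {
        self.inc_rcs.clear();
        self.rc_pairs_to_remove.clear();
        // mutated_array_types intentionally survives across blocks.
    }
}

fn main() {
    let mut tracker = Tracker::default();
    tracker.mutated_array_types.insert("[Field; 3]".to_string());
    tracker.new_block();
    assert!(tracker.mutated_array_types.contains("[Field; 3]"));
}
```

The new `do_not_remove_inc_rc_if_mutated_in_other_block` test below exercises exactly this: the `inc_rc` in `b0` must survive because an array of the same type is mutated in `b1`.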
@@ -639,15 +638,12 @@ struct RcTracker<'a> { previous_inc_rc: Option, } -impl<'a> RcTracker<'a> { - fn new(mutated_array_types: &'a mut HashSet) -> Self { - Self { - rcs_with_possible_pairs: Default::default(), - rc_pairs_to_remove: Default::default(), - inc_rcs: Default::default(), - previous_inc_rc: Default::default(), - mutated_array_types, - } +impl RcTracker { + fn new_block(&mut self) { + self.rcs_with_possible_pairs.clear(); + self.rc_pairs_to_remove.clear(); + self.inc_rcs.clear(); + self.previous_inc_rc = Default::default(); } fn mark_terminator_arrays_as_used(&mut self, function: &Function, block: &BasicBlock) { @@ -1128,4 +1124,38 @@ mod test { "; assert_normalized_ssa_equals(ssa, expected); } + + #[test] + fn do_not_remove_inc_rc_if_mutated_in_other_block() { + let src = " + brillig(inline) fn main f0 { + b0(v0: &mut [Field; 3]): + v1 = load v0 -> [Field; 3] + inc_rc v1 + jmp b1() + b1(): + v2 = load v0 -> [Field; 3] + v3 = array_set v2, index u32 0, value u32 0 + store v3 at v0 + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + brillig(inline) fn main f0 { + b0(v0: &mut [Field; 3]): + v1 = load v0 -> [Field; 3] + inc_rc v1 + jmp b1() + b1(): + v2 = load v0 -> [Field; 3] + v4 = array_set v2, index u32 0, value u32 0 + store v4 at v0 + return + } + "; + let ssa = ssa.dead_instruction_elimination(); + assert_normalized_ssa_equals(ssa, expected); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 2ffaa52f0ea..a25e3db2b08 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -176,12 +176,15 @@ impl Ssa { } } -struct Context<'f> { - inserter: FunctionInserter<'f>, +pub(crate) struct Context<'f> { + pub(crate) inserter: FunctionInserter<'f>, /// This ControlFlowGraph is the graph from before the function was modified by this flattening pass. cfg: ControlFlowGraph, + /// Target block of the flattening + pub(crate) target_block: BasicBlockId, + /// Maps start of branch -> end of branch branch_ends: HashMap, @@ -213,6 +216,10 @@ struct Context<'f> { /// us from unnecessarily inserting extra instructions, and keeps ids unique which /// helps simplifications. not_instructions: HashMap, + + /// Flag to tell the context to not issue 'enable_side_effect' instructions during flattening. + /// This should be set to true only by flatten_single(), when no instruction is known to fail. + pub(crate) no_predicate: bool, } #[derive(Clone)] @@ -249,6 +256,7 @@ fn flatten_function_cfg(function: &mut Function, no_predicates: &HashMap { - fn flatten(&mut self, no_predicates: &HashMap) { +impl<'f> Context<'f> { + //impl Context<'_> { + pub(crate) fn new( + function: &'f mut Function, + cfg: ControlFlowGraph, + branch_ends: HashMap, + target_block: BasicBlockId, + ) -> Self { + Context { + inserter: FunctionInserter::new(function), + cfg, + branch_ends, + condition_stack: Vec::new(), + arguments_stack: Vec::new(), + local_allocations: HashSet::default(), + not_instructions: HashMap::default(), + target_block, + no_predicate: false, + } + } + + pub(crate) fn flatten(&mut self, no_predicates: &HashMap) { // Flatten the CFG by inlining all instructions from the queued blocks // until all blocks have been flattened. 
// We follow the terminator of each block to determine which blocks to // process next - let mut queue = vec![self.inserter.function.entry_block()]; + let mut queue = vec![self.target_block]; while let Some(block) = queue.pop() { self.inline_block(block, no_predicates); let to_process = self.handle_terminator(block, &queue); @@ -318,10 +348,14 @@ impl Context<'_> { result } - // Inline all instructions from the given block into the entry block, and track slice capacities - fn inline_block(&mut self, block: BasicBlockId, no_predicates: &HashMap) { - if self.inserter.function.entry_block() == block { - // we do not inline the entry block into itself + // Inline all instructions from the given block into the target block, and track slice capacities + pub(crate) fn inline_block( + &mut self, + block: BasicBlockId, + no_predicates: &HashMap, + ) { + if self.target_block == block { + // we do not inline the target block into itself // for the outer block before we start inlining return; } @@ -354,7 +388,7 @@ impl Context<'_> { /// For a normal block, it would be its successor /// For blocks related to a conditional statement, we ensure to process /// the 'then-branch', then the 'else-branch' (if it exists), and finally the end block - fn handle_terminator( + pub(crate) fn handle_terminator( &mut self, block: BasicBlockId, work_list: &[BasicBlockId], @@ -388,9 +422,9 @@ impl Context<'_> { let return_values = vecmap(return_values.clone(), |value| self.inserter.resolve(value)); let new_return = TerminatorInstruction::Return { return_values, call_stack }; - let entry = self.inserter.function.entry_block(); + let target = self.target_block; - self.inserter.function.dfg.set_block_terminator(entry, new_return); + self.inserter.function.dfg.set_block_terminator(target, new_return); vec![] } } @@ -544,7 +578,7 @@ impl Context<'_> { } else { self.inserter.function.dfg.make_constant(FieldElement::zero(), NumericType::bool()) }; - let block = self.inserter.function.entry_block(); + let block = self.target_block; // Cannot include this in the previous vecmap since it requires exclusive access to self let args = vecmap(args, |(then_arg, else_arg)| { @@ -568,11 +602,11 @@ impl Context<'_> { destination } - /// Insert a new instruction into the function's entry block. + /// Insert a new instruction into the target block. /// Unlike push_instruction, this function will not map any ValueIds. /// within the given instruction, nor will it modify self.values in any way. fn insert_instruction(&mut self, instruction: Instruction, call_stack: CallStackId) -> ValueId { - let block = self.inserter.function.entry_block(); + let block = self.target_block; self.inserter .function .dfg @@ -580,7 +614,7 @@ impl Context<'_> { .first() } - /// Inserts a new instruction into the function's entry block, using the given + /// Inserts a new instruction into the target block, using the given /// control type variables to specify result types if needed. /// Unlike push_instruction, this function will not map any ValueIds. /// within the given instruction, nor will it modify self.values in any way. 
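Taken together, these hunks generalize the flattener: instead of always inlining into the function's entry block, a `Context` can now be pointed at any `target_block`. A hypothetical driver in the spirit of the `basic_conditional` pass above; the `flatten_sub_cfg` name is illustrative, and this assumes the `branch_analysis::find_branch_ends` helper this file already relies on:

```rust
// Sketch: flatten only the sub-CFG rooted at `target`, inlining its blocks
// into `target` itself rather than into the function's entry block.
fn flatten_sub_cfg(function: &mut Function, target: BasicBlockId) {
    let cfg = ControlFlowGraph::with_function(function);
    // Maps each block ending in a jmpif to the block where its branches rejoin.
    let branch_ends = find_branch_ends(function, &cfg);
    let mut context = Context::new(function, cfg, branch_ends, target);
    // Leaving `no_predicate` false keeps the usual enable_side_effects
    // bracketing; flatten_single() would set it to true instead.
    context.flatten(&HashMap::default());
}
```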
@@ -590,7 +624,7 @@ impl Context<'_> { ctrl_typevars: Option>, call_stack: CallStackId, ) -> InsertInstructionResult { - let block = self.inserter.function.entry_block(); + let block = self.target_block; self.inserter.function.dfg.insert_instruction_and_results( instruction, block, @@ -600,11 +634,14 @@ impl Context<'_> { } /// Checks the branch condition on the top of the stack and uses it to build and insert an - /// `EnableSideEffectsIf` instruction into the entry block. + /// `EnableSideEffectsIf` instruction into the target block. /// /// If the stack is empty, a "true" u1 constant is taken to be the active condition. This is /// necessary for re-enabling side-effects when re-emerging to a branch depth of 0. fn insert_current_side_effects_enabled(&mut self) { + if self.no_predicate { + return; + } let condition = match self.get_last_condition() { Some(cond) => cond, None => { @@ -616,7 +653,7 @@ impl Context<'_> { self.insert_instruction_with_typevars(enable_side_effects, None, call_stack); } - /// Push the given instruction to the end of the entry block of the current function. + /// Push the given instruction to the end of the target block of the current function. /// /// Note that each ValueId of the instruction will be mapped via self.inserter.resolve. /// As a result, the instruction that will be pushed will actually be a new instruction @@ -631,8 +668,8 @@ impl Context<'_> { let instruction = self.handle_instruction_side_effects(instruction, call_stack); let instruction_is_allocate = matches!(&instruction, Instruction::Allocate); - let entry = self.inserter.function.entry_block(); - let results = self.inserter.push_instruction_value(instruction, id, entry, call_stack); + let results = + self.inserter.push_instruction_value(instruction, id, self.target_block, call_stack); // Remember an allocate was created local to this branch so that we do not try to merge store // values across branches for it later. 
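What flattening emits in place of control flow is worth pinning down: after both branches are inlined into the target block, each join-block parameter is replaced by the arithmetic select `cond * then_value + (1 - cond) * else_value`. A standalone sketch in plain Rust mirroring the cast/unchecked_mul/unchecked_add sequence from the `basic_jmpif` test above (wrapping ops stand in for the SSA's unchecked ones):

```rust
/// Branchless merge of two branch results, as produced by flattening.
fn merge(cond: bool, then_value: u32, else_value: u32) -> u32 {
    let c = cond as u32;          // cast v2 as u32
    let not_c = (!cond) as u32;   // cast (not v2) as u32
    // unchecked_mul + unchecked_add in the SSA
    c.wrapping_mul(then_value).wrapping_add(not_c.wrapping_mul(else_value))
}

fn main() {
    // v0 == 0 ? 3 : 5, as in the basic_jmpif expected SSA
    assert_eq!(merge(true, 3, 5), 3);
    assert_eq!(merge(false, 3, 5), 5);
}
```

When `no_predicate` is set (the `flatten_single()` path), this merge is all that remains: the `enable_side_effects` bracketing that `insert_current_side_effects_enabled` would normally emit is skipped entirely, which is why the expected SSA in the tests above contains no `enable_side_effects` instructions.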
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs index 638da8b7b6e..911554e5d6d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs @@ -20,7 +20,7 @@ mod tests { emit_ssa: None, skip_underconstrained_check: true, enable_brillig_constraints_check_lookback: false, - enable_brillig_constraints_check: false, + skip_brillig_constraints_check: true, inliner_aggressiveness: 0, max_bytecode_increase_percent: None, }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 15414e92eff..161eea182d6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -1274,29 +1274,29 @@ mod test { fn inline_simple_functions_with_zero_instructions() { let src = " acir(inline) fn main f0 { - b0(v0: Field): - v2 = call f1(v0) -> Field - v3 = call f1(v0) -> Field - v4 = add v2, v3 - return v4 + b0(v0: Field): + v2 = call f1(v0) -> Field + v3 = call f1(v0) -> Field + v4 = add v2, v3 + return v4 } acir(inline) fn foo f1 { - b0(v0: Field): - return v0 + b0(v0: Field): + return v0 } "; let ssa = Ssa::from_str(src).unwrap(); let expected = " acir(inline) fn main f0 { - b0(v0: Field): - v1 = add v0, v0 - return v1 + b0(v0: Field): + v1 = add v0, v0 + return v1 } acir(inline) fn foo f1 { - b0(v0: Field): - return v0 + b0(v0: Field): + return v0 } "; @@ -1308,33 +1308,33 @@ mod test { fn inline_simple_functions_with_one_instruction() { let src = " acir(inline) fn main f0 { - b0(v0: Field): - v2 = call f1(v0) -> Field - v3 = call f1(v0) -> Field - v4 = add v2, v3 - return v4 + b0(v0: Field): + v2 = call f1(v0) -> Field + v3 = call f1(v0) -> Field + v4 = add v2, v3 + return v4 } acir(inline) fn foo f1 { - b0(v0: Field): - v2 = add v0, Field 1 - return v2 + b0(v0: Field): + v2 = add v0, Field 1 + return v2 } "; let ssa = Ssa::from_str(src).unwrap(); let expected = " acir(inline) fn main f0 { - b0(v0: Field): - v2 = add v0, Field 1 - v3 = add v0, Field 1 - v4 = add v2, v3 - return v4 + b0(v0: Field): + v2 = add v0, Field 1 + v3 = add v0, Field 1 + v4 = add v2, v3 + return v4 } acir(inline) fn foo f1 { - b0(v0: Field): - v2 = add v0, Field 1 - return v2 + b0(v0: Field): + v2 = add v0, Field 1 + return v2 } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index f68afc55efa..9bbac1a4c0c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -840,15 +840,15 @@ mod test { let src = " brillig(inline) fn main f0 { b0(v0: u32, v1: u32): - jmp b1(u32 0) + jmp b1(u32 0) b1(v2: u32): - v5 = lt v2, u32 4 - jmpif v5 then: b3, else: b2 + v5 = lt v2, u32 4 + jmpif v5 then: b3, else: b2 b2(): - return + return b3(): - v7 = sub v2, u32 1 - jmp b1(v7) + v7 = sub v2, u32 1 + jmp b1(v7) } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 38004cdf151..a9784d4c7cf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -7,6 +7,7 @@ mod array_set; mod as_slice_length; mod assert_constant; +mod basic_conditional; 
mod brillig_array_gets; pub(crate) mod brillig_entry_points; mod check_u128_mul_overflow; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index 7f37f98b4fb..b4427a1c91b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -167,6 +167,7 @@ impl Context<'_> { let lhs_typ = self.function.dfg.type_of_value(lhs).unwrap_numeric(); let base = self.field_constant(FieldElement::from(2_u128)); let pow = self.pow(base, rhs); + let pow = self.pow_or_max_for_bit_size(pow, rhs, bit_size, lhs_typ); let pow = self.insert_cast(pow, lhs_typ); if lhs_typ.is_unsigned() { // unsigned right bit shift is just a normal division @@ -205,6 +206,53 @@ impl Context<'_> { } } + /// Returns `pow` or the maximum value allowed for `typ` if 2^rhs is guaranteed to exceed that maximum. + fn pow_or_max_for_bit_size( + &mut self, + pow: ValueId, + rhs: ValueId, + bit_size: u32, + typ: NumericType, + ) -> ValueId { + let max = if typ.is_unsigned() { + if bit_size == 128 { u128::MAX } else { (1_u128 << bit_size) - 1 } + } else { + 1_u128 << (bit_size - 1) + }; + let max = self.field_constant(FieldElement::from(max)); + + // Here we check whether rhs is less than the bit_size: if it's not then it will overflow. + // Then we do: + // + // rhs_is_less_than_bit_size = lt rhs, bit_size + // rhs_is_not_less_than_bit_size = not rhs_is_less_than_bit_size + // pow_when_is_less_than_bit_size = rhs_is_less_than_bit_size * pow + // pow_when_is_not_less_than_bit_size = rhs_is_not_less_than_bit_size * max + // pow = add pow_when_is_less_than_bit_size, pow_when_is_not_less_than_bit_size + // + // All operations here are unchecked because they work on field types. 
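+        //
+        // Worked example (illustrative): shifting a u8 means bit_size = 8; for
+        // rhs = 9, rhs_is_less_than_bit_size = 0, so the sum collapses to
+        // max = 2^8 - 1 = 255 instead of the overflowing 2^9.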
+ let rhs_typ = self.function.dfg.type_of_value(rhs).unwrap_numeric(); + let bit_size = self.numeric_constant(bit_size as u128, rhs_typ); + let rhs_is_less_than_bit_size = self.insert_binary(rhs, BinaryOp::Lt, bit_size); + let rhs_is_not_less_than_bit_size = self.insert_not(rhs_is_less_than_bit_size); + let rhs_is_less_than_bit_size = + self.insert_cast(rhs_is_less_than_bit_size, NumericType::NativeField); + let rhs_is_not_less_than_bit_size = + self.insert_cast(rhs_is_not_less_than_bit_size, NumericType::NativeField); + let pow_when_is_less_than_bit_size = + self.insert_binary(rhs_is_less_than_bit_size, BinaryOp::Mul { unchecked: true }, pow); + let pow_when_is_not_less_than_bit_size = self.insert_binary( + rhs_is_not_less_than_bit_size, + BinaryOp::Mul { unchecked: true }, + max, + ); + self.insert_binary( + pow_when_is_less_than_bit_size, + BinaryOp::Add { unchecked: true }, + pow_when_is_not_less_than_bit_size, + ) + } + /// Computes lhs^rhs via square&multiply, using the bits decomposition of rhs /// Pseudo-code of the computation: /// let mut r = 1; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index a954ac3ab93..fbfb9d06de2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -24,6 +24,7 @@ use self::{ value::{Tree, Values}, }; +use super::ir::basic_block::BasicBlockId; use super::ir::dfg::GlobalsGraph; use super::ir::instruction::ErrorType; use super::ir::types::NumericType; @@ -370,7 +371,7 @@ impl FunctionContext<'_> { unary.location, )) } - UnaryOp::MutableReference => { + UnaryOp::Reference { mutable: _ } => { Ok(self.codegen_reference(&unary.rhs)?.map(|rhs| { match rhs { value::Value::Normal(value) => { @@ -767,7 +768,15 @@ impl FunctionContext<'_> { let tag = self.enum_tag(&variable); let tag_type = self.builder.type_of_value(tag).unwrap_numeric(); - let end_block = self.builder.insert_block(); + let make_end_block = |this: &mut Self| -> (BasicBlockId, Values) { + let block = this.builder.insert_block(); + let results = Self::map_type(&match_expr.typ, |typ| { + this.builder.add_block_parameter(block, typ).into() + }); + (block, results) + }; + + let (end_block, end_results) = make_end_block(self); // Optimization: if there is no default case we can jump directly to the last case // when finished with the previous case instead of using a jmpif with an unreachable @@ -778,6 +787,8 @@ impl FunctionContext<'_> { match_expr.cases.len() - 1 }; + let mut blocks_to_merge = Vec::with_capacity(last_case); + for i in 0..last_case { let case = &match_expr.cases[i]; let variant_tag = self.variant_index_value(&case.constructor, tag_type)?; @@ -790,28 +801,70 @@ impl FunctionContext<'_> { self.builder.switch_to_block(case_block); self.bind_case_arguments(variable.clone(), case); let results = self.codegen_expression(&case.branch)?.into_value_list(self); - self.builder.terminate_with_jmp(end_block, results); + + // Each branch will jump to a different end block for now. We have to merge them all + // later since SSA doesn't support more than two blocks jumping to the same end block. 
+ let local_end_block = make_end_block(self); + self.builder.terminate_with_jmp(local_end_block.0, results); + blocks_to_merge.push(local_end_block); self.builder.switch_to_block(else_block); } + let (last_local_end_block, last_results) = make_end_block(self); + blocks_to_merge.push((last_local_end_block, last_results)); + if let Some(branch) = &match_expr.default_case { let results = self.codegen_expression(branch)?.into_value_list(self); - self.builder.terminate_with_jmp(end_block, results); + self.builder.terminate_with_jmp(last_local_end_block, results); } else { // If there is no default case, assume we saved the last case from the // last_case optimization above let case = match_expr.cases.last().unwrap(); self.bind_case_arguments(variable, case); let results = self.codegen_expression(&case.branch)?.into_value_list(self); - self.builder.terminate_with_jmp(end_block, results); + self.builder.terminate_with_jmp(last_local_end_block, results); + } + + // Merge blocks as last-in first-out: + // + // local_end_block0-----------------------------------------\ + // end block + // / + // local_end_block1---------------------\ / + // new merge block2-/ + // local_end_block2--\ / + // new merge block1-/ + // local_end_block3--/ + // + // This is necessary since SSA panics during flattening if we immediately + // try to jump directly to end block instead: https://github.com/noir-lang/noir/issues/7323. + // + // It'd also be more efficient to merge them tournament-bracket style but that + // also leads to panics during flattening for similar reasons. + while let Some((block, results)) = blocks_to_merge.pop() { + self.builder.switch_to_block(block); + + if let Some((block2, results2)) = blocks_to_merge.pop() { + // Merge two blocks in the queue together + let (new_merge, new_merge_results) = make_end_block(self); + blocks_to_merge.push((new_merge, new_merge_results)); + + let results = results.into_value_list(self); + self.builder.terminate_with_jmp(new_merge, results); + + self.builder.switch_to_block(block2); + let results2 = results2.into_value_list(self); + self.builder.terminate_with_jmp(new_merge, results2); + } else { + // Finally done, jump to the end + let results = results.into_value_list(self); + self.builder.terminate_with_jmp(end_block, results); + } } self.builder.switch_to_block(end_block); - let result = Self::map_type(&match_expr.typ, |typ| { - self.builder.add_block_parameter(end_block, typ).into() - }); - Ok(result) + Ok(end_results) } fn variant_index_value( diff --git a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml index e5231565041..ca2612f3d92 100644 --- a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml @@ -36,7 +36,7 @@ fxhash.workspace = true [dev-dependencies] base64.workspace = true proptest.workspace = true -proptest-derive = "0.5.0" +proptest-derive.workspace = true [features] experimental_parser = [] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 096e5cbad86..cc4cebafbbe 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -383,7 +383,9 @@ impl BinaryOpKind { pub enum UnaryOp { Minus, Not, - MutableReference, + Reference { + mutable: bool, + }, /// If implicitly_added is true, this operation was implicitly added by the compiler for a /// field dereference. 
The compiler may undo some of these implicitly added dereferences if @@ -732,7 +734,8 @@ impl Display for UnaryOp { match self { UnaryOp::Minus => write!(f, "-"), UnaryOp::Not => write!(f, "!"), - UnaryOp::MutableReference => write!(f, "&mut"), + UnaryOp::Reference { mutable } if *mutable => write!(f, "&mut"), + UnaryOp::Reference { .. } => write!(f, "&"), UnaryOp::Dereference { .. } => write!(f, "*"), } } @@ -930,6 +933,13 @@ impl FunctionReturnType { FunctionReturnType::Ty(typ) => Cow::Borrowed(typ), } } + + pub fn location(&self) -> Location { + match self { + FunctionReturnType::Default(location) => *location, + FunctionReturnType::Ty(typ) => typ.location, + } + } } impl Display for FunctionReturnType { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 8e74ce8877e..d4c48466b85 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -135,8 +135,8 @@ pub enum UnresolvedTypeData { /// A Trait as return type or parameter of function, including its generics TraitAsType(Path, GenericTypeArgs), - /// &mut T - MutableReference(Box), + /// &T and &mut T + Reference(Box, /*mutable*/ bool), // Note: Tuples have no visibility, instead each of their elements may have one. Tuple(Vec), @@ -311,7 +311,8 @@ impl std::fmt::Display for UnresolvedTypeData { other => write!(f, "fn[{other}]({args}) -> {ret}"), } } - MutableReference(element) => write!(f, "&mut {element}"), + Reference(element, false) => write!(f, "&{element}"), + Reference(element, true) => write!(f, "&mut {element}"), Quoted(quoted) => write!(f, "{}", quoted), Unit => write!(f, "()"), Error => write!(f, "error"), @@ -346,7 +347,7 @@ impl std::fmt::Display for UnresolvedTypeExpression { impl UnresolvedType { pub fn is_synthesized(&self) -> bool { match &self.typ { - UnresolvedTypeData::MutableReference(ty) => ty.is_synthesized(), + UnresolvedTypeData::Reference(ty, _) => ty.is_synthesized(), UnresolvedTypeData::Named(_, _, synthesized) => *synthesized, _ => false, } @@ -424,7 +425,7 @@ impl UnresolvedTypeData { path_is_wildcard || an_arg_is_unresolved } UnresolvedTypeData::TraitAsType(_path, args) => args.contains_unspecified(), - UnresolvedTypeData::MutableReference(typ) => typ.contains_unspecified(), + UnresolvedTypeData::Reference(typ, _) => typ.contains_unspecified(), UnresolvedTypeData::Tuple(args) => args.iter().any(|arg| arg.contains_unspecified()), UnresolvedTypeData::Function(args, ret, env, _unconstrained) => { let args_contains_unspecified = args.iter().any(|arg| arg.contains_unspecified()); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index b2142f26655..8e78ca5ec54 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -393,7 +393,7 @@ pub trait Visitor { true } - fn visit_mutable_reference_type(&mut self, _: &UnresolvedType, _: Span) -> bool { + fn visit_reference_type(&mut self, _: &UnresolvedType, _mutable: bool, _: Span) -> bool { true } @@ -1382,8 +1382,8 @@ impl UnresolvedType { generic_type_args.accept(visitor); } } - UnresolvedTypeData::MutableReference(unresolved_type) => { - if visitor.visit_mutable_reference_type(unresolved_type, self.location.span) { + UnresolvedTypeData::Reference(unresolved_type, mutable) => { + if visitor.visit_reference_type(unresolved_type, *mutable, self.location.span) { 
unresolved_type.accept(visitor); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/enums.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/enums.rs index 90849a750d5..b5933f9dc37 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/enums.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/enums.rs @@ -1,8 +1,9 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use fxhash::FxHashMap as HashMap; -use iter_extended::{try_vecmap, vecmap}; +use iter_extended::{btree_map, try_vecmap, vecmap}; use noirc_errors::Location; +use rangemap::StepLite; use crate::{ DataType, Kind, Shared, Type, @@ -27,6 +28,80 @@ use crate::{ use super::Elaborator; +const WILDCARD_PATTERN: &str = "_"; + +struct MatchCompiler<'elab, 'ctx> { + elaborator: &'elab mut Elaborator<'ctx>, + has_missing_cases: bool, + // We iterate on this to issue errors later so it needs to be a BTreeMap (versus HashMap) to be + // deterministic. + unreachable_cases: BTreeMap<ExprId, Location>, +} + +/// A Pattern is anything that can appear before the `=>` in a match rule. +#[derive(Debug, Clone)] +enum Pattern { + /// A pattern checking for a tag and possibly binding variables such as `Some(42)` + Constructor(Constructor, Vec<Pattern>), + /// An integer literal pattern such as `4`, `12345`, or `-56` + Int(SignedField), + /// A pattern binding a variable such as `a` or `_` + Binding(DefinitionId), + + /// Multiple patterns combined with `|` where we should match this pattern if any + /// constituent pattern matches. e.g. `Some(3) | None` or `Some(1) | Some(2) | None` + #[allow(unused)] + Or(Vec<Pattern>), + + /// An integer range pattern such as `1..20` which will match any integer n such that + /// 1 <= n < 20. + #[allow(unused)] + Range(SignedField, SignedField), + + /// An error occurred while translating this pattern. This Pattern kind always translates + /// to a Fail branch in the decision tree, although the compiler is expected to halt + /// with errors before execution. + Error, +} + +#[derive(Clone)] +struct Column { + variable_to_match: DefinitionId, + pattern: Pattern, +} + +impl Column { + fn new(variable_to_match: DefinitionId, pattern: Pattern) -> Self { + Column { variable_to_match, pattern } + } +} + +#[derive(Clone)] +pub(super) struct Row { + columns: Vec<Column>, + guard: Option<ExprId>, + body: RowBody, + original_body: RowBody, + location: Location, +} + +type RowBody = ExprId; + +impl Row { + fn new(columns: Vec<Column>, guard: Option<ExprId>, body: RowBody, location: Location) -> Row { + Row { columns, guard, body, original_body: body, location } + } +} + +impl Row { + fn remove_column(&mut self, variable: DefinitionId) -> Option<Column> { + self.columns + .iter() + .position(|c| c.variable_to_match == variable) + .map(|idx| self.columns.remove(idx)) + } +} + impl Elaborator<'_> { /// Defines the value of an enum variant that we resolve an enum /// variant expression to. E.g. `Foo::Bar` in `Foo::Bar(baz)`.
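The bookkeeping that drives the new diagnostics: `unreachable_cases` starts out holding every arm of the `match` (keyed by body `ExprId`, in a `BTreeMap` so warning order is deterministic), and entries are removed as `compile_rows` proves each body reachable. A simplified standalone sketch of that mechanism, with illustrative ids and locations:

```rust
use std::collections::BTreeMap;

type BodyId = u32;

struct MatchChecker {
    // Pre-seeded with every arm's body; whatever survives compilation
    // was never reached and is reported as an unreachable case.
    unreachable_cases: BTreeMap<BodyId, &'static str /* source location */>,
}

impl MatchChecker {
    fn body_reached(&mut self, body: BodyId) {
        // Mirrors `self.unreachable_cases.remove(&row.original_body)`
        // when compile_rows produces HirMatch::Success for a row.
        self.unreachable_cases.remove(&body);
    }

    fn finish(self) -> Vec<&'static str> {
        self.unreachable_cases.into_values().collect()
    }
}

fn main() {
    let mut checker = MatchChecker {
        unreachable_cases: BTreeMap::from([(0, "1:1"), (1, "2:1"), (2, "3:1")]),
    };
    checker.body_reached(0);
    checker.body_reached(2);
    assert_eq!(checker.finish(), vec!["2:1"]); // the second arm is shadowed
}
```

`has_missing_cases` works in the opposite direction: it is set when `compile_rows` bottoms out with no rows left, and the finished decision tree is then walked to reconstruct counter-example patterns for the error message.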
@@ -273,6 +348,7 @@ impl Elaborator<'_> { let rows = vecmap(rules, |(pattern, branch)| { self.push_scope(); + let pattern_location = pattern.location; let pattern = self.expression_to_pattern(pattern, &expected_pattern_type, &mut Vec::new()); let columns = vec![Column::new(variable_to_match, pattern)]; @@ -288,7 +364,7 @@ impl Elaborator<'_> { }); self.pop_scope(); - Row::new(columns, guard, body) + Row::new(columns, guard, body, pattern_location) }); (rows, result_type) } @@ -433,7 +509,7 @@ impl Elaborator<'_> { if let Some(existing) = variables_defined.iter().find(|elem| *elem == &name) { // Allow redefinition of `_` only, to ignore variables - if name.0.contents != "_" { + if name.0.contents != WILDCARD_PATTERN { self.push_err(ResolverError::VariableAlreadyDefinedInPattern { existing: existing.clone(), new_location: name.location(), @@ -524,8 +600,7 @@ impl Elaborator<'_> { ), Err(error) => { self.push_err(error); - let id = self.fresh_match_variable(expected_type.clone(), location); - Pattern::Binding(id) + Pattern::Error } } } @@ -721,17 +796,49 @@ impl Elaborator<'_> { /// /// This is an adaptation of https://github.com/yorickpeterse/pattern-matching-in-rust/tree/main/jacobs2021 /// which is an implementation of https://julesjacobs.com/notes/patternmatching/patternmatching.pdf - pub(super) fn elaborate_match_rows(&mut self, rows: Vec) -> HirMatch { - self.compile_rows(rows).unwrap_or_else(|error| { - self.push_err(error); - HirMatch::Failure - }) + pub(super) fn elaborate_match_rows( + &mut self, + rows: Vec, + type_matched_on: &Type, + location: Location, + ) -> HirMatch { + MatchCompiler::run(self, rows, type_matched_on, location) + } +} + +impl<'elab, 'ctx> MatchCompiler<'elab, 'ctx> { + fn run( + elaborator: &'elab mut Elaborator<'ctx>, + rows: Vec, + type_matched_on: &Type, + location: Location, + ) -> HirMatch { + let mut compiler = Self { + elaborator, + has_missing_cases: false, + unreachable_cases: rows.iter().map(|row| (row.body, row.location)).collect(), + }; + + let hir_match = compiler.compile_rows(rows).unwrap_or_else(|error| { + compiler.elaborator.push_err(error); + HirMatch::Failure { missing_case: false } + }); + + if compiler.has_missing_cases { + compiler.issue_missing_cases_error(&hir_match, type_matched_on, location); + } + + if !compiler.unreachable_cases.is_empty() { + compiler.issue_unreachable_cases_warning(); + } + + hir_match } fn compile_rows(&mut self, mut rows: Vec) -> Result { if rows.is_empty() { - eprintln!("Warning: missing case"); - return Ok(HirMatch::Failure); + self.has_missing_cases = true; + return Ok(HirMatch::Failure { missing_case: true }); } self.push_tests_against_bare_variables(&mut rows); @@ -741,7 +848,10 @@ impl Elaborator<'_> { let row = rows.remove(0); return Ok(match row.guard { - None => HirMatch::Success(row.body), + None => { + self.unreachable_cases.remove(&row.original_body); + HirMatch::Success(row.body) + } Some(cond) => { let remaining = self.compile_rows(rows)?; HirMatch::Guard { cond, body: row.body, otherwise: Box::new(remaining) } @@ -750,9 +860,10 @@ impl Elaborator<'_> { } let branch_var = self.branch_variable(&rows); - let location = self.interner.definition(branch_var).location; + let location = self.elaborator.interner.definition(branch_var).location; - match self.interner.definition_type(branch_var).follow_bindings_shallow().into_owned() { + let definition_type = self.elaborator.interner.definition_type(branch_var); + match definition_type.follow_bindings_shallow().into_owned() { Type::FieldElement | 
Type::Integer(_, _) => { let (cases, fallback) = self.compile_int_cases(rows, branch_var)?; Ok(HirMatch::Switch(branch_var, cases, Some(fallback))) @@ -827,7 +938,7 @@ impl Elaborator<'_> { | Type::NamedGeneric(_, _) | Type::CheckedCast { .. } | Type::Function(_, _, _, _) - | Type::MutableReference(_) + | Type::Reference(..) | Type::Forall(_, _) | Type::Constant(_, _) | Type::Quoted(_) @@ -849,8 +960,8 @@ impl Elaborator<'_> { fn fresh_match_variable(&mut self, variable_type: Type, location: Location) -> DefinitionId { let name = "internal_match_variable".to_string(); let kind = DefinitionKind::Local(None); - let id = self.interner.push_definition(name, false, false, kind, location); - self.interner.push_definition_type(id, variable_type); + let id = self.elaborator.interner.push_definition(name, false, false, kind, location); + self.elaborator.interner.push_definition_type(id, variable_type); id } @@ -946,7 +1057,7 @@ impl Elaborator<'_> { cols.push(Column::new(*var, pat)); } - cases[idx].2.push(Row::new(cols, row.guard, row.body)); + cases[idx].2.push(Row::new(cols, row.guard, row.body, row.location)); } } else { for (_, _, rows) in &mut cases { @@ -1045,16 +1156,16 @@ impl Elaborator<'_> { /// Creates: /// `{ let = ; }` fn let_binding(&mut self, variable: DefinitionId, rhs: DefinitionId, body: ExprId) -> ExprId { - let location = self.interner.definition(rhs).location; + let location = self.elaborator.interner.definition(rhs).location; - let r#type = self.interner.definition_type(variable); - let rhs_type = self.interner.definition_type(rhs); + let r#type = self.elaborator.interner.definition_type(variable); + let rhs_type = self.elaborator.interner.definition_type(rhs); let variable = HirIdent::non_trait_method(variable, location); let rhs = HirExpression::Ident(HirIdent::non_trait_method(rhs, location), None); - let rhs = self.interner.push_expr(rhs); - self.interner.push_expr_type(rhs, rhs_type); - self.interner.push_expr_location(rhs, location); + let rhs = self.elaborator.interner.push_expr(rhs); + self.elaborator.interner.push_expr_type(rhs, rhs_type); + self.elaborator.interner.push_expr_location(rhs, location); let let_ = HirStatement::Let(HirLetStatement { pattern: HirPattern::Identifier(variable), @@ -1065,77 +1176,185 @@ impl Elaborator<'_> { is_global_let: false, }); - let body_type = self.interner.id_type(body); - let let_ = self.interner.push_stmt(let_); - let body = self.interner.push_stmt(HirStatement::Expression(body)); + let body_type = self.elaborator.interner.id_type(body); + let let_ = self.elaborator.interner.push_stmt(let_); + let body = self.elaborator.interner.push_stmt(HirStatement::Expression(body)); - self.interner.push_stmt_location(let_, location); - self.interner.push_stmt_location(body, location); + self.elaborator.interner.push_stmt_location(let_, location); + self.elaborator.interner.push_stmt_location(body, location); let block = HirExpression::Block(HirBlockExpression { statements: vec![let_, body] }); - let block = self.interner.push_expr(block); - self.interner.push_expr_type(block, body_type); - self.interner.push_expr_location(block, location); + let block = self.elaborator.interner.push_expr(block); + self.elaborator.interner.push_expr_type(block, body_type); + self.elaborator.interner.push_expr_location(block, location); block } -} -/// A Pattern is anything that can appear before the `=>` in a match rule. 
-#[derive(Debug, Clone)] -enum Pattern { - /// A pattern checking for a tag and possibly binding variables such as `Some(42)` - Constructor(Constructor, Vec), - /// An integer literal pattern such as `4`, `12345`, or `-56` - Int(SignedField), - /// A pattern binding a variable such as `a` or `_` - Binding(DefinitionId), + /// Any case that isn't branched to when the match is finished must be covered by another + /// case and is thus redundant. + fn issue_unreachable_cases_warning(&mut self) { + for location in self.unreachable_cases.values().copied() { + self.elaborator.push_err(TypeCheckError::UnreachableCase { location }); + } + } - /// Multiple patterns combined with `|` where we should match this pattern if any - /// constituent pattern matches. e.g. `Some(3) | None` or `Some(1) | Some(2) | None` - #[allow(unused)] - Or(Vec), + /// Traverse the resulting HirMatch to build counter-examples of values which would + /// not be covered by the match. + fn issue_missing_cases_error( + &mut self, + tree: &HirMatch, + type_matched_on: &Type, + location: Location, + ) { + let starting_id = match tree { + HirMatch::Switch(id, ..) => *id, + _ => return self.issue_missing_cases_error_for_type(type_matched_on, location), + }; - /// An integer range pattern such as `1..20` which will match any integer n such that - /// 1 <= n < 20. - #[allow(unused)] - Range(SignedField, SignedField), + let mut cases = BTreeSet::new(); + self.find_missing_values(tree, &mut Default::default(), &mut cases, starting_id); - /// An error occurred while translating this pattern. This Pattern kind always translates - /// to a Fail branch in the decision tree, although the compiler is expected to halt - /// with errors before execution. - Error, -} + // It's possible to trigger this matching on an empty enum like `enum Void {}` + if !cases.is_empty() { + self.elaborator.push_err(TypeCheckError::MissingCases { cases, location }); + } + } -#[derive(Clone)] -struct Column { - variable_to_match: DefinitionId, - pattern: Pattern, -} + /// Issue a missing cases error if necessary for the given type, assuming that no + /// case of the type is covered. This is the case for empty matches `match foo {}`. + /// Note that this is expected not to error if the given type is an enum with zero variants. + fn issue_missing_cases_error_for_type(&mut self, type_matched_on: &Type, location: Location) { + let typ = type_matched_on.follow_bindings_shallow(); + if let Type::DataType(shared, generics) = typ.as_ref() { + if let Some(variants) = shared.borrow().get_variants(generics) { + let cases: BTreeSet<_> = variants.into_iter().map(|(name, _)| name).collect(); + if !cases.is_empty() { + self.elaborator.push_err(TypeCheckError::MissingCases { cases, location }); + } + return; + } + } + let typ = typ.to_string(); + self.elaborator.push_err(TypeCheckError::MissingManyCases { typ, location }); + } -impl Column { - fn new(variable_to_match: DefinitionId, pattern: Pattern) -> Self { - Column { variable_to_match, pattern } + fn find_missing_values( + &self, + tree: &HirMatch, + env: &mut HashMap)>, + missing_cases: &mut BTreeSet, + starting_id: DefinitionId, + ) { + match tree { + HirMatch::Success(_) | HirMatch::Failure { missing_case: false } => (), + HirMatch::Guard { otherwise, .. 
} => { + self.find_missing_values(otherwise, env, missing_cases, starting_id); + } + HirMatch::Failure { missing_case: true } => { + let case = Self::construct_missing_case(starting_id, env); + missing_cases.insert(case); + } + HirMatch::Switch(definition_id, cases, else_case) => { + for case in cases { + let name = case.constructor.to_string(); + env.insert(*definition_id, (name, case.arguments.clone())); + self.find_missing_values(&case.body, env, missing_cases, starting_id); + } + + if let Some(else_case) = else_case { + let typ = self.elaborator.interner.definition_type(*definition_id); + + for case in self.missing_cases(cases, &typ) { + env.insert(*definition_id, case); + self.find_missing_values(else_case, env, missing_cases, starting_id); + } + } + + env.remove(definition_id); + } + } } -} -#[derive(Clone)] -pub(super) struct Row { - columns: Vec, - guard: Option, - body: ExprId, -} + fn missing_cases(&self, cases: &[Case], typ: &Type) -> Vec<(String, Vec)> { + // We expect `cases` to come from a `Switch` which should always have + // at least 2 cases, otherwise it should be a Success or Failure node. + let first = &cases[0]; -impl Row { - fn new(columns: Vec, guard: Option, body: ExprId) -> Row { - Row { columns, guard, body } + if matches!(&first.constructor, Constructor::Int(_) | Constructor::Range(..)) { + return self.missing_integer_cases(cases, typ); + } + + let all_constructors = first.constructor.all_constructors(); + let mut all_constructors = + btree_map(all_constructors, |(constructor, arg_count)| (constructor, arg_count)); + + for case in cases { + all_constructors.remove(&case.constructor); + } + + vecmap(all_constructors, |(constructor, arg_count)| { + // Safety: this id should only be used in `env` of `find_missing_values` which + // only uses it for display and defaults to "_" on unknown ids. + let args = vecmap(0..arg_count, |_| DefinitionId::dummy_id()); + (constructor.to_string(), args) + }) } -} -impl Row { - fn remove_column(&mut self, variable: DefinitionId) -> Option { - self.columns - .iter() - .position(|c| c.variable_to_match == variable) - .map(|idx| self.columns.remove(idx)) + fn missing_integer_cases( + &self, + cases: &[Case], + typ: &Type, + ) -> Vec<(String, Vec)> { + // We could give missed cases for field ranges of `0 .. field_modulus` but since the field + // used in Noir may change we recommend a match-all pattern instead. + // If the type is a type variable, we don't know exactly which integer type this may + // resolve to so also just suggest a catch-all in that case. 
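+        //
+        // Worked example (illustrative): matching a u8 against the arms `0` and
+        // `1..10` yields constructors Int(0) and Range(1, 10). Starting from the
+        // full domain 0..=255 and removing 0..=0 and 1..=9 (range ends are
+        // exclusive) leaves exactly one missing case: "10..=255".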
+ if typ.is_field() || typ.is_bindable() { + return vec![(WILDCARD_PATTERN.to_string(), Vec::new())]; + } + + let mut missing_cases = rangemap::RangeInclusiveSet::new(); + + let int_max = SignedField::positive(typ.integral_maximum_size().unwrap()); + let int_min = typ.integral_minimum_size().unwrap(); + missing_cases.insert(int_min..=int_max); + + for case in cases { + match &case.constructor { + Constructor::Int(signed_field) => { + missing_cases.remove(*signed_field..=*signed_field); + } + Constructor::Range(start, end) => { + // Our ranges are exclusive, so adjust for that + missing_cases.remove(*start..=end.sub_one()); + } + _ => unreachable!( + "missing_integer_cases should only be called with Int or Range constructors" + ), + } + } + + vecmap(missing_cases, |range| { + if range.start() == range.end() { + (format!("{}", range.start()), Vec::new()) + } else { + (format!("{}..={}", range.start(), range.end()), Vec::new()) + } + }) + } + + fn construct_missing_case( + starting_id: DefinitionId, + env: &HashMap)>, + ) -> String { + let Some((constructor, arguments)) = env.get(&starting_id) else { + return WILDCARD_PATTERN.to_string(); + }; + + let no_arguments = arguments.is_empty(); + + let args = vecmap(arguments, |arg| Self::construct_missing_case(*arg, env)).join(", "); + + if no_arguments { constructor.clone() } else { format!("{constructor}({args})") } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index 3c4d3c513d6..e5df79522c6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -6,12 +6,12 @@ use rustc_hash::FxHashSet as HashSet; use crate::{ DataType, Kind, QuotedType, Shared, Type, ast::{ - ArrayLiteral, BinaryOpKind, BlockExpression, CallExpression, CastExpression, + ArrayLiteral, AsTraitPath, BinaryOpKind, BlockExpression, CallExpression, CastExpression, ConstrainExpression, ConstrainKind, ConstructorExpression, Expression, ExpressionKind, Ident, IfExpression, IndexExpression, InfixExpression, ItemVisibility, Lambda, Literal, MatchExpression, MemberAccessExpression, MethodCallExpression, Path, PathSegment, - PrefixExpression, StatementKind, UnaryOp, UnresolvedTypeData, UnresolvedTypeExpression, - UnsafeExpression, + PrefixExpression, StatementKind, TraitBound, UnaryOp, UnresolvedTraitConstraint, + UnresolvedTypeData, UnresolvedTypeExpression, UnsafeExpression, }, hir::{ comptime::{self, InterpreterError}, @@ -26,7 +26,8 @@ use crate::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, HirConstrainExpression, HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, - HirMemberAccess, HirMethodCallExpression, HirPrefixExpression, + HirMatch, HirMemberAccess, HirMethodCallExpression, HirPrefixExpression, ImplKind, + TraitMethod, }, stmt::{HirLetStatement, HirPattern, HirStatement}, traits::{ResolvedTraitBound, TraitConstraint}, @@ -37,7 +38,7 @@ use crate::{ token::{FmtStrFragment, Tokens}, }; -use super::{Elaborator, LambdaContext, UnsafeBlockStatus}; +use super::{Elaborator, LambdaContext, UnsafeBlockStatus, UnstableFeature}; impl Elaborator<'_> { pub(crate) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { @@ -94,11 +95,8 @@ impl Elaborator<'_> { self.push_err(ResolverError::UnquoteUsedOutsideQuote { location: expr.location }); 
(HirExpression::Error, Type::Error) } - ExpressionKind::AsTraitPath(_) => { - self.push_err(ResolverError::AsTraitPathNotYetImplemented { - location: expr.location, - }); - (HirExpression::Error, Type::Error) + ExpressionKind::AsTraitPath(path) => { + return self.elaborate_as_trait_path(path); } ExpressionKind::TypePath(path) => return self.elaborate_type_path(path), }; @@ -346,8 +344,12 @@ impl Elaborator<'_> { let operator = prefix.operator; - if let UnaryOp::MutableReference = operator { - self.check_can_mutate(rhs, rhs_location); + if let UnaryOp::Reference { mutable } = operator { + if mutable { + self.check_can_mutate(rhs, rhs_location); + } else { + self.use_unstable_feature(UnstableFeature::Ownership, location); + } } let expr = @@ -363,19 +365,25 @@ impl Elaborator<'_> { (expr_id, typ) } - fn check_can_mutate(&mut self, expr_id: ExprId, location: Location) { + pub(super) fn check_can_mutate(&mut self, expr_id: ExprId, location: Location) { let expr = self.interner.expression(&expr_id); match expr { HirExpression::Ident(hir_ident, _) => { if let Some(definition) = self.interner.try_definition(hir_ident.id) { + let name = definition.name.clone(); if !definition.mutable { self.push_err(TypeCheckError::CannotMutateImmutableVariable { - name: definition.name.clone(), + name, location, }); + } else { + self.check_can_mutate_lambda_capture(hir_ident.id, name, location); } } } + HirExpression::Index(_) => { + self.push_err(TypeCheckError::MutableReferenceToArrayElement { location }); + } HirExpression::MemberAccess(member_access) => { self.check_can_mutate(member_access.lhs, location); } @@ -383,6 +391,24 @@ impl Elaborator<'_> { } } + // We must check whether the mutable variable we are attempting to mutate + // comes from a lambda capture. All captures are immutable so we want to error + // if the user attempts to mutate a captured variable inside of a lambda without mutable references. + pub(super) fn check_can_mutate_lambda_capture( + &mut self, + id: DefinitionId, + name: String, + location: Location, + ) { + if let Some(lambda_context) = self.lambda_stack.last() { + let typ = self.interner.definition_type(id); + if !typ.is_mutable_ref() && lambda_context.captures.iter().any(|var| var.ident.id == id) + { + self.push_err(TypeCheckError::MutableCaptureWithoutRef { name, location }); + } + } + } + fn elaborate_index(&mut self, index_expr: IndexExpression) -> (HirExpression, Type) { let location = index_expr.index.location; @@ -1057,11 +1083,22 @@ impl Elaborator<'_> { ) -> (HirExpression, Type) { self.use_unstable_feature(super::UnstableFeature::Enums, location); + let expr_location = match_expr.expression.location; let (expression, typ) = self.elaborate_expression(match_expr.expression); - let (let_, variable) = self.wrap_in_let(expression, typ); + let (let_, variable) = self.wrap_in_let(expression, typ.clone()); + + let (errored, (rows, result_type)) = + self.errors_occurred_in(|this| this.elaborate_match_rules(variable, match_expr.rules)); + + // Avoid calling `elaborate_match_rows` if there were errors while constructing + // the match rows - it'll just lead to extra errors like `unreachable pattern` + // warnings on branches which previously had type errors. 
+ let tree = HirExpression::Match(if !errored { + self.elaborate_match_rows(rows, &typ, expr_location) + } else { + HirMatch::Failure { missing_case: false } + }); - let (rows, result_type) = self.elaborate_match_rules(variable, match_expr.rules); - let tree = HirExpression::Match(self.elaborate_match_rows(rows)); let tree = self.interner.push_expr(tree); self.interner.push_expr_type(tree, result_type.clone()); self.interner.push_expr_location(tree, location); @@ -1319,4 +1356,55 @@ impl Elaborator<'_> { let (expr_id, typ) = self.inline_comptime_value(result, location); Some((self.interner.expression(&expr_id), typ)) } + + fn elaborate_as_trait_path(&mut self, path: AsTraitPath) -> (ExprId, Type) { + let location = path.typ.location.merge(path.trait_path.location); + + let constraint = UnresolvedTraitConstraint { + typ: path.typ, + trait_bound: TraitBound { + trait_path: path.trait_path, + trait_id: None, + trait_generics: path.trait_generics, + }, + }; + + let typ = self.resolve_type(constraint.typ.clone()); + let Some(trait_bound) = self.resolve_trait_bound(&constraint.trait_bound) else { + // resolve_trait_bound only returns None if it has already issued an error, so don't + // issue another here. + let error = self.interner.push_expr_full(HirExpression::Error, location, Type::Error); + return (error, Type::Error); + }; + + let constraint = TraitConstraint { typ, trait_bound }; + + let the_trait = self.interner.get_trait(constraint.trait_bound.trait_id); + let Some(method) = the_trait.find_method(&path.impl_item.0.contents) else { + let trait_name = the_trait.name.to_string(); + let method_name = path.impl_item.to_string(); + let location = path.impl_item.location(); + self.push_err(ResolverError::NoSuchMethodInTrait { trait_name, method_name, location }); + let error = self.interner.push_expr_full(HirExpression::Error, location, Type::Error); + return (error, Type::Error); + }; + + let trait_method = + TraitMethod { method_id: method, constraint: constraint.clone(), assumed: true }; + + let definition_id = self.interner.trait_method_id(trait_method.method_id); + + let ident = HirIdent { + location: path.impl_item.location(), + id: definition_id, + impl_kind: ImplKind::TraitMethod(trait_method), + }; + + let id = self.interner.push_expr(HirExpression::Ident(ident.clone(), None)); + self.interner.push_expr_location(id, location); + + let typ = self.type_check_variable(ident, id, None); + self.interner.push_expr_type(id, typ.clone()); + (id, typ) + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs index 4ce797c6e07..f4c6c791996 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs @@ -338,7 +338,7 @@ fn can_return_without_recursing_match( match match_expr { HirMatch::Success(expr) => check(*expr), - HirMatch::Failure => true, + HirMatch::Failure { .. 
} => true, HirMatch::Guard { cond: _, body, otherwise } => check(*body) && check_match(otherwise), HirMatch::Switch(_, cases, otherwise) => { cases.iter().all(|case| check_match(&case.body)) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index ea784812aaf..fccf99cc7ca 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -588,7 +588,7 @@ impl<'context> Elaborator<'context> { for (mut constraint, expr_id, select_impl) in context.trait_constraints { let location = self.interner.expr_location(&expr_id); - if matches!(&constraint.typ, Type::MutableReference(_)) { + if matches!(&constraint.typ, Type::Reference(..)) { let (_, dereferenced_typ) = self.insert_auto_dereferences(expr_id, constraint.typ.clone()); constraint.typ = dereferenced_typ; @@ -1078,7 +1078,7 @@ impl<'context> Elaborator<'context> { self.mark_type_as_used(from); self.mark_type_as_used(to); } - Type::MutableReference(typ) => { + Type::Reference(typ, _) => { self.mark_type_as_used(typ); } Type::InfixExpr(left, _op, right, _) => { @@ -1264,9 +1264,13 @@ impl<'context> Elaborator<'context> { self.check_parent_traits_are_implemented(&trait_impl); self.remove_trait_impl_assumed_trait_implementations(trait_impl.impl_id); - for (module, function, _) in &trait_impl.methods.functions { + for (module, function, noir_function) in &trait_impl.methods.functions { self.local_module = *module; - let errors = check_trait_impl_method_matches_declaration(self.interner, *function); + let errors = check_trait_impl_method_matches_declaration( + self.interner, + *function, + noir_function, + ); self.push_errors(errors.into_iter().map(|error| error.into())); } @@ -1457,8 +1461,8 @@ impl<'context> Elaborator<'context> { self.self_type = Some(self_type.clone()); let self_type_location = trait_impl.object_type.location; - if matches!(self_type, Type::MutableReference(_)) { - self.push_err(DefCollectorErrorKind::MutableReferenceInTraitImpl { + if matches!(self_type, Type::Reference(..)) { + self.push_err(DefCollectorErrorKind::ReferenceInTraitImpl { location: self_type_location, }); } @@ -1751,7 +1755,7 @@ impl<'context> Elaborator<'context> { ); self.check_type_is_not_more_private_then_item(name, visibility, env, location); } - Type::MutableReference(typ) | Type::Array(_, typ) | Type::Slice(typ) => { + Type::Reference(typ, _) | Type::Array(_, typ) | Type::Slice(typ) => { self.check_type_is_not_more_private_then_item(name, visibility, typ, location); } Type::InfixExpr(left, _op, right, _) => { @@ -2163,4 +2167,13 @@ impl<'context> Elaborator<'context> { self.push_err(ParserError::with_reason(reason, location)); } } + + /// Run the given function using the resolver and return true if any errors (not warnings) + /// occurred while running it. 
+ pub fn errors_occurred_in<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> (bool, T) { + let previous_errors = self.errors.len(); + let ret = f(self); + let errored = self.errors.iter().skip(previous_errors).any(|error| error.is_error()); + (errored, ret) + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/options.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/options.rs index 58bb5e73a61..0d72d2955ba 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/options.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/options.rs @@ -3,14 +3,14 @@ use std::str::FromStr; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum UnstableFeature { Enums, - ArrayOwnership, + Ownership, } impl std::fmt::Display for UnstableFeature { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::Enums => write!(f, "enums"), - Self::ArrayOwnership => write!(f, "array-ownership"), + Self::Ownership => write!(f, "ownership"), } } } @@ -21,7 +21,7 @@ impl FromStr for UnstableFeature { fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "enums" => Ok(Self::Enums), - "array-ownership" => Ok(Self::ArrayOwnership), + "ownership" => Ok(Self::Ownership), other => Err(format!("Unknown unstable feature '{other}'")), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index 5402a682cdb..b9810c7d222 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -91,6 +91,7 @@ impl Elaborator<'_> { let type_contains_unspecified = let_stmt.r#type.contains_unspecified(); let annotated_type = self.resolve_inferred_type(let_stmt.r#type); + let pattern_location = let_stmt.pattern.location(); let expr_location = let_stmt.expression.location; let (expression, expr_type) = self.elaborate_expression_with_target_type(let_stmt.expression, Some(&annotated_type)); @@ -98,8 +99,11 @@ impl Elaborator<'_> { // Require the top-level of a global's type to be fully-specified if type_contains_unspecified && global_id.is_some() { let expected_type = annotated_type.clone(); - let error = - ResolverError::UnspecifiedGlobalType { location: expr_location, expected_type }; + let error = ResolverError::UnspecifiedGlobalType { + pattern_location, + expr_location, + expected_type, + }; self.push_err(error); } @@ -151,8 +155,11 @@ impl Elaborator<'_> { let (lvalue, lvalue_type, mutable) = self.elaborate_lvalue(assign.lvalue); if !mutable { - let (name, location) = self.get_lvalue_name_and_location(&lvalue); + let (_, name, location) = self.get_lvalue_error_info(&lvalue); self.push_err(TypeCheckError::VariableMustBeMutable { name, location }); + } else { + let (id, name, location) = self.get_lvalue_error_info(&lvalue); + self.check_can_mutate_lambda_capture(id, name, location); } self.unify_with_coercions(&expr_type, &lvalue_type, expression, expr_location, || { @@ -202,11 +209,10 @@ impl Elaborator<'_> { ); // Check that start range and end range have the same types - let range_location = start_location.merge(end_location); self.unify(&start_range_type, &end_range_type, || TypeCheckError::TypeMismatch { expected_typ: start_range_type.to_string(), expr_typ: end_range_type.to_string(), - expr_location: range_location, + expr_location: end_location, }); let expected_type = self.polymorphic_integer(); @@ -214,7 +220,7 @@ impl Elaborator<'_> { self.unify(&start_range_type, &expected_type, ||
TypeCheckError::TypeCannotBeUsed { typ: start_range_type.clone(), place: "for loop", - location: range_location, + location: start_location, }); self.interner.push_definition_type(identifier.id, start_range_type); @@ -331,20 +337,20 @@ impl Elaborator<'_> { (expr, self.interner.next_type_variable()) } - fn get_lvalue_name_and_location(&self, lvalue: &HirLValue) -> (String, Location) { + fn get_lvalue_error_info(&self, lvalue: &HirLValue) -> (DefinitionId, String, Location) { match lvalue { HirLValue::Ident(name, _) => { let location = name.location; if let Some(definition) = self.interner.try_definition(name.id) { - (definition.name.clone(), location) + (name.id, definition.name.clone(), location) } else { - ("(undeclared variable)".into(), location) + (DefinitionId::dummy_id(), "(undeclared variable)".into(), location) } } - HirLValue::MemberAccess { object, .. } => self.get_lvalue_name_and_location(object), - HirLValue::Index { array, .. } => self.get_lvalue_name_and_location(array), - HirLValue::Dereference { lvalue, .. } => self.get_lvalue_name_and_location(lvalue), + HirLValue::MemberAccess { object, .. } => self.get_lvalue_error_info(object), + HirLValue::Index { array, .. } => self.get_lvalue_error_info(array), + HirLValue::Dereference { lvalue, .. } => self.get_lvalue_error_info(lvalue), } } @@ -431,8 +437,8 @@ impl Elaborator<'_> { let (mut lvalue, mut lvalue_type, mut mutable) = self.elaborate_lvalue(*array); // Before we check that the lvalue is an array, try to dereference it as many times - // as needed to unwrap any &mut wrappers. - while let Type::MutableReference(element) = lvalue_type.follow_bindings() { + // as needed to unwrap any `&` or `&mut` wrappers. + while let Type::Reference(element, _) = lvalue_type.follow_bindings() { let element_type = element.as_ref().clone(); lvalue = HirLValue::Dereference { lvalue: Box::new(lvalue), element_type, location }; @@ -446,8 +452,8 @@ impl Elaborator<'_> { Type::Slice(elem_type) => *elem_type, Type::Error => Type::Error, Type::String(_) => { - let (_lvalue_name, lvalue_location) = - self.get_lvalue_name_and_location(&lvalue); + let (_id, _lvalue_name, lvalue_location) = + self.get_lvalue_error_info(&lvalue); self.push_err(TypeCheckError::StringIndexAssign { location: lvalue_location, }); @@ -476,7 +482,9 @@ impl Elaborator<'_> { let lvalue = Box::new(lvalue); let element_type = Type::type_variable(self.interner.next_type_variable_id()); - let expected_type = Type::MutableReference(Box::new(element_type.clone())); + + // Always expect a mutable reference here since we're storing to it + let expected_type = Type::Reference(Box::new(element_type.clone()), true); self.unify(&reference_type, &expected_type, || TypeCheckError::TypeMismatch { expected_typ: expected_type.to_string(), @@ -533,9 +541,8 @@ impl Elaborator<'_> { } } } - // If the lhs is a mutable reference we automatically transform - // lhs.field into (*lhs).field - Type::MutableReference(element) => { + // If the lhs is a reference we automatically transform `lhs.field` into `(*lhs).field` + Type::Reference(element, mutable) => { if let Some(mut dereference_lhs) = dereference_lhs { dereference_lhs(self, lhs_type.clone(), element.as_ref().clone()); return self.check_field_access( @@ -547,7 +554,7 @@ impl Elaborator<'_> { } else { let (element, index) = self.check_field_access(element, field_name, location, dereference_lhs)?; - return Some((Type::MutableReference(Box::new(element)), index)); + return Some((Type::Reference(Box::new(element), *mutable), index)); } } _ => (), 
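// [Editor's sketch — not part of the patch] The `while let` loop in the hunk
// above keeps stripping `&`/`&mut` wrappers before checking that an lvalue is
// an array. Modeled here with a minimal stand-in `Type` (the compiler's real
// `Type` is far richer); names are illustrative only.
#[derive(Clone, Debug, PartialEq)]
enum SketchType {
    Array,
    Reference(Box<SketchType>, /*mutable*/ bool),
}

/// Peel reference layers, counting how many dereferences would be inserted.
fn peel_references(mut typ: SketchType) -> (SketchType, usize) {
    let mut inserted_derefs = 0;
    while let SketchType::Reference(element, _mutable) = typ {
        typ = *element;
        inserted_derefs += 1;
    }
    (typ, inserted_derefs)
}

fn main() {
    // `& &mut [T; N]` needs two dereferences to reach the array.
    let t = SketchType::Reference(
        Box::new(SketchType::Reference(Box::new(SketchType::Array), true)),
        false,
    );
    assert_eq!(peel_references(t), (SketchType::Array, 2));
}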
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs index 0ddef96c46a..a931dde93de 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs @@ -273,6 +273,7 @@ impl Elaborator<'_> { pub(crate) fn check_trait_impl_method_matches_declaration( interner: &mut NodeInterner, function: FuncId, + noir_function: &NoirFunction, ) -> Vec<TypeCheckError> { let meta = interner.function_meta(&function); let modifiers = interner.function_modifiers(&function); @@ -349,6 +350,8 @@ pub(crate) fn check_trait_impl_method_matches_declaration( definition_type, method_name, &meta.parameters, + &meta.return_type, + noir_function, meta.name.location, &trait_info.name.0.contents, &mut errors, @@ -358,11 +361,14 @@ errors } +#[allow(clippy::too_many_arguments)] fn check_function_type_matches_expected_type( expected: &Type, actual: &Type, method_name: &str, actual_parameters: &Parameters, + actual_return_type: &FunctionReturnType, + noir_function: &NoirFunction, location: Location, trait_name: &str, errors: &mut Vec<TypeCheckError>, @@ -381,11 +387,16 @@ if params_a.len() == params_b.len() { for (i, (a, b)) in params_a.iter().zip(params_b.iter()).enumerate() { if a.try_unify(b, &mut bindings).is_err() { + let parameter_location = noir_function.def.parameters.get(i); + let parameter_location = parameter_location.map(|param| param.typ.location); + let parameter_location = + parameter_location.unwrap_or_else(|| actual_parameters.0[i].0.location()); + errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { method_name: method_name.to_string(), expected_typ: a.to_string(), actual_typ: b.to_string(), - parameter_location: actual_parameters.0[i].0.location(), + parameter_location, parameter_index: i + 1, }); } @@ -395,7 +406,7 @@ errors.push(TypeCheckError::TypeMismatch { expected_typ: ret_a.to_string(), expr_typ: ret_b.to_string(), - expr_location: location, + expr_location: actual_return_type.location(), }); } } else { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 0610155a798..6f6423017a4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -12,6 +12,7 @@ use crate::{ ast::{ Signedness, UnaryOp, UnresolvedGeneric, UnresolvedGenerics, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, WILDCARD_TYPE, }, + elaborator::UnstableFeature, hir::{ def_collector::dc_crate::CompilationError, def_map::{ModuleDefId, fully_qualified_module_path}, @@ -31,8 +32,8 @@ use crate::{ traits::{NamedType, ResolvedTraitBound, Trait, TraitConstraint}, }, node_interner::{ - DependencyId, ExprId, FuncId, GlobalValue, ImplSearchErrorKind, NodeInterner, TraitId, - TraitImplKind, TraitMethodId, + DependencyId, ExprId, FuncId, GlobalValue, ImplSearchErrorKind, TraitId, TraitImplKind, + TraitMethodId, }, signed_field::SignedField, token::SecondaryAttribute, @@ -141,8 +142,11 @@ impl Elaborator<'_> { } } } - MutableReference(element) => { - Type::MutableReference(Box::new(self.resolve_type_inner(*element, kind))) + Reference(element, mutable) => { + if !mutable { + self.use_unstable_feature(UnstableFeature::Ownership, location); + } +
Type::Reference(Box::new(self.resolve_type_inner(*element, kind)), mutable) } Parenthesized(typ) => self.resolve_type_inner(*typ, kind), Resolved(id) => self.interner.get_quoted_type(id).clone(), @@ -174,8 +178,8 @@ impl Elaborator<'_> { if !kind.unifies(&resolved_type.kind()) { let expected_typ_err = CompilationError::TypeError(TypeCheckError::TypeKindMismatch { - expected_kind: kind.to_string(), - expr_kind: resolved_type.kind().to_string(), + expected_kind: kind.clone(), + expr_kind: resolved_type.kind(), expr_location: location, }); self.push_err(expected_typ_err); @@ -523,8 +527,8 @@ impl Elaborator<'_> { (Type::Constant(lhs, lhs_kind), Type::Constant(rhs, rhs_kind)) => { if !lhs_kind.unifies(&rhs_kind) { self.push_err(TypeCheckError::TypeKindMismatch { - expected_kind: lhs_kind.to_string(), - expr_kind: rhs_kind.to_string(), + expected_kind: lhs_kind, + expr_kind: rhs_kind, expr_location: location, }); return Type::Error; @@ -557,8 +561,8 @@ impl Elaborator<'_> { fn check_kind(&mut self, typ: Type, expected_kind: &Kind, location: Location) -> Type { if !typ.kind().unifies(expected_kind) { self.push_err(TypeCheckError::TypeKindMismatch { - expected_kind: expected_kind.to_string(), - expr_kind: typ.kind().to_string(), + expected_kind: expected_kind.clone(), + expr_kind: typ.kind(), expr_location: location, }); return Type::Error; @@ -766,8 +770,7 @@ impl Elaborator<'_> { make_error: impl FnOnce() -> TypeCheckError, ) { if let Err(UnificationError) = actual.unify(expected) { - let error: CompilationError = make_error().into(); - self.push_err(error); + self.push_err(make_error()); } } @@ -846,7 +849,7 @@ impl Elaborator<'_> { /// Insert as many dereference operations as necessary to automatically dereference a method /// call object to its base value type T. pub(super) fn insert_auto_dereferences(&mut self, object: ExprId, typ: Type) -> (ExprId, Type) { - if let Type::MutableReference(element) = typ.follow_bindings() { + if let Type::Reference(element, _mut) = typ.follow_bindings() { let location = self.interner.id_location(object); let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { @@ -1040,6 +1043,7 @@ impl Elaborator<'_> { location: Location, ) -> Result<(Type, bool), TypeCheckError> { use Type::*; + match (lhs_type, rhs_type) { // Avoid reporting errors multiple times (Error, _) | (_, Error) => Ok((Bool, false)), @@ -1250,6 +1254,7 @@ impl Elaborator<'_> { location: Location, ) -> Result<(Type, bool), TypeCheckError> { use Type::*; + match op { crate::ast::UnaryOp::Minus | crate::ast::UnaryOp::Not => { match rhs_type { @@ -1303,17 +1308,26 @@ impl Elaborator<'_> { _ => Ok((rhs_type.clone(), true)), } } - crate::ast::UnaryOp::MutableReference => { - Ok((Type::MutableReference(Box::new(rhs_type.follow_bindings())), false)) + crate::ast::UnaryOp::Reference { mutable } => { + let typ = Type::Reference(Box::new(rhs_type.follow_bindings()), *mutable); + Ok((typ, false)) } crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { let element_type = self.interner.next_type_variable(); - let expected = Type::MutableReference(Box::new(element_type.clone())); - self.unify(rhs_type, &expected, || TypeCheckError::TypeMismatch { - expr_typ: rhs_type.to_string(), - expected_typ: expected.to_string(), - expr_location: location, - }); + let make_expected = + |mutable| Type::Reference(Box::new(element_type.clone()), mutable); + + let immutable = make_expected(false); + let mutable = make_expected(true); + + // Both `&mut T` and `&T` should coerce to an expected `&T`. 
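// [Editor's sketch — not part of the patch] The comment above states the rule
// the following `try_reference_coercion` call implements for unary `*`: an
// expression of type `&T` or `&mut T` both dereference to `T`; only a
// non-reference is a type error. Stand-in types; illustrative only.
#[derive(Clone, Debug, PartialEq)]
enum SketchType {
    Field,
    Reference(Box<SketchType>, /*mutable*/ bool),
}

/// Result type of `*expr`, if `expr` is a reference of either mutability.
fn dereference(typ: &SketchType) -> Option<SketchType> {
    match typ {
        SketchType::Reference(element, _mutable) => Some((**element).clone()),
        _ => None, // the elaborator reports a TypeMismatch in this case
    }
}

fn main() {
    let shared = SketchType::Reference(Box::new(SketchType::Field), false);
    let exclusive = SketchType::Reference(Box::new(SketchType::Field), true);
    assert_eq!(dereference(&shared), Some(SketchType::Field));
    assert_eq!(dereference(&exclusive), Some(SketchType::Field));
    assert_eq!(dereference(&SketchType::Field), None);
}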
+ if !rhs_type.try_reference_coercion(&immutable) { + self.unify(rhs_type, &mutable, || TypeCheckError::TypeMismatch { + expr_typ: rhs_type.to_string(), + expected_typ: mutable.to_string(), + expr_location: location, + }); + } Ok((element_type, false)) } } @@ -1436,9 +1450,9 @@ impl Elaborator<'_> { Type::NamedGeneric(_, _) => { self.lookup_method_in_trait_constraints(object_type, method_name, location) } - // Mutable references to another type should resolve to methods of their element type. + // References to another type should resolve to methods of their element type. // This may be a data type or a primitive type. - Type::MutableReference(element) => { + Type::Reference(element, _mutable) => { self.lookup_method(&element, method_name, location, check_self_param) } @@ -1835,13 +1849,12 @@ impl Elaborator<'_> { if let Some(expected_object_type) = expected_object_type { let actual_type = object_type.follow_bindings(); - if matches!(expected_object_type.follow_bindings(), Type::MutableReference(_)) { - if !matches!(actual_type, Type::MutableReference(_)) { - if let Err(error) = verify_mutable_reference(self.interner, *object) { - self.push_err(TypeCheckError::ResolverError(error)); - } + if let Type::Reference(_, mutable) = expected_object_type.follow_bindings() { + if !matches!(actual_type, Type::Reference(..)) { + let location = self.interner.id_location(*object); + self.check_can_mutate(*object, location); - let new_type = Type::MutableReference(Box::new(actual_type)); + let new_type = Type::Reference(Box::new(actual_type), mutable); *object_type = new_type.clone(); // First try to remove a dereference operator that may have been implicitly @@ -1850,11 +1863,9 @@ impl Elaborator<'_> { // If that didn't work, then wrap the whole expression in an `&mut` *object = new_object.unwrap_or_else(|| { - let location = self.interner.id_location(*object); - let new_object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { - operator: UnaryOp::MutableReference, + operator: UnaryOp::Reference { mutable }, rhs: *object, trait_method_id: None, })); @@ -1865,7 +1876,7 @@ impl Elaborator<'_> { } // Otherwise if the object type is a mutable reference and the method is not, insert as // many dereferences as needed. - } else if matches!(actual_type, Type::MutableReference(_)) { + } else if matches!(actual_type, Type::Reference(..)) { let (new_object, new_type) = self.insert_auto_dereferences(*object, actual_type); *object_type = new_type; *object = new_object; @@ -2134,30 +2145,3 @@ fn bind_generic(param: &ResolvedGeneric, arg: &Type, bindings: &mut TypeBindings bindings.insert(param.type_var.id(), (param.type_var.clone(), param.kind(), arg.clone())); } } - -/// Gives an error if a user tries to create a mutable reference -/// to an immutable variable. 
-fn verify_mutable_reference(interner: &NodeInterner, rhs: ExprId) -> Result<(), ResolverError> { - match interner.expression(&rhs) { - HirExpression::MemberAccess(member_access) => { - verify_mutable_reference(interner, member_access.lhs) - } - HirExpression::Index(_) => { - let location = interner.expr_location(&rhs); - Err(ResolverError::MutableReferenceToArrayElement { location }) - } - HirExpression::Ident(ident, _) => { - if let Some(definition) = interner.try_definition(ident.id) { - if !definition.mutable { - let location = interner.expr_location(&rhs); - let variable = definition.name.clone(); - let err = - ResolverError::MutableReferenceToImmutableVariable { location, variable }; - return Err(err); - } - } - Ok(()) - } - _ => Ok(()), - } -} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index c0283d9701b..af85027389e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -86,6 +86,9 @@ pub(super) fn tokens_to_string(tokens: &[LocatedToken], interner: &NodeInterner) struct TokenPrettyPrinter<'interner> { interner: &'interner NodeInterner, indent: usize, + /// Determines whether the last outputted byte was alphanumeric. + /// This is used to add a space after the last token and before another token + /// that starts with an alphanumeric byte. last_was_alphanumeric: bool, last_was_right_brace: bool, last_was_semicolon: bool, @@ -169,30 +172,33 @@ impl<'interner> TokenPrettyPrinter<'interner> { } match token { - Token::QuotedType(id) => write!(f, "{}", self.interner.get_quoted_type(*id)), + Token::QuotedType(id) => { + let value = Value::Type(self.interner.get_quoted_type(*id).clone()); + self.print_value(&value, last_was_alphanumeric, f) + } Token::InternedExpr(id) => { let value = Value::expression(ExpressionKind::Interned(*id)); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::InternedStatement(id) => { let value = Value::statement(StatementKind::Interned(*id)); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::InternedLValue(id) => { let value = Value::lvalue(LValue::Interned(*id, Location::dummy())); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::InternedUnresolvedTypeData(id) => { let value = Value::UnresolvedType(UnresolvedTypeData::Interned(*id)); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::InternedPattern(id) => { let value = Value::pattern(Pattern::Interned(*id, Location::dummy())); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::UnquoteMarker(id) => { let value = Value::TypedExpr(TypedExpr::ExprId(*id)); - self.print_value(&value, f) + self.print_value(&value, last_was_alphanumeric, f) } Token::Keyword(..) | Token::Ident(..) 
@@ -254,6 +260,7 @@ impl<'interner> TokenPrettyPrinter<'interner> { | Token::Slash | Token::Percent | Token::Ampersand + | Token::SliceStart | Token::ShiftLeft | Token::ShiftRight => { self.last_was_op = true; @@ -291,8 +298,21 @@ impl<'interner> TokenPrettyPrinter<'interner> { } } - fn print_value(&mut self, value: &Value, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fn print_value( + &mut self, + value: &Value, + last_was_alphanumeric: bool, + f: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { let string = value.display(self.interner).to_string(); + if string.is_empty() { + return Ok(()); + } + + if last_was_alphanumeric && string.bytes().next().unwrap().is_ascii_alphanumeric() { + write!(f, " ")?; + } + for (index, line) in string.lines().enumerate() { if index > 0 { writeln!(f)?; @@ -301,7 +321,7 @@ impl<'interner> TokenPrettyPrinter<'interner> { line.fmt(f)?; } - self.last_was_alphanumeric = string.bytes().all(|byte| byte.is_ascii_alphanumeric()); + self.last_was_alphanumeric = string.bytes().last().unwrap().is_ascii_alphanumeric(); self.last_was_right_brace = string.ends_with('}'); self.last_was_semicolon = string.ends_with(';'); @@ -381,7 +401,13 @@ impl Display for ValuePrinter<'_, '_> { other => write!(f, "{other}(args)"), } } - Value::Pointer(value, _) => write!(f, "&mut {}", value.borrow().display(self.interner)), + Value::Pointer(value, _, mutable) => { + if *mutable { + write!(f, "&mut {}", value.borrow().display(self.interner)) + } else { + write!(f, "&{}", value.borrow().display(self.interner)) + } + } Value::Array(values, _) => { let values = vecmap(values, |value| value.display(self.interner).to_string()); write!(f, "[{}]", values.join(", ")) @@ -856,8 +882,9 @@ fn remove_interned_in_unresolved_type_data( remove_interned_in_generic_type_args(interner, generic_type_args), ) } - UnresolvedTypeData::MutableReference(typ) => UnresolvedTypeData::MutableReference( + UnresolvedTypeData::Reference(typ, mutable) => UnresolvedTypeData::Reference( Box::new(remove_interned_in_unresolved_type(interner, *typ)), + mutable, ), UnresolvedTypeData::Tuple(types) => UnresolvedTypeData::Tuple(vecmap(types, |typ| { remove_interned_in_unresolved_type(interner, typ) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index dcc938faf2a..cb030e1a80d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -233,7 +233,7 @@ impl HirMatch { fn to_display_ast(&self, interner: &NodeInterner, location: Location) -> ExpressionKind { match self { HirMatch::Success(expr) => expr.to_display_ast(interner).kind, - HirMatch::Failure => ExpressionKind::Error, + HirMatch::Failure { .. 
} => ExpressionKind::Error, HirMatch::Guard { cond, body, otherwise } => { let condition = cond.to_display_ast(interner); let consequence = body.to_display_ast(interner); @@ -465,9 +465,9 @@ impl Type { let env = Box::new(env.to_display_ast()); UnresolvedTypeData::Function(args, ret, env, *unconstrained) } - Type::MutableReference(element) => { + Type::Reference(element, mutable) => { let element = Box::new(element.to_display_ast()); - UnresolvedTypeData::MutableReference(element) + UnresolvedTypeData::Reference(element, *mutable) } // Type::Forall is only for generic functions which don't store a type // in their Ast so they don't need to call to_display_ast for their Forall type. diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 5c87e70949a..d5b830d257c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -393,7 +393,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } HirPattern::Mutable(pattern, _) => { // Create a mutable reference to store to - let argument = Value::Pointer(Shared::new(argument), true); + let argument = Value::Pointer(Shared::new(argument), true, true); self.define_pattern(pattern, typ, argument, location) } HirPattern::Tuple(pattern_fields, _) => { @@ -471,7 +471,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { for scope in self.elaborator.interner.comptime_scopes.iter_mut().rev() { if let Entry::Occupied(mut entry) = scope.entry(id) { match entry.get() { - Value::Pointer(reference, true) => { + Value::Pointer(reference, true, _) => { *reference.borrow_mut() = argument; } _ => { @@ -507,7 +507,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { /// This will automatically dereference a mutable variable if used. pub fn evaluate(&mut self, id: ExprId) -> IResult<Value> { match self.evaluate_no_dereference(id)? { - Value::Pointer(elem, true) => Ok(elem.borrow().clone()), + Value::Pointer(elem, true, _) => Ok(elem.borrow().clone()), other => Ok(other), } } @@ -848,7 +848,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_prefix(&mut self, prefix: HirPrefixExpression, id: ExprId) -> IResult<Value> { let rhs = match prefix.operator { - UnaryOp::MutableReference => self.evaluate_no_dereference(prefix.rhs)?, + UnaryOp::Reference { .. } => self.evaluate_no_dereference(prefix.rhs)?, _ => self.evaluate(prefix.rhs)?, }; @@ -899,17 +899,17 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Err(InterpreterError::InvalidValueForUnary { typ, location, operator: "not" }) } }, - UnaryOp::MutableReference => { + UnaryOp::Reference { mutable } => { // If this is a mutable variable (auto_deref = true), turn this into an explicit // mutable reference just by switching the value of `auto_deref`. Otherwise, wrap // the value in a fresh reference.
match rhs { - Value::Pointer(elem, true) => Ok(Value::Pointer(elem, false)), - other => Ok(Value::Pointer(Shared::new(other), false)), + Value::Pointer(elem, true, _) => Ok(Value::Pointer(elem, false, mutable)), + other => Ok(Value::Pointer(Shared::new(other), false, mutable)), } } UnaryOp::Dereference { implicitly_added: _ } => match rhs { - Value::Pointer(element, _) => Ok(element.borrow().clone()), + Value::Pointer(element, _, _) => Ok(element.borrow().clone()), value => { let location = self.elaborator.interner.expr_location(&id); let typ = value.get_type().into_owned(); @@ -1613,7 +1613,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { HirLValue::Ident(ident, typ) => self.mutate(ident.id, rhs, ident.location), HirLValue::Dereference { lvalue, element_type: _, location } => { match self.evaluate_lvalue(&lvalue)? { - Value::Pointer(value, _) => { + Value::Pointer(value, _, _) => { *value.borrow_mut() = rhs; Ok(()) } @@ -1669,12 +1669,12 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_lvalue(&mut self, lvalue: &HirLValue) -> IResult<Value> { match lvalue { HirLValue::Ident(ident, _) => match self.lookup(ident)? { - Value::Pointer(elem, true) => Ok(elem.borrow().clone()), + Value::Pointer(elem, true, _) => Ok(elem.borrow().clone()), other => Ok(other), }, HirLValue::Dereference { lvalue, element_type, location } => { match self.evaluate_lvalue(lvalue)? { - Value::Pointer(value, _) => Ok(value.borrow().clone()), + Value::Pointer(value, _, _) => Ok(value.borrow().clone()), value => { let typ = value.get_type().into_owned(); Err(InterpreterError::NonPointerDereferenced { typ, location: *location }) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 34a5535f63c..77ee9183ead 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -28,18 +28,21 @@ use crate::{ hir::{ comptime::{ InterpreterError, Value, + display::tokens_to_string, errors::IResult, value::{ExprValue, TypedExpr}, }, def_collector::dc_crate::CollectedItems, def_map::ModuleDefId, + type_check::generics::TraitGenerics, }, hir_def::{ self, - expr::{HirExpression, HirIdent, HirLiteral}, + expr::{HirExpression, HirIdent, HirLiteral, ImplKind, TraitMethod}, function::FunctionBody, + traits::{ResolvedTraitBound, TraitConstraint}, }, - node_interner::{DefinitionKind, NodeInterner, TraitImplKind}, + node_interner::{DefinitionKind, NodeInterner, TraitImplKind, TraitMethodId}, parser::{Parser, StatementOrExpressionOrLValue}, token::{Attribute, LocatedToken, Token}, }; @@ -166,7 +169,7 @@ impl Interpreter<'_, '_> { "quoted_as_module" => quoted_as_module(self, arguments, return_type, location), "quoted_as_trait_constraint" => quoted_as_trait_constraint(self, arguments, location), "quoted_as_type" => quoted_as_type(self, arguments, location), - "quoted_eq" => quoted_eq(arguments, location), + "quoted_eq" => quoted_eq(self.elaborator.interner, arguments, location), "quoted_hash" => quoted_hash(arguments, location), "quoted_tokens" => quoted_tokens(arguments, location), "slice_insert" => slice_insert(interner, arguments, location), @@ -1041,7 +1044,7 @@ fn type_as_mutable_reference( location: Location, ) -> IResult<Value> { type_as(arguments, return_type, location, |typ| { - if let Type::MutableReference(typ) = typ { Some(Value::Type(*typ)) } else { None } + if let
Type::Reference(typ, true) = typ { Some(Value::Type(*typ)) } else { None } }) } @@ -1321,7 +1324,7 @@ fn unresolved_type_as_mutable_reference( location: Location, ) -> IResult<Value> { unresolved_type_as(interner, arguments, return_type, location, |typ| { - if let UnresolvedTypeData::MutableReference(typ) = typ { + if let UnresolvedTypeData::Reference(typ, true) = typ { Some(Value::UnresolvedType(typ.typ)) } else { None @@ -1483,9 +1486,9 @@ fn zeroed(return_type: Type, location: Location) -> Value { // Using Value::Zeroed here is probably safer than using FuncId::dummy_id() or similar Value::Zeroed(typ) } - Type::MutableReference(element) => { + Type::Reference(element, mutable) => { let element = zeroed(*element, location); - Value::Pointer(Shared::new(element), false) + Value::Pointer(Shared::new(element), false, mutable) } // Optimistically assume we can resolve this type later or that the value is unused Type::TypeVariable(_) @@ -2177,7 +2180,11 @@ fn expr_as_unary_op( let unary_op_value: u128 = match prefix_expr.operator { UnaryOp::Minus => 0, UnaryOp::Not => 1, - UnaryOp::MutableReference => 2, + UnaryOp::Reference { mutable: true } => 2, + UnaryOp::Reference { mutable: false } => { + // `&` alone is experimental and currently hidden from the comptime API + return None; + } UnaryOp::Dereference { .. } => 3, }; @@ -2420,8 +2427,27 @@ fn function_def_as_typed_expr( ) -> IResult<Value> { let self_argument = check_one_argument(arguments, location)?; let func_id = get_function_def(self_argument)?; + let trait_impl_id = interpreter.elaborator.interner.function_meta(&func_id).trait_impl; let definition_id = interpreter.elaborator.interner.function_definition_id(func_id); - let hir_ident = HirIdent::non_trait_method(definition_id, location); + let hir_ident = if let Some(trait_impl_id) = trait_impl_id { + let trait_impl = interpreter.elaborator.interner.get_trait_implementation(trait_impl_id); + let trait_impl = trait_impl.borrow(); + let ordered = trait_impl.trait_generics.clone(); + let named = + interpreter.elaborator.interner.get_associated_types_for_impl(trait_impl_id).to_vec(); + let trait_generics = TraitGenerics { ordered, named }; + let trait_bound = + ResolvedTraitBound { trait_id: trait_impl.trait_id, trait_generics, location }; + let constraint = TraitConstraint { typ: trait_impl.typ.clone(), trait_bound }; + let method_index = trait_impl.methods.iter().position(|id| *id == func_id); + let method_index = method_index.expect("Expected to find the method"); + let method_id = TraitMethodId { trait_id: trait_impl.trait_id, method_index }; + let trait_method = TraitMethod { method_id, constraint, assumed: true }; + let id = interpreter.elaborator.interner.trait_method_id(trait_method.method_id); + HirIdent { location, id, impl_kind: ImplKind::TraitMethod(trait_method) } + } else { + HirIdent::non_trait_method(definition_id, location) + }; let generics = None; let hir_expr = HirExpression::Ident(hir_ident.clone(), generics.clone()); let expr_id = interpreter.elaborator.interner.push_expr(hir_expr); @@ -2913,10 +2939,24 @@ fn modulus_num_bits(arguments: Vec<(Value, Location)>, location: Location) -> IR } // fn quoted_eq(_first: Quoted, _second: Quoted) -> bool -fn quoted_eq(arguments: Vec<(Value, Location)>, location: Location) -> IResult<Value> { - eq_item(arguments, location, get_quoted) -} +fn quoted_eq( + interner: &NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult<Value> { + let (self_arg, other_arg) = check_two_arguments(arguments, location)?; + let self_arg =
get_quoted(self_arg)?; + let other_arg = get_quoted(other_arg)?; + // Comparing tokens one against each other doesn't work in the general case because tokens + // might refer to interned expressions/statements/etc. We'd need to convert those nodes + // to tokens and compare the final result, but comparing their string representation works + // equally well and, for simplicity, that's what we do here. + let self_string = tokens_to_string(&self_arg, interner); + let other_string = tokens_to_string(&other_arg, interner); + + Ok(Value::Bool(self_string == other_string)) +} fn quoted_hash(arguments: Vec<(Value, Location)>, location: Location) -> IResult<Value> { hash_item(arguments, location, get_quoted) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index 8d07669497f..ebaaf27f4f6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -61,7 +61,7 @@ pub enum Value { Tuple(Vec<Value>), Struct(HashMap<Rc<String>, Value>, Type), Enum(/*tag*/ usize, /*args*/ Vec<Value>, Type), - Pointer(Shared<Value>, /* auto_deref */ bool), + Pointer(Shared<Value>, /* auto_deref */ bool, /* mutable */ bool), Array(Vector<Value>, Type), Slice(Vector<Value>, Type), Quoted(Rc<Vec<LocatedToken>>), @@ -151,12 +151,12 @@ impl Value { Value::Slice(_, typ) => return Cow::Borrowed(typ), Value::Quoted(_) => Type::Quoted(QuotedType::Quoted), Value::StructDefinition(_) => Type::Quoted(QuotedType::StructDefinition), - Value::Pointer(element, auto_deref) => { + Value::Pointer(element, auto_deref, mutable) => { if *auto_deref { element.borrow().get_type().into_owned() } else { let element = element.borrow().get_type().into_owned(); - Type::MutableReference(Box::new(element)) + Type::Reference(Box::new(element), *mutable) } } Value::TraitConstraint { .. } => Type::Quoted(QuotedType::TraitConstraint), @@ -452,7 +452,7 @@ impl Value { Value::TypedExpr(TypedExpr::ExprId(expr_id)) => interner.expression(&expr_id), // Only convert pointers with auto_deref = true. These are mutable variables // and we don't need to wrap them in `&mut`. - Value::Pointer(element, true) => { + Value::Pointer(element, true, _) => { return element.unwrap_or_clone().into_hir_expression(interner, location); } Value::Closure(closure) => HirExpression::Lambda(closure.lambda.clone()), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index b0c71f1ebe6..81b3f842487 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -206,6 +206,13 @@ impl CompilationError { CompilationError::DebugComptimeScopeNotFound(_, location) => *location, } } + + pub(crate) fn is_error(&self) -> bool { + // This is a bit expensive but not all error types have an `is_warning` method + // and it'd lead to code duplication to add them. `CompilationError::is_error` + // also isn't expected to be called too often.
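// [Editor's sketch — not part of the patch] Why `quoted_eq` above renders to
// strings before comparing: two token streams may hold different interned ids
// that nevertheless print to identical source text. All names below are
// illustrative stand-ins, not the compiler's real token or interner types.
#[derive(Clone)]
enum SketchToken {
    Ident(String),
    InternedExpr(usize), // index into a (stand-in) interner
}

fn render(tokens: &[SketchToken], interner: &[&str]) -> String {
    let parts: Vec<String> = tokens
        .iter()
        .map(|token| match token {
            SketchToken::Ident(name) => name.clone(),
            SketchToken::InternedExpr(id) => interner[*id].to_string(),
        })
        .collect();
    parts.join(" ")
}

fn main() {
    // Two distinct interned expressions with the same printed form:
    let interner = ["1 + 2", "1 + 2"];
    let a = [SketchToken::InternedExpr(0)];
    let b = [SketchToken::InternedExpr(1)];
    // An id-by-id comparison would call these unequal; the rendered text agrees.
    assert_eq!(render(&a, &interner), render(&b, &interner));
}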
+ CustomDiagnostic::from(self).is_error() + } } impl std::fmt::Display for CompilationError { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 4e8ed6233c5..07738c5f7de 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -2,7 +2,6 @@ use crate::ast::{Ident, ItemVisibility, Path, UnsupportedNumericGenericType}; use crate::hir::resolution::import::PathResolutionError; use crate::hir::type_check::generics::TraitGenerics; -use noirc_errors::FileDiagnostic; use noirc_errors::{CustomDiagnostic as Diagnostic, Location}; use thiserror::Error; @@ -38,8 +37,8 @@ pub enum DefCollectorErrorKind { CannotReexportItemWithLessVisibility { item_name: Ident, desired_visibility: ItemVisibility }, #[error("Non-struct type used in impl")] NonStructTypeInImpl { location: Location }, - #[error("Cannot implement trait on a mutable reference type")] - MutableReferenceInTraitImpl { location: Location }, + #[error("Cannot implement trait on a reference type")] + ReferenceInTraitImpl { location: Location }, #[error("Impl for type `{typ}` overlaps with existing impl")] OverlappingImpl { typ: crate::Type, location: Location, prev_location: Location }, #[error("Cannot `impl` a type defined outside the current crate")] @@ -76,13 +75,9 @@ pub enum DefCollectorErrorKind { } impl DefCollectorErrorKind { - pub fn into_file_diagnostic(&self, file: fm::FileId) -> FileDiagnostic { - Diagnostic::from(self).in_file(file) - } - pub fn location(&self) -> Location { match self { - DefCollectorErrorKind::Duplicate { first_def: ident, .. } + DefCollectorErrorKind::Duplicate { second_def: ident, .. } | DefCollectorErrorKind::UnresolvedModuleDecl { mod_name: ident, .. } | DefCollectorErrorKind::CannotReexportItemWithLessVisibility { item_name: ident, @@ -102,7 +97,7 @@ impl DefCollectorErrorKind { | DefCollectorErrorKind::TestOnAssociatedFunction { location } | DefCollectorErrorKind::ExportOnAssociatedFunction { location } | DefCollectorErrorKind::NonStructTypeInImpl { location } - | DefCollectorErrorKind::MutableReferenceInTraitImpl { location } + | DefCollectorErrorKind::ReferenceInTraitImpl { location } | DefCollectorErrorKind::OverlappingImpl { location, .. } | DefCollectorErrorKind::ModuleAlreadyPartOfCrate { location, .. } | DefCollectorErrorKind::ModuleOriginallyDefined { location, .. 
} @@ -165,10 +160,10 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { let second_location = second_def.0.location(); let mut diag = Diagnostic::simple_error( primary_message, - format!("First {} found here", &typ), - first_location, + format!("Second {} found here", &typ), + second_location, ); - diag.add_secondary(format!("Second {} found here", &typ), second_location); + diag.add_secondary(format!("First {} found here", &typ), first_location); diag } } @@ -204,8 +199,8 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { "Only struct types may have implementation methods".into(), *location, ), - DefCollectorErrorKind::MutableReferenceInTraitImpl { location } => Diagnostic::simple_error( - "Trait impls are not allowed on mutable reference types".into(), + DefCollectorErrorKind::ReferenceInTraitImpl { location } => Diagnostic::simple_error( - "Trait impls are not allowed on reference types".into(), + "Trait impls are not allowed on reference types".into(), "Try using a struct type here instead".into(), *location, ), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 7bd5c79dc90..bc1c519ed5d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -1,6 +1,6 @@ use acvm::FieldElement; pub use noirc_errors::Span; -use noirc_errors::{CustomDiagnostic as Diagnostic, FileDiagnostic, Location}; +use noirc_errors::{CustomDiagnostic as Diagnostic, Location}; use thiserror::Error; use crate::{ @@ -76,10 +76,6 @@ pub enum ResolverError { GenericsOnAssociatedType { location: Location }, #[error("{0}")] ParserError(Box<ParserError>), - #[error("Cannot create a mutable reference to {variable}, it was declared to be immutable")] - MutableReferenceToImmutableVariable { variable: String, location: Location }, - #[error("Mutable references to array indices are unsupported")] - MutableReferenceToArrayElement { location: Location }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, location: Location }, #[error("Nested slices, i.e.
slices within an array or slice, are not supported")] @@ -109,7 +105,11 @@ pub enum ResolverError { #[error("Only `comptime` globals can be mutable")] MutableGlobal { location: Location }, #[error("Globals must have a specified type")] - UnspecifiedGlobalType { location: Location, expected_type: Type }, + UnspecifiedGlobalType { + pattern_location: Location, + expr_location: Location, + expected_type: Type, + }, #[error("Global failed to evaluate")] UnevaluatedGlobalType { location: Location }, #[error("Globals used in a type position must be non-negative")] @@ -130,8 +130,6 @@ pub enum ResolverError { ArrayLengthInterpreter { error: InterpreterError }, #[error("The unquote operator '$' can only be used within a quote expression")] UnquoteUsedOutsideQuote { location: Location }, - #[error("\"as trait path\" not yet implemented")] - AsTraitPathNotYetImplemented { location: Location }, #[error("Invalid syntax in macro call")] InvalidSyntaxInMacroCall { location: Location }, #[error("Macros must be comptime functions")] @@ -170,7 +168,7 @@ pub enum ResolverError { AttributeFunctionIsNotAPath { function: String, location: Location }, #[error("Attribute function `{name}` is not in scope")] AttributeFunctionNotInScope { name: String, location: Location }, - #[error("The trait `{missing_trait}` is not implemented for `{type_missing_trait}")] + #[error("The trait `{missing_trait}` is not implemented for `{type_missing_trait}`")] TraitNotImplemented { impl_trait: String, missing_trait: String, @@ -192,16 +190,14 @@ pub enum ResolverError { TypeUnsupportedInMatch { typ: Type, location: Location }, #[error("Expected a struct, enum, or literal value in pattern, but found a {item}")] UnexpectedItemInPattern { location: Location, item: &'static str }, + #[error("Trait `{trait_name}` doesn't have a method named `{method_name}`")] + NoSuchMethodInTrait { trait_name: String, method_name: String, location: Location }, } impl ResolverError { - pub fn into_file_diagnostic(&self, file: fm::FileId) -> FileDiagnostic { - Diagnostic::from(self).in_file(file) - } - pub fn location(&self) -> Location { match self { - ResolverError::DuplicateDefinition { first_location: location, .. } + ResolverError::DuplicateDefinition { second_location: location, .. } | ResolverError::UnconditionalRecursion { location, .. } | ResolverError::PathIsNotIdent { location } | ResolverError::Expected { location, .. } @@ -224,8 +220,6 @@ impl ResolverError { | ResolverError::NonStructWithGenerics { location } | ResolverError::GenericsOnSelfType { location } | ResolverError::GenericsOnAssociatedType { location } - | ResolverError::MutableReferenceToImmutableVariable { location, .. } - | ResolverError::MutableReferenceToArrayElement { location } | ResolverError::InvalidClosureEnvironment { location, .. } | ResolverError::NestedSlices { location } | ResolverError::AbiAttributeOutsideContract { location } @@ -237,7 +231,7 @@ impl ResolverError { | ResolverError::WhileInConstrainedFn { location } | ResolverError::JumpOutsideLoop { location, .. } | ResolverError::MutableGlobal { location } - | ResolverError::UnspecifiedGlobalType { location, .. } + | ResolverError::UnspecifiedGlobalType { pattern_location: location, .. } | ResolverError::UnevaluatedGlobalType { location } | ResolverError::NegativeGlobalType { location, .. } | ResolverError::NonIntegralGlobalType { location, .. } @@ -245,7 +239,6 @@ impl ResolverError { | ResolverError::SelfReferentialType { location } | ResolverError::NumericGenericUsedForType { location, .. 
} | ResolverError::UnquoteUsedOutsideQuote { location } - | ResolverError::AsTraitPathNotYetImplemented { location } | ResolverError::InvalidSyntaxInMacroCall { location } | ResolverError::MacroIsNotComptime { location } | ResolverError::NonFunctionInAnnotation { location } @@ -261,6 +254,7 @@ impl ResolverError { | ResolverError::NonIntegerGlobalUsedInPattern { location, .. } | ResolverError::TypeUnsupportedInMatch { location, .. } | ResolverError::UnexpectedItemInPattern { location, .. } + | ResolverError::NoSuchMethodInTrait { location, .. } | ResolverError::VariableAlreadyDefinedInPattern { new_location: location, .. } => { *location } @@ -296,10 +290,10 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::DuplicateDefinition { name, first_location, second_location} => { let mut diag = Diagnostic::simple_error( format!("duplicate definitions of {name} found"), - "first definition found here".to_string(), - *first_location, + "second definition found here".to_string(), + *second_location, ); - diag.add_secondary("second definition found here".to_string(), *second_location); + diag.add_secondary("first definition found here".to_string(), *first_location); diag } ResolverError::UnusedVariable { ident } => { @@ -490,12 +484,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *location, ), ResolverError::ParserError(error) => error.as_ref().into(), - ResolverError::MutableReferenceToImmutableVariable { variable, location } => { - Diagnostic::simple_error(format!("Cannot mutably reference the immutable variable {variable}"), format!("{variable} is immutable"), *location) - }, - ResolverError::MutableReferenceToArrayElement { location } => { - Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), *location) - }, ResolverError::InvalidClosureEnvironment { location, typ } => Diagnostic::simple_error( format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *location), @@ -577,12 +565,14 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *location, ) }, - ResolverError::UnspecifiedGlobalType { location, expected_type } => { - Diagnostic::simple_error( + ResolverError::UnspecifiedGlobalType { pattern_location, expr_location, expected_type } => { + let mut diagnostic = Diagnostic::simple_error( "Globals must have a specified type".to_string(), - format!("Inferred type is `{expected_type}`"), - *location, - ) + String::new(), + *pattern_location, + ); + diagnostic.add_secondary(format!("Inferred type is `{expected_type}`"), *expr_location); + diagnostic }, ResolverError::UnevaluatedGlobalType { location } => { Diagnostic::simple_error( @@ -658,13 +648,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *location, ) }, - ResolverError::AsTraitPathNotYetImplemented { location } => { - Diagnostic::simple_error( - "\"as trait path\" not yet implemented".into(), - "".into(), - *location, - ) - }, ResolverError::InvalidSyntaxInMacroCall { location } => { Diagnostic::simple_error( "Invalid syntax in macro call".into(), @@ -774,9 +757,9 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::TraitNotImplemented { impl_trait, missing_trait: the_trait, type_missing_trait: typ, location, missing_trait_location} => { let mut diagnostic = Diagnostic::simple_error( format!("The trait bound `{typ}: {the_trait}` is not satisfied"), - format!("The trait `{the_trait}` is not implemented for `{typ}") + format!("The 
trait `{the_trait}` is not implemented for `{typ}`") , *location); - diagnostic.add_secondary(format!("required by this bound in `{impl_trait}"), *missing_trait_location); + diagnostic.add_secondary(format!("required by this bound in `{impl_trait}`"), *missing_trait_location); diagnostic }, ResolverError::LoopNotYetSupported { location } => { @@ -822,6 +805,13 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *location, ) }, + ResolverError::NoSuchMethodInTrait { trait_name, method_name, location } => { + Diagnostic::simple_error( + format!("Trait `{trait_name}` has no method named `{method_name}`"), + String::new(), + *location, + ) + }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index f3c2c635b04..e68e70253a3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::rc::Rc; use acvm::FieldElement; @@ -15,6 +16,9 @@ use crate::hir_def::types::{BinaryTypeOperator, Kind, Type}; use crate::node_interner::NodeInterner; use crate::signed_field::SignedField; +/// Rust also only shows 3 maximum, even for short patterns. +pub const MAX_MISSING_CASES: usize = 3; + #[derive(Error, Debug, Clone, PartialEq, Eq)] pub enum Source { #[error("Binary")] @@ -63,7 +67,7 @@ pub enum TypeCheckError { #[error("Expected type {expected} is not the same as {actual}")] TypeMismatchWithSource { expected: Type, actual: Type, location: Location, source: Source }, #[error("Expected type {expected_kind:?} is not the same as {expr_kind:?}")] - TypeKindMismatch { expected_kind: String, expr_kind: String, expr_location: Location }, + TypeKindMismatch { expected_kind: Kind, expr_kind: Kind, expr_location: Location }, #[error("Evaluating {to} resulted in {to_value}, but {from_value} was expected")] TypeCanonicalizationMismatch { to: Type, @@ -100,6 +104,10 @@ pub enum TypeCheckError { VariableMustBeMutable { name: String, location: Location }, #[error("Cannot mutate immutable variable `{name}`")] CannotMutateImmutableVariable { name: String, location: Location }, + #[error("Variable {name} captured in lambda must be a mutable reference")] + MutableCaptureWithoutRef { name: String, location: Location }, + #[error("Mutable references to array indices are unsupported")] + MutableReferenceToArrayElement { location: Location }, #[error("No method named '{method_name}' found for type '{object_type}'")] UnresolvedMethodCall { method_name: String, object_type: Type, location: Location }, #[error("Cannot invoke function field '{method_name}' on type '{object_type}' as a method")] @@ -235,6 +243,13 @@ pub enum TypeCheckError { UnnecessaryUnsafeBlock { location: Location }, #[error("Unnecessary `unsafe` block")] NestedUnsafeBlock { location: Location }, + #[error("Unreachable match case")] + UnreachableCase { location: Location }, + #[error("Missing cases")] + MissingCases { cases: BTreeSet<String>, location: Location }, + /// This error is used for types like integers which have too many variants to enumerate + #[error("Missing cases: `{typ}` is non-empty")] + MissingManyCases { typ: String, location: Location }, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -279,6 +294,8 @@ impl TypeCheckError { | TypeCheckError::TupleIndexOutOfBounds { location, .. } | TypeCheckError::VariableMustBeMutable { location, .. } | TypeCheckError::CannotMutateImmutableVariable { location, ..
} + | TypeCheckError::MutableCaptureWithoutRef { location, .. } + | TypeCheckError::MutableReferenceToArrayElement { location } | TypeCheckError::UnresolvedMethodCall { location, .. } | TypeCheckError::CannotInvokeStructFieldFunctionType { location, .. } | TypeCheckError::IntegerSignedness { location, .. } @@ -321,9 +338,14 @@ impl TypeCheckError { | TypeCheckError::CyclicType { location, .. } | TypeCheckError::TypeAnnotationsNeededForIndex { location } | TypeCheckError::UnnecessaryUnsafeBlock { location } + | TypeCheckError::UnreachableCase { location } + | TypeCheckError::MissingCases { location, .. } + | TypeCheckError::MissingManyCases { location, .. } | TypeCheckError::NestedUnsafeBlock { location } => *location, + TypeCheckError::DuplicateNamedTypeArg { name: ident, .. } | TypeCheckError::NoSuchNamedTypeArg { name: ident, .. } => ident.location(), + TypeCheckError::NoMatchingImplFound(no_matching_impl_found_error) => { no_matching_impl_found_error.location } @@ -369,11 +391,37 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { ) } TypeCheckError::TypeKindMismatch { expected_kind, expr_kind, expr_location } => { - Diagnostic::simple_error( - format!("Expected kind {expected_kind}, found kind {expr_kind}"), - String::new(), - *expr_location, - ) + // Try to improve the error message for some kind combinations + match (expected_kind, expr_kind) { + (Kind::Normal, Kind::Numeric(_)) => { + Diagnostic::simple_error( + "Expected type, found numeric generic".into(), + "not a type".into(), + *expr_location, + ) + } + (Kind::Numeric(typ), Kind::Normal) => { + Diagnostic::simple_error( + "Type provided when a numeric generic was expected".into(), + format!("the numeric generic is not of type `{typ}`"), + *expr_location, + ) + } + (Kind::Numeric(expected_type), Kind::Numeric(found_type)) => { + Diagnostic::simple_error( + format!("The numeric generic is not of type `{expected_type}`"), + format!("expected `{expected_type}`, found `{found_type}`"), + *expr_location, + ) + } + _ => { + Diagnostic::simple_error( + format!("Expected kind {expected_kind}, found kind {expr_kind}"), + String::new(), + *expr_location, + ) + } + } } TypeCheckError::TypeCanonicalizationMismatch { to, from, to_value, from_value, location } => { Diagnostic::simple_error( @@ -476,6 +524,14 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { | TypeCheckError::InvalidShiftSize { location } => { Diagnostic::simple_error(error.to_string(), String::new(), *location) } + TypeCheckError::MutableCaptureWithoutRef { name, location } => Diagnostic::simple_error( + format!("Mutable variable {name} captured in lambda must be a mutable reference"), + "Use '&mut' instead of 'mut' to capture a mutable variable.".to_string(), + *location, + ), + TypeCheckError::MutableReferenceToArrayElement { location } => { + Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), *location) + }, TypeCheckError::PublicReturnType { typ, location } => Diagnostic::simple_error( "Functions cannot declare a public return type".to_string(), format!("return type is {typ}"), @@ -641,6 +697,36 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { *location, ) }, + TypeCheckError::UnreachableCase { location } => { + Diagnostic::simple_warning( + "Unreachable match case".into(), + "This pattern is redundant with one or more prior patterns".into(), + *location, + ) + }, + TypeCheckError::MissingCases { cases, location } => { + let s = if cases.len() == 1 { "" } 
else { "s" }; + + let mut not_shown = String::new(); + let mut shown_cases = cases.iter() + .map(|s| format!("`{s}`")) + .take(MAX_MISSING_CASES) + .collect::>(); + + if cases.len() > MAX_MISSING_CASES { + shown_cases.truncate(MAX_MISSING_CASES); + not_shown = format!(", and {} more not shown", cases.len() - MAX_MISSING_CASES); + } + + let shown_cases = shown_cases.join(", "); + let msg = format!("Missing case{s}: {shown_cases}{not_shown}"); + Diagnostic::simple_error(msg, String::new(), *location) + }, + TypeCheckError::MissingManyCases { typ, location } => { + let msg = format!("Missing cases: `{typ}` is non-empty"); + let secondary = "Try adding a match-all pattern: `_`".to_string(); + Diagnostic::simple_error(msg, secondary, *location) + }, } } } @@ -648,15 +734,15 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { impl<'a> From<&'a NoMatchingImplFoundError> for Diagnostic { fn from(error: &'a NoMatchingImplFoundError) -> Self { let constraints = &error.constraints; - let span = error.location; + let location = error.location; assert!(!constraints.is_empty()); let msg = format!("No matching impl found for `{}: {}`", constraints[0].0, constraints[0].1); - let mut diagnostic = Diagnostic::from_message(&msg); + let mut diagnostic = Diagnostic::from_message(&msg, location.file); let secondary = format!("No impl for `{}: {}`", constraints[0].0, constraints[0].1); - diagnostic.add_secondary(secondary, span); + diagnostic.add_secondary(secondary, location); // These must be notes since secondaries are unordered for (typ, trait_name) in &constraints[1..] { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs index f45b68dd818..c7a7890fcc0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -2,4 +2,4 @@ mod errors; pub mod generics; pub use self::errors::Source; -pub use errors::{NoMatchingImplFoundError, TypeCheckError}; +pub use errors::{MAX_MISSING_CASES, NoMatchingImplFoundError, TypeCheckError}; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 0076cab8de5..25e055188c2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -1,4 +1,5 @@ use fm::FileId; +use iter_extended::vecmap; use noirc_errors::Location; use crate::Shared; @@ -358,15 +359,13 @@ pub enum HirMatch { /// Jump directly to ExprId Success(ExprId), - Failure, + /// A Failure node in the match. `missing_case` is true if this node is the result of a missing + /// case of the match for which we should later reconstruct an example of. + Failure { missing_case: bool }, /// Run `body` if the given expression is true. /// Otherwise continue with the given decision tree. - Guard { - cond: ExprId, - body: ExprId, - otherwise: Box, - }, + Guard { cond: ExprId, body: ExprId, otherwise: Box }, /// Switch on the given variable with the given cases to test. /// The final argument is an optional match-all case to take if @@ -387,7 +386,7 @@ impl Case { } } -#[derive(Debug, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum Constructor { True, False, @@ -437,6 +436,41 @@ impl Constructor { _ => false, } } + + /// Return all the constructors of this type from one constructor. 
Intended to be used + /// for error reporting in cases where there are at least 2 constructors. + pub(crate) fn all_constructors(&self) -> Vec<(Constructor, /*arg count:*/ usize)> { + match self { + Constructor::True | Constructor::False => { + vec![(Constructor::True, 0), (Constructor::False, 0)] + } + Constructor::Unit => vec![(Constructor::Unit, 0)], + Constructor::Tuple(args) => vec![(self.clone(), args.len())], + Constructor::Variant(typ, _) => { + let typ = typ.follow_bindings(); + let Type::DataType(def, generics) = &typ else { + unreachable!( + "Constructor::Variant should have a DataType type, but found {typ:?}" + ); + }; + + let def_ref = def.borrow(); + if let Some(variants) = def_ref.get_variants(generics) { + vecmap(variants.into_iter().enumerate(), |(i, (_, fields))| { + (Constructor::Variant(typ.clone(), i), fields.len()) + }) + } else + /* def is a struct */ + { + let field_count = def_ref.fields_raw().map(|fields| fields.len()).unwrap_or(0); + vec![(Constructor::Variant(typ.clone(), 0), field_count)] + } + } + + // Nothing great to return for these + Constructor::Int(_) | Constructor::Range(..) => Vec::new(), + } + } } impl std::fmt::Display for Constructor { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 4fd5b46657e..d1c70a18259 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -14,6 +14,7 @@ use crate::{ ast::{IntegerBitSize, ItemVisibility}, hir::type_check::{TypeCheckError, generics::TraitGenerics}, node_interner::{ExprId, NodeInterner, TraitId, TypeAliasId}, + signed_field::{AbsU128, SignedField}, }; use iter_extended::vecmap; use noirc_errors::Location; @@ -110,8 +111,8 @@ pub enum Type { /*unconstrained*/ bool, ), - /// &mut T - MutableReference(Box<Type>), + /// &T + Reference(Box<Type>, /*mutable*/ bool), /// A type generic over the given type variables.
/// Storing both the TypeVariableId and TypeVariable isn't necessary @@ -858,8 +859,8 @@ impl TypeVariable { ) -> Result<(), TypeCheckError> { if !binding.kind().unifies(kind) { return Err(TypeCheckError::TypeKindMismatch { - expected_kind: format!("{}", kind), - expr_kind: format!("{}", binding.kind()), + expected_kind: kind.clone(), + expr_kind: binding.kind(), expr_location: location, }); } @@ -1046,9 +1047,12 @@ impl std::fmt::Display for Type { write!(f, "fn{closure_env_text}({}) -> {ret}", args.join(", ")) } - Type::MutableReference(element) => { + Type::Reference(element, mutable) if *mutable => { write!(f, "&mut {element}") } + Type::Reference(element, _) => { + write!(f, "&{element}") + } Type::Quoted(quoted) => write!(f, "{}", quoted), Type::InfixExpr(lhs, op, rhs, _) => { let this = self.canonicalize_checked(); @@ -1156,15 +1160,15 @@ impl Type { } pub fn is_field(&self) -> bool { - matches!(self.follow_bindings(), Type::FieldElement) + matches!(self.follow_bindings_shallow().as_ref(), Type::FieldElement) } pub fn is_bool(&self) -> bool { - matches!(self.follow_bindings(), Type::Bool) + matches!(self.follow_bindings_shallow().as_ref(), Type::Bool) } pub fn is_integer(&self) -> bool { - matches!(self.follow_bindings(), Type::Integer(_, _)) + matches!(self.follow_bindings_shallow().as_ref(), Type::Integer(_, _)) } /// If value_level, only check for Type::FieldElement, @@ -1220,7 +1224,7 @@ impl Type { Type::Alias(alias_type, generics) => { alias_type.borrow().get_type(&generics).is_primitive() } - Type::MutableReference(typ) => typ.is_primitive(), + Type::Reference(typ, _) => typ.is_primitive(), Type::DataType(..) | Type::TypeVariable(..) | Type::TraitAsType(..) @@ -1242,6 +1246,10 @@ impl Type { } } + pub(crate) fn is_mutable_ref(&self) -> bool { + matches!(self.follow_bindings_shallow().as_ref(), Type::Reference(_, true)) + } + /// True if this type can be used as a parameter to `main` or a contract function. /// This is only false for unsized types like slices or slices that do not make sense /// as a program input such as named generics or mutable references. @@ -1264,7 +1272,7 @@ impl Type { | Type::TypeVariable(_) | Type::NamedGeneric(_, _) | Type::Function(_, _, _, _) - | Type::MutableReference(_) + | Type::Reference(..) | Type::Forall(_, _) | Type::Quoted(_) | Type::Slice(_) @@ -1322,7 +1330,7 @@ impl Type { // This is possible as long as the output size is not dependent upon a witness condition. | Type::Function(_, _, _, _) | Type::Slice(_) - | Type::MutableReference(_) + | Type::Reference(..) | Type::Forall(_, _) // TODO: probably can allow code as it is all compile time | Type::Quoted(_) @@ -1380,7 +1388,7 @@ impl Type { // environment is the interpreter. In this environment, they are valid. Type::Quoted(_) => true, - Type::MutableReference(_) | Type::Forall(_, _) | Type::TraitAsType(..) => false, + Type::Reference(..) | Type::Forall(_, _) | Type::TraitAsType(..) => false, Type::Alias(alias, generics) => { let alias = alias.borrow(); @@ -1472,7 +1480,7 @@ impl Type { | Type::DataType(..) | Type::TraitAsType(..) | Type::Function(..) - | Type::MutableReference(..) + | Type::Reference(..) | Type::Forall(..) | Type::Quoted(..) => Kind::Normal, Type::Error => Kind::Any, @@ -1569,7 +1577,7 @@ impl Type { | Type::TraitAsType(..) | Type::NamedGeneric(_, _) | Type::Function(_, _, _, _) - | Type::MutableReference(_) + | Type::Reference(..) 
| Type::Forall(_, _) | Type::Constant(_, _) | Type::Quoted(_) @@ -1903,8 +1911,12 @@ impl Type { } } - (MutableReference(elem_a), MutableReference(elem_b)) => { - elem_a.try_unify(elem_b, bindings) + (Reference(elem_a, mutable_a), Reference(elem_b, mutable_b)) => { + if mutable_a == mutable_b { + elem_a.try_unify(elem_b, bindings) + } else { + Err(UnificationError) + } } (InfixExpr(lhs_a, op_a, rhs_a, _), InfixExpr(lhs_b, op_b, rhs_b, _)) => { @@ -2021,6 +2033,10 @@ impl Type { return; } + if self.try_reference_coercion(expected) { + return; + } + // Try to coerce `fn (..) -> T` to `unconstrained fn (..) -> T` match self.try_fn_to_unconstrained_fn_coercion(expected) { FunctionCoercionResult::NoCoercion => errors.push(make_error()), @@ -2089,6 +2105,25 @@ impl Type { false } + /// Attempt to coerce `&mut T` to `&T`, returning true if this is possible. + pub fn try_reference_coercion(&self, target: &Type) -> bool { + let this = self.follow_bindings(); + let target = target.follow_bindings(); + + if let (Type::Reference(this_elem, true), Type::Reference(target_elem, false)) = + (&this, &target) + { + // Still have to ensure the element types match. + // Don't need to issue an error here if not, it will be done in unify_with_coercions + let mut bindings = TypeBindings::new(); + if this_elem.try_unify(target_elem, &mut bindings).is_ok() { + Self::apply_type_bindings(bindings); + return true; + } + } + false + } + /// Apply the given type bindings, making them permanently visible for each /// clone of each type variable bound. pub fn apply_type_bindings(bindings: TypeBindings) { @@ -2144,8 +2179,8 @@ impl Type { kind.ensure_value_fits(x, location) } else { Err(TypeCheckError::TypeKindMismatch { - expected_kind: format!("{}", constant_kind), - expr_kind: format!("{}", kind), + expected_kind: constant_kind, + expr_kind: kind.clone(), expr_location: location, }) } @@ -2166,8 +2201,8 @@ impl Type { op.function(lhs_value, rhs_value, &infix_kind, location) } else { Err(TypeCheckError::TypeKindMismatch { - expected_kind: format!("{}", kind), - expr_kind: format!("{}", infix_kind), + expected_kind: kind.clone(), + expr_kind: infix_kind, expr_location: location, }) } @@ -2448,9 +2483,10 @@ impl Type { let env = Box::new(env.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Function(args, ret, env, *unconstrained) } - Type::MutableReference(element) => Type::MutableReference(Box::new( - element.substitute_helper(type_bindings, substitute_bound_typevars), - )), + Type::Reference(element, mutable) => Type::Reference( + Box::new(element.substitute_helper(type_bindings, substitute_bound_typevars)), + *mutable, + ), Type::TraitAsType(s, name, generics) => { let ordered = vecmap(&generics.ordered, |arg| { @@ -2514,7 +2550,7 @@ impl Type { || ret.occurs(target_id) || env.occurs(target_id) } - Type::MutableReference(element) => element.occurs(target_id), + Type::Reference(element, _) => element.occurs(target_id), Type::InfixExpr(lhs, _op, rhs, _) => lhs.occurs(target_id) || rhs.occurs(target_id), Type::FieldElement @@ -2574,7 +2610,7 @@ impl Type { Function(args, ret, env, *unconstrained) } - MutableReference(element) => MutableReference(Box::new(element.follow_bindings())), + Reference(element, mutable) => Reference(Box::new(element.follow_bindings()), *mutable), TraitAsType(s, name, args) => { let ordered = vecmap(&args.ordered, |arg| arg.follow_bindings()); @@ -2699,7 +2735,7 @@ impl Type { ret.replace_named_generics_with_type_variables(); env.replace_named_generics_with_type_variables(); } - 
Type::MutableReference(elem) => elem.replace_named_generics_with_type_variables(),
+            Type::Reference(elem, _) => elem.replace_named_generics_with_type_variables(),
             Type::Forall(_, typ) => typ.replace_named_generics_with_type_variables(),
             Type::InfixExpr(lhs, _op, rhs, _) => {
                 lhs.replace_named_generics_with_type_variables();
@@ -2743,7 +2779,7 @@ impl Type {
                 TypeBinding::Bound(typ) => typ.integral_maximum_size(),
                 TypeBinding::Unbound(_, kind) => kind.integral_maximum_size(),
             },
-            Type::MutableReference(typ) => typ.integral_maximum_size(),
+            Type::Reference(typ, _) => typ.integral_maximum_size(),
             Type::InfixExpr(lhs, _op, rhs, _) => lhs.infix_kind(rhs).integral_maximum_size(),
             Type::Constant(_, kind) => kind.integral_maximum_size(),
@@ -2761,6 +2797,36 @@ impl Type {
             | Type::Error => None,
         }
     }
+
+    pub(crate) fn integral_minimum_size(&self) -> Option<SignedField> {
+        match self.follow_bindings_shallow().as_ref() {
+            Type::FieldElement => None,
+            Type::Integer(sign, num_bits) => {
+                if *sign == Signedness::Unsigned {
+                    return Some(SignedField::zero());
+                }
+
+                let max_bit_size = num_bits.bit_size() - 1;
+                Some(if max_bit_size == 128 {
+                    SignedField::negative(i128::MIN.abs_u128())
+                } else {
+                    SignedField::negative(1u128 << max_bit_size)
+                })
+            }
+            Type::Bool => Some(SignedField::zero()),
+            Type::TypeVariable(var) => {
+                let binding = &var.1;
+                match &*binding.borrow() {
+                    TypeBinding::Unbound(_, type_var_kind) => match type_var_kind {
+                        Kind::Any | Kind::Normal | Kind::Integer | Kind::IntegerOrField => None,
+                        Kind::Numeric(typ) => typ.integral_minimum_size(),
+                    },
+                    TypeBinding::Bound(typ) => typ.integral_minimum_size(),
+                }
+            }
+            _ => None,
+        }
+    }
 }
 
 /// Wraps a given `expression` in `expression.as_slice()`
@@ -2940,8 +3006,8 @@ impl From<&Type> for PrintableType {
                 env: Box::new(env.as_ref().into()),
                 unconstrained: *unconstrained,
             },
-            Type::MutableReference(typ) => {
-                PrintableType::MutableReference { typ: Box::new(typ.as_ref().into()) }
+            Type::Reference(typ, mutable) => {
+                PrintableType::Reference { typ: Box::new(typ.as_ref().into()), mutable: *mutable }
             }
             Type::Quoted(_) => unreachable!(),
             Type::InfixExpr(..)
=> unreachable!(), @@ -3034,7 +3100,10 @@ impl std::fmt::Debug for Type { write!(f, "fn({}) -> {ret:?}{closure_env_text}", args.join(", ")) } - Type::MutableReference(element) => { + Type::Reference(element, false) => { + write!(f, "&{element:?}") + } + Type::Reference(element, true) => { write!(f, "&mut {element:?}") } Type::Quoted(quoted) => write!(f, "{}", quoted), @@ -3116,7 +3185,10 @@ impl std::hash::Hash for Type { env.hash(state); is_unconstrained.hash(state); } - Type::MutableReference(elem) => elem.hash(state), + Type::Reference(elem, mutable) => { + elem.hash(state); + mutable.hash(state); + } Type::Forall(vars, typ) => { vars.hash(state); typ.hash(state); @@ -3184,7 +3256,9 @@ impl PartialEq for Type { let args_and_ret_eq = lhs_args == rhs_args && lhs_ret == rhs_ret; args_and_ret_eq && lhs_env == rhs_env && lhs_unconstrained == rhs_unconstrained } - (MutableReference(lhs_elem), MutableReference(rhs_elem)) => lhs_elem == rhs_elem, + (Reference(lhs_elem, lhs_mut), Reference(rhs_elem, rhs_mut)) => { + lhs_elem == rhs_elem && lhs_mut == rhs_mut + } (Forall(lhs_vars, lhs_type), Forall(rhs_vars, rhs_type)) => { lhs_vars == rhs_vars && lhs_type == rhs_type } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index 630f192c109..a6657d63866 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -106,6 +106,8 @@ impl<'a> Lexer<'a> { // and the next token issued will be the next '&'. let span = Span::inclusive(self.position, self.position + 1); Err(LexerErrorKind::LogicalAnd { location: self.location(span) }) + } else if self.peek_char_is('[') { + self.single_char_token(Token::SliceStart) } else { self.single_char_token(Token::Ampersand) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 7367489f625..effdc827f2f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -188,6 +188,10 @@ pub enum Token { Percent, /// & Ampersand, + /// & followed immediately by '[' + /// This is a lexer hack to distinguish slices + /// from taking a reference to an array + SliceStart, /// ^ Caret, /// << @@ -287,6 +291,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::Slash => BorrowedToken::Slash, Token::Percent => BorrowedToken::Percent, Token::Ampersand => BorrowedToken::Ampersand, + Token::SliceStart => BorrowedToken::Ampersand, Token::Caret => BorrowedToken::Caret, Token::ShiftLeft => BorrowedToken::ShiftLeft, Token::ShiftRight => BorrowedToken::ShiftRight, @@ -522,6 +527,7 @@ impl fmt::Display for Token { Token::Slash => write!(f, "/"), Token::Percent => write!(f, "%"), Token::Ampersand => write!(f, "&"), + Token::SliceStart => write!(f, "&"), Token::Caret => write!(f, "^"), Token::ShiftLeft => write!(f, "<<"), Token::ShiftRight => write!(f, ">>"), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/errors.rs index 32407f29cd0..93a12a46591 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/errors.rs @@ -1,4 +1,4 @@ -use noirc_errors::{CustomDiagnostic, FileDiagnostic, Location}; +use noirc_errors::{CustomDiagnostic, Location}; use crate::{ Type, @@ -34,18 
+34,9 @@ impl MonomorphizationError {
     }
 }
 
-impl From<MonomorphizationError> for FileDiagnostic {
-    fn from(error: MonomorphizationError) -> FileDiagnostic {
-        let location = error.location();
-        let call_stack = vec![location];
-        let diagnostic = error.into_diagnostic();
-        diagnostic.with_call_stack(call_stack).in_file(location.file)
-    }
-}
-
-impl MonomorphizationError {
-    fn into_diagnostic(self) -> CustomDiagnostic {
-        let message = match &self {
+impl From<MonomorphizationError> for CustomDiagnostic {
+    fn from(error: MonomorphizationError) -> CustomDiagnostic {
+        let message = match &error {
             MonomorphizationError::UnknownArrayLength { length, err, .. } => {
                 format!("Could not determine array length `{length}`, encountered error: `{err}`")
             }
@@ -78,7 +69,7 @@ impl MonomorphizationError {
             }
         };
 
-        let location = self.location();
+        let location = error.location();
         CustomDiagnostic::simple_error(message, String::new(), location)
     }
 }
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs
index 62ed1ef2e68..ba4640fa104 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs
@@ -1280,7 +1280,8 @@ impl<'interner> Monomorphizer<'interner> {
                 }
             }
 
-            HirType::MutableReference(element) => {
+            // Lower both mutable & immutable references to the same reference type
+            HirType::Reference(element, _mutable) => {
                 let element = Self::convert_type(element, location)?;
                 ast::Type::MutableReference(Box::new(element))
             }
@@ -1386,7 +1387,7 @@ impl<'interner> Monomorphizer<'interner> {
                 Self::check_type(env, location)
             }
 
-            HirType::MutableReference(element) => Self::check_type(element, location),
+            HirType::Reference(element, _mutable) => Self::check_type(element, location),
             HirType::InfixExpr(lhs, _, rhs, _) => {
                 Self::check_type(lhs, location)?;
                 Self::check_type(rhs, location)
@@ -1601,8 +1602,8 @@ impl<'interner> Monomorphizer<'interner> {
     fn append_printable_type_info_inner(typ: &Type, arguments: &mut Vec<ast::Expression>) {
         // Disallow printing slices and mutable references for consistency,
         // since they cannot be passed from ACIR into Brillig
-        if matches!(typ, HirType::MutableReference(_)) {
-            unreachable!("println and format strings do not support mutable references.");
+        if matches!(typ, HirType::Reference(..)) {
+            unreachable!("println and format strings do not support references.");
         }
 
         let printable_type: PrintableType = typ.into();
@@ -1995,7 +1996,7 @@ impl<'interner> Monomorphizer<'interner> {
    ) -> Result<ast::Expression, MonomorphizationError> {
        match match_expr {
            HirMatch::Success(id) => self.expr(id),
-            HirMatch::Failure => {
+            HirMatch::Failure { ..
} => {
                let false_ = Box::new(ast::Expression::Literal(ast::Literal::Bool(false)));
                let msg = "match failure";
                let msg_expr = ast::Expression::Literal(ast::Literal::Str(msg.to_string()));
@@ -2102,13 +2103,13 @@ impl<'interner> Monomorphizer<'interner> {
                }))
            }
            ast::Type::MutableReference(element) => {
-                use crate::ast::UnaryOp::MutableReference;
+                use crate::ast::UnaryOp::Reference;
                let rhs = Box::new(self.zeroed_value_of_type(element, location));
                let result_type = typ.clone();
                ast::Expression::Unary(ast::Unary {
                    rhs,
                    result_type,
-                    operator: MutableReference,
+                    operator: Reference { mutable: true },
                    location,
                })
            }
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
index e8b46b9db69..12ef39f6b18 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
@@ -725,6 +725,15 @@ impl NodeInterner {
         ExprId(self.nodes.insert(Node::Expression(expr)))
     }
 
+    /// Intern an expression with everything needed for it (location & Type)
+    /// instead of requiring they be pushed later.
+    pub fn push_expr_full(&mut self, expr: HirExpression, location: Location, typ: Type) -> ExprId {
+        let id = self.push_expr(expr);
+        self.push_expr_location(id, location);
+        self.push_expr_type(id, typ);
+        id
+    }
+
     /// Stores the span for an interned expression.
     pub fn push_expr_location(&mut self, expr_id: ExprId, location: Location) {
         self.id_to_location.insert(expr_id.into(), location);
@@ -756,7 +765,7 @@ impl NodeInterner {
             id: type_id,
             name: unresolved_trait.trait_def.name.clone(),
             crate_id: unresolved_trait.crate_id,
-            location: unresolved_trait.trait_def.location,
+            location: unresolved_trait.trait_def.name.location(),
             generics,
             visibility: ItemVisibility::Private,
             self_type_typevar: TypeVariable::unbound(self.next_type_variable_id(), Kind::Normal),
@@ -1280,7 +1289,11 @@ impl NodeInterner {
     /// Returns the type of an item stored in the Interner or Error if it was not found.
     pub fn id_type(&self, index: impl Into<Index>) -> Type {
-        self.id_to_type.get(&index.into()).cloned().unwrap_or(Type::Error)
+        self.try_id_type(index).cloned().unwrap_or(Type::Error)
+    }
+
+    pub fn try_id_type(&self, index: impl Into<Index>) -> Option<&Type> {
+        self.id_to_type.get(&index.into())
     }
 
     /// Returns the type of the definition or `Type::Error` if it was not found.
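The `push_expr_full` helper above simply bundles what previously took three separate interner calls, so a call site can no longer intern an expression and forget to record its location or type. A minimal usage sketch (the `interner`, `expr`, `location`, and `typ` bindings are illustrative, not taken from this diff):

```rust
// Before: three separate calls, with the risk of skipping one.
let id = interner.push_expr(expr);
interner.push_expr_location(id, location);
interner.push_expr_type(id, typ);

// After: one call interns the expression together with its metadata.
let id = interner.push_expr_full(expr, location, typ);
```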
@@ -1398,7 +1411,7 @@ impl NodeInterner {
    ) -> Option<FuncId> {
        match self_type {
            Type::Error => None,
-            Type::MutableReference(element) => {
+            Type::Reference(element, _mutable) => {
                self.add_method(element, method_name, method_id, trait_id)
            }
            _ => {
@@ -2402,8 +2415,8 @@ impl Methods {
                    return true;
                }

-                // Handle auto-dereferencing `&mut T` into `T`
-                if let Type::MutableReference(object) = object {
+                // Handle auto-dereferencing `&T` and `&mut T` into `T`
+                if let Type::Reference(object, _mutable) = object {
                    if object.unify(typ).is_ok() {
                        return true;
                    }
@@ -2417,8 +2430,8 @@ impl Methods {
                    return true;
                }

-                // Handle auto-dereferencing `&mut T` into `T`
-                if let Type::MutableReference(method_type) = method_type {
+                // Handle auto-dereferencing `&T` and `&mut T` into `T`
+                if let Type::Reference(method_type, _mutable) = method_type {
                    if method_type.unify(typ).is_ok() {
                        return true;
                    }
@@ -2475,7 +2488,7 @@ fn get_type_method_key(typ: &Type) -> Option<TypeMethodKey> {
        Type::Function(_, _, _, _) => Some(Function),
        Type::NamedGeneric(_, _) => Some(Generic),
        Type::Quoted(quoted) => Some(Quoted(*quoted)),
-        Type::MutableReference(element) => get_type_method_key(element),
+        Type::Reference(element, _) => get_type_method_key(element),
        Type::Alias(alias, _) => get_type_method_key(&alias.borrow().typ),
        Type::DataType(struct_type, _) => Some(Struct(struct_type.borrow().id)),
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs
index f0f707a2849..b797a15f0c1 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs
@@ -208,6 +208,9 @@ impl<'a> Parser<'a> {
            match self.tokens.next() {
                Some(Ok(token)) => match token.token() {
                    Token::LineComment(comment, None) | Token::BlockComment(comment, None) => {
+                        if !last_comments.is_empty() {
+                            last_comments.push('\n');
+                        }
                        last_comments.push_str(comment);
                        continue;
                    }
@@ -554,6 +557,7 @@ impl<'a> Parser<'a> {
        );
    }

+    #[allow(unused)]
    fn expected_mut_after_ampersand(&mut self) {
        self.push_error(
            ParserErrorReason::ExpectedMutAfterAmpersand { found: self.token.token().clone() },
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs
index 5c056c52cb4..672d9428d9a 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs
@@ -79,10 +79,14 @@ impl Parser<'_> {
    /// UnaryOp = '&' 'mut' | '-' | '!' | '*'
    fn parse_unary_op(&mut self) -> Option<UnaryOp> {
-        if self.at(Token::Ampersand) && self.next_is(Token::Keyword(Keyword::Mut)) {
-            self.bump();
+        if self.at(Token::Ampersand) {
+            let mut mutable = false;
+            if self.next_is(Token::Keyword(Keyword::Mut)) {
+                mutable = true;
+                self.bump();
+            }
             self.bump();
-            Some(UnaryOp::MutableReference)
+            Some(UnaryOp::Reference { mutable })
         } else if self.eat(Token::Minus) {
             Some(UnaryOp::Minus)
         } else if self.eat(Token::Bang) {
@@ -388,20 +392,23 @@ impl Parser<'_> {
     /// UnsafeExpression = 'unsafe' Block
     fn parse_unsafe_expr(&mut self) -> Option<ExpressionKind> {
         let start_location = self.current_token_location;
+        let comments_before_unsafe = self.current_token_comments.clone();
 
         if !self.eat_keyword(Keyword::Unsafe) {
             return None;
         }
 
-        if self.current_token_comments.is_empty() {
-            if let Some(statement_comments) = &mut self.statement_comments {
-                if !statement_comments.trim().to_lowercase().starts_with("safety:") {
-                    self.push_error(ParserErrorReason::MissingSafetyComment, start_location);
-                }
+        let comments: &str = if comments_before_unsafe.is_empty() {
+            if let Some(statement_comments) = &self.statement_comments {
+                statement_comments
             } else {
-                self.push_error(ParserErrorReason::MissingSafetyComment, start_location);
+                ""
             }
-        } else if !self.current_token_comments.trim().to_lowercase().starts_with("safety:") {
+        } else {
+            &comments_before_unsafe
+        };
+
+        if !comments.lines().any(|line| line.trim().to_lowercase().starts_with("safety:")) {
             self.push_error(ParserErrorReason::MissingSafetyComment, start_location);
         }
 
@@ -735,7 +742,7 @@ impl Parser<'_> {
     /// SliceExpression = '&' ArrayLiteral
     fn parse_slice_literal(&mut self) -> Option<ExpressionKind> {
-        if !(self.at(Token::Ampersand) && self.next_is(Token::LeftBracket)) {
+        if !(self.at(Token::SliceStart) && self.next_is(Token::LeftBracket)) {
             return None;
         }
 
@@ -1079,7 +1086,9 @@ mod tests {
         let src = "
        // Safety: test
        unsafe { 1 }";
-        let expr = parse_expression_no_errors(src);
+        let mut parser = Parser::for_str_with_dummy_file(src);
+        let expr = parser.parse_expression_or_error();
+        assert!(parser.errors.is_empty());
         let ExpressionKind::Unsafe(unsafe_expression) = expr.kind else {
             panic!("Expected unsafe expression");
         };
@@ -1252,7 +1261,7 @@ mod tests {
         let ExpressionKind::Prefix(prefix) = expr.kind else {
             panic!("Expected prefix expression");
         };
-        assert!(matches!(prefix.operator, UnaryOp::MutableReference));
+        assert!(matches!(prefix.operator, UnaryOp::Reference { mutable: true }));
 
         let ExpressionKind::Variable(path) = prefix.rhs.kind else {
             panic!("Expected variable");
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs
index 0f152e64e9f..caf2cdeb1c3 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs
@@ -109,10 +109,24 @@ impl Parser<'_> {
             let visibility = self.parse_visibility();
             (FunctionReturnType::Ty(self.parse_type_or_error()), visibility)
         } else {
-            (
-                FunctionReturnType::Default(self.location_at_previous_token_end()),
-                Visibility::Private,
-            )
+            // This will return the span between `)` and `{`
+            //
+            // fn foo() { }
+            //         ^^^
+            let mut location = self.previous_token_location.merge(self.current_token_location);
+
+            // Here we change it to this (if there's space)
+            //
+            // fn foo() { }
+            //          ^
+            if location.span.end() - location.span.start() >= 3 {
+                location = Location::new(
+                    Span::from(location.span.start() + 1..location.span.end() - 1),
+                    location.file,
+                );
+            }
+
+            (FunctionReturnType::Default(location), Visibility::Private)
         };
 
         let where_clause = self.parse_where_clause();
@@ -215,7 +229,7 @@ impl Parser<'_> {
         let mut pattern = Pattern::Identifier(ident);
 
         if self_pattern.reference {
-            self_type = UnresolvedTypeData::MutableReference(Box::new(self_type))
+            self_type = UnresolvedTypeData::Reference(Box::new(self_type), self_pattern.mutable)
                 .with_location(ident_location);
         } else if self_pattern.mutable {
             pattern = Pattern::Mutable(Box::new(pattern), ident_location, true);
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/pattern.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/pattern.rs
index 61fb1572c17..e1468845ded 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/pattern.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/pattern.rs
@@ -66,14 +66,12 @@ impl Parser<'_> {
             }
         }
 
-        if self.at(Token::Ampersand) && self.next_is(Token::Keyword(Keyword::Mut)) {
-            self.bump();
+        if self.at(Token::Ampersand) {
             self.bump();
+
+            let mutable = self.eat_keyword(Keyword::Mut);
 
             if !self.next_is_colon() && self.eat_self() {
-                return Some(PatternOrSelf::SelfPattern(SelfPattern {
-                    reference: true,
-                    mutable: true,
-                }));
+                return Some(PatternOrSelf::SelfPattern(SelfPattern { reference: true, mutable }));
             } else {
                 self.push_error(
                     ParserErrorReason::RefMutCanOnlyBeUsedWithSelf,
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs
index 833a2e480c4..600ddec43c9 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/statement.rs
@@ -521,7 +521,9 @@ mod tests {
     fn parses_let_statement_with_unsafe() {
         let src = "// Safety: comment
        let x = unsafe { 1 };";
-        let statement = parse_statement_no_errors(src);
+        let mut parser = Parser::for_str_with_dummy_file(src);
+        let statement = parser.parse_statement_or_error();
+        assert!(parser.errors.is_empty());
         let StatementKind::Let(let_statement) = statement.kind else {
             panic!("Expected let statement");
         };
@@ -540,6 +542,20 @@ mod tests {
         assert_eq!(let_statement.pattern.to_string(), "x");
     }
 
+    #[test]
+    fn parses_let_statement_with_unsafe_after_some_other_comment() {
+        let src = "// Top comment
+        // Safety: comment
+        let x = unsafe { 1 };";
+        let mut parser = Parser::for_str_with_dummy_file(src);
+        let statement = parser.parse_statement_or_error();
+        assert!(parser.errors.is_empty());
+        let StatementKind::Let(let_statement) = statement.kind else {
+            panic!("Expected let statement");
+        };
+        assert_eq!(let_statement.pattern.to_string(), "x");
+    }
+
     #[test]
     fn parses_comptime_block() {
         let src = "comptime { 1 }";
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs
index bcbf57d863d..1ccb68e2677 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs
@@ -370,15 +370,15 @@ impl Parser<'_> {
     }
 
     fn parses_mutable_reference_type(&mut self) -> Option<UnresolvedTypeData> {
-        if self.eat(Token::Ampersand) {
-            if !self.eat_keyword(Keyword::Mut) {
-                self.expected_mut_after_ampersand();
-            }
+        // The `&` may be lexed as a slice start if this is an array or slice type
+        if self.eat(Token::Ampersand) || self.eat(Token::SliceStart) {
+            let mutable = self.eat_keyword(Keyword::Mut);
-            return Some(UnresolvedTypeData::MutableReference(Box::new(
-                self.parse_type_or_error(),
-            )));
-        };
+            return Some(UnresolvedTypeData::Reference(
+                Box::new(self.parse_type_or_error()),
+                mutable,
+            ));
+        }
 
         None
     }
@@ -604,11 +604,21 @@ mod tests {
         assert!(matches!(typ.typ, UnresolvedTypeData::FieldElement));
     }
 
+    #[test]
+    fn parses_reference_type() {
+        let src = "&Field";
+        let typ = parse_type_no_errors(src);
+        let UnresolvedTypeData::Reference(typ, false) = typ.typ else {
+            panic!("Expected a reference type")
+        };
+        assert!(matches!(typ.typ, UnresolvedTypeData::FieldElement));
+    }
+
     #[test]
     fn parses_mutable_reference_type() {
         let src = "&mut Field";
         let typ = parse_type_no_errors(src);
-        let UnresolvedTypeData::MutableReference(typ) = typ.typ else {
+        let UnresolvedTypeData::Reference(typ, true) = typ.typ else {
             panic!("Expected a mutable reference type")
         };
         assert!(matches!(typ.typ, UnresolvedTypeData::FieldElement));
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs
index 4daf088a2f1..652ce4aa557 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/resolve_locations.rs
@@ -32,9 +32,24 @@ impl NodeInterner {
     }
 
     /// Returns the Type of the expression that exists at the given location.
-    pub fn type_at_location(&self, location: Location) -> Option<Type> {
-        let index = self.find_location_index(location)?;
-        Some(self.id_type(index))
+    pub fn type_at_location(&self, location: Location) -> Option<&Type> {
+        // This is similar to `find_location_index` except that we skip indexes for which there is no type
+        let mut location_candidate: Option<(&Index, &Location, &Type)> = None;
+
+        for (index, interned_location) in self.id_to_location.iter() {
+            if interned_location.contains(&location) {
+                if let Some(typ) = self.try_id_type(*index) {
+                    if let Some(current_location) = location_candidate {
+                        if interned_location.span.is_smaller(&current_location.1.span) {
+                            location_candidate = Some((index, interned_location, typ));
+                        }
+                    } else {
+                        location_candidate = Some((index, interned_location, typ));
+                    }
+                }
+            }
+        }
+        location_candidate.map(|(_index, _location, typ)| typ)
     }
 
     /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId].
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/signed_field.rs b/noir/noir-repo/compiler/noirc_frontend/src/signed_field.rs
index dcddd52daa8..925b128ea24 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/signed_field.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/signed_field.rs
@@ -19,6 +19,14 @@ impl SignedField {
         Self { field: field.into(), is_negative: true }
     }
 
+    pub fn zero() -> SignedField {
+        Self { field: FieldElement::zero(), is_negative: false }
+    }
+
+    pub fn one() -> SignedField {
+        Self { field: FieldElement::one(), is_negative: false }
+    }
+
     /// Convert a signed integer to a SignedField, carefully handling
     /// INT_MIN in the process. Note that to convert an unsigned integer
     /// you can call `SignedField::positive`.
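The `SignedField::zero` and `SignedField::one` constructors added above give the frontend exact endpoints for the integer ranges used by match-exhaustiveness checking (see `integral_minimum_size` in types.rs earlier in this diff). A rough sketch of the boundary values involved, assuming only the `SignedField` API shown in this diff (the `min_*` bindings are illustrative):

```rust
// Minimum values as integral_minimum_size computes them:
let min_bool = SignedField::zero();             // bools and unsigned ints start at 0
let min_i8 = SignedField::negative(1u128 << 7); // i8 ranges down to -128
// The widest signed case reuses i128::MIN's magnitude instead of a shift,
// mirroring the 128-bit special case in integral_minimum_size.
let min_widest = SignedField::negative(i128::MIN.abs_u128());
```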
@@ -125,6 +133,30 @@ impl std::fmt::Display for SignedField { } } +impl rangemap::StepLite for SignedField { + fn add_one(&self) -> Self { + if self.is_negative { + if self.field.is_one() { + Self::new(FieldElement::zero(), false) + } else { + Self::new(self.field - FieldElement::one(), self.is_negative) + } + } else { + Self::new(self.field + FieldElement::one(), self.is_negative) + } + } + + fn sub_one(&self) -> Self { + if self.is_negative { + Self::new(self.field + FieldElement::one(), self.is_negative) + } else if self.field.is_zero() { + Self::new(FieldElement::one(), true) + } else { + Self::new(self.field - FieldElement::one(), self.is_negative) + } + } +} + pub trait AbsU128 { /// Necessary to handle casting to unsigned generically without overflowing on INT_MIN. fn abs_u128(self) -> u128; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 7bbe1f60873..62da755887b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -362,12 +362,12 @@ fn check_trait_implementation_duplicate_method() { impl Default for Foo { // Duplicate trait methods should not compile fn default(x: Field, y: Field) -> Field { - ^^^^^^^ Duplicate definitions of trait associated function with name default found ~~~~~~~ First trait associated function found here y + 2 * x } // Duplicate trait methods should not compile fn default(x: Field, y: Field) -> Field { + ^^^^^^^ Duplicate definitions of trait associated function with name default found ~~~~~~~ Second trait associated function found here x + 2 * y } @@ -381,7 +381,6 @@ fn check_trait_implementation_duplicate_method() { #[test] fn check_trait_wrong_method_return_type() { - // TODO: improve the error location let src = " trait Default { fn default() -> Self; @@ -392,7 +391,7 @@ fn check_trait_wrong_method_return_type() { impl Default for Foo { fn default() -> Field { - ^^^^^^^ Expected type Foo, found type Field + ^^^^^ Expected type Foo, found type Field 0 } } @@ -406,7 +405,6 @@ fn check_trait_wrong_method_return_type() { #[test] fn check_trait_wrong_method_return_type2() { - // TODO: improve the error location let src = " trait Default { fn default(x: Field, y: Field) -> Self; @@ -419,7 +417,7 @@ fn check_trait_wrong_method_return_type2() { impl Default for Foo { fn default(x: Field, _y: Field) -> Field { - ^^^^^^^ Expected type Foo, found type Field + ^^^^^ Expected type Foo, found type Field x } } @@ -430,6 +428,31 @@ fn check_trait_wrong_method_return_type2() { check_errors(src); } +#[test] +fn check_trait_wrong_method_return_type3() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(_x: Field, _y: Field) { + ^ Expected type Foo, found type () + } + } + + fn main() { + let _ = Foo { bar: 1, array: [2, 3] }; // silence Foo never constructed warning + } + "; + check_errors(src); +} + #[test] fn check_trait_missing_implementation() { let src = " @@ -506,7 +529,6 @@ fn check_trait_wrong_method_name() { #[test] fn check_trait_wrong_parameter() { - // TODO: improve the error location let src = " trait Default { fn default(x: Field) -> Self; @@ -518,7 +540,7 @@ fn check_trait_wrong_parameter() { impl Default for Foo { fn default(x: u32) -> Self { - ^ Parameter #1 of method `default` must be of type Field, not u32 + ^^^ Parameter #1 of method `default` must be of type Field, not u32 Foo {bar: x} 
} } @@ -543,7 +565,7 @@ fn check_trait_wrong_parameter2() { impl Default for Foo { fn default(x: Field, y: Foo) -> Self { - ^ Parameter #2 of method `default` must be of type Field, not Foo + ^^^ Parameter #2 of method `default` must be of type Field, not Foo Self { bar: x, array: [x, y.bar] } } } @@ -644,7 +666,6 @@ fn check_impl_struct_not_trait() { fn check_trait_duplicate_declaration() { let src = " trait Default { - ^^^^^^^ Duplicate definitions of trait definition with name Default found ~~~~~~~ First trait definition found here fn default(x: Field, y: Field) -> Self; } @@ -661,6 +682,7 @@ fn check_trait_duplicate_declaration() { } trait Default { + ^^^^^^^ Duplicate definitions of trait definition with name Default found ~~~~~~~ Second trait definition found here fn default(x: Field) -> Self; } @@ -1499,15 +1521,15 @@ fn numeric_generic_binary_operation_type_mismatch() { #[test] fn bool_generic_as_loop_bound() { - // TODO: improve the error location of the last error (should be just on N) let src = r#" pub fn read() { ^ N has a type of bool. The only supported numeric generic types are `u1`, `u8`, `u16`, and `u32`. ~ Unsupported numeric generic type let mut fields = [0; N]; - ^ Expected kind numeric u32, found kind numeric bool + ^ The numeric generic is not of type `u32` + ~ expected `u32`, found `bool` for i in 0..N { - ^^^^ Expected type Field, found type bool + ^ Expected type Field, found type bool fields[i] = i + 1; } assert(fields[0] == 1); @@ -1516,6 +1538,19 @@ fn bool_generic_as_loop_bound() { check_errors(src); } +#[test] +fn wrong_type_in_for_range() { + let src = r#" + pub fn foo() { + for _ in true..false { + ^^^^ The type bool cannot be used in a for loop + + } + } + "#; + check_errors(src); +} + #[test] fn numeric_generic_in_function_signature() { let src = r#" @@ -1526,13 +1561,12 @@ fn numeric_generic_in_function_signature() { #[test] fn numeric_generic_as_struct_field_type_fails() { - // TODO: improve error message, in Rust it says "expected type, found const parameter `N`" - // which might be more understandable let src = r#" pub struct Foo { a: Field, b: N, - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type } "#; check_errors(src); @@ -1545,7 +1579,8 @@ fn normal_generic_as_array_length() { pub struct Foo { a: Field, b: [Field; N], - ^^^^^^^^^^ Expected kind numeric u32, found kind normal + ^^^^^^^^^^ Type provided when a numeric generic was expected + ~~~~~~~~~~ the numeric generic is not of type `u32` } "#; check_errors(src); @@ -1553,13 +1588,17 @@ fn normal_generic_as_array_length() { #[test] fn numeric_generic_as_param_type() { - // TODO: improve the error message, see what Rust does let src = r#" pub fn foo(x: I) -> I { - ^ Expected kind normal, found kind numeric u32 - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type + ^ Expected type, found numeric generic + ~ not a type + + let _q: I = 5; - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type x } "#; @@ -1568,23 +1607,23 @@ fn numeric_generic_as_param_type() { #[test] fn numeric_generic_as_unused_param_type() { - // TODO: improve the error message let src = r#" pub fn foo(_x: I) { } - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type "#; check_errors(src); } #[test] fn numeric_generic_as_unused_trait_fn_param_type() { - // TODO: improve the error message let src = r#" trait Foo { ^^^ unused trait 
Foo ~~~ unused trait fn foo(_x: I) { } - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type } "#; check_errors(src); @@ -1592,7 +1631,6 @@ fn numeric_generic_as_unused_trait_fn_param_type() { #[test] fn numeric_generic_as_return_type() { - // TODO: improve the error message let src = r#" // std::mem::zeroed() without stdlib trait Zeroed { @@ -1600,7 +1638,8 @@ fn numeric_generic_as_return_type() { } fn foo(x: T) -> I where T: Zeroed { - ^ Expected kind normal, found kind numeric Field + ^ Expected type, found numeric generic + ~ not a type ^^^ unused function foo ~~~ unused function x.zeroed() @@ -1613,7 +1652,6 @@ fn numeric_generic_as_return_type() { #[test] fn numeric_generic_used_in_nested_type_fails() { - // TODO: improve the error message let src = r#" pub struct Foo { a: Field, @@ -1621,7 +1659,8 @@ fn numeric_generic_used_in_nested_type_fails() { } pub struct Bar { inner: N - ^ Expected kind normal, found kind numeric u32 + ^ Expected type, found numeric generic + ~ not a type } "#; check_errors(src); @@ -1629,12 +1668,12 @@ fn numeric_generic_used_in_nested_type_fails() { #[test] fn normal_generic_used_in_nested_array_length_fail() { - // TODO: improve the error message let src = r#" pub struct Foo { a: Field, b: Bar, - ^ Expected kind numeric u32, found kind normal + ^ Type provided when a numeric generic was expected + ~ the numeric generic is not of type `u32` } pub struct Bar { inner: [Field; N] @@ -1755,7 +1794,7 @@ fn numeric_generic_used_in_turbofish() { // allow u16 to be used as an array size #[test] fn numeric_generic_u16_array_size() { - // TODO: improve the error location (and maybe the message) + // TODO: improve the error location let src = r#" fn len(_arr: [Field; N]) -> u32 { N @@ -1763,8 +1802,10 @@ fn numeric_generic_u16_array_size() { pub fn foo() -> u32 { let fields: [Field; N] = [0; N]; - ^^^^^^^^^^ Expected kind numeric u32, found kind numeric u16 - ^ Expected kind numeric u32, found kind numeric u16 + ^ The numeric generic is not of type `u32` + ~ expected `u32`, found `u16` + ^^^^^^^^^^ The numeric generic is not of type `u32` + ~~~~~~~~~~ expected `u32`, found `u16` len(fields) } "#; @@ -1864,7 +1905,8 @@ fn normal_generic_used_when_numeric_expected_in_where_clause() { } pub fn read() -> T where T: Deserialize { - ^ Expected kind numeric u32, found kind normal + ^ Type provided when a numeric generic was expected + ~ the numeric generic is not of type `u32` T::deserialize([0, 1]) } "#; @@ -1877,10 +1919,13 @@ fn normal_generic_used_when_numeric_expected_in_where_clause() { } pub fn read() -> T where T: Deserialize { - ^ Expected kind numeric u32, found kind normal + ^ Type provided when a numeric generic was expected + ~ the numeric generic is not of type `u32` let mut fields: [Field; N] = [0; N]; - ^ Expected kind numeric u32, found kind normal - ^^^^^^^^^^ Expected kind numeric u32, found kind normal + ^ Type provided when a numeric generic was expected + ~ the numeric generic is not of type `u32` + ^^^^^^^^^^ Type provided when a numeric generic was expected + ~~~~~~~~~~ the numeric generic is not of type `u32` for i in 0..N { ^ cannot find `N` in this scope ~ not found in this scope @@ -1903,7 +1948,8 @@ fn numeric_generics_type_kind_mismatch() { fn bar() -> u16 { foo::() - ^ Expected kind numeric u32, found kind numeric u16 + ^ The numeric generic is not of type `u32` + ~ expected `u32`, found `u16` } global M: u16 = 3; @@ -2421,13 +2467,12 @@ fn bit_not_on_untyped_integer() { #[test] fn 
duplicate_struct_field() { - // TODO: the primary error location should be on the second field let src = r#" pub struct Foo { x: i32, - ^ Duplicate definitions of struct field with name x found ~ First struct field found here x: i32, + ^ Duplicate definitions of struct field with name x found ~ Second struct field found here } @@ -2546,7 +2591,7 @@ fn uses_self_type_in_trait_where_clause() { } pub trait Foo where Self: Trait { - ~~~~~ required by this bound in `Foo + ~~~~~ required by this bound in `Foo` fn foo(self) -> bool { self.trait_func() ^^^^^^^^^^^^^^^^^ No method named 'trait_func' found for type 'Bar' @@ -2557,7 +2602,7 @@ fn uses_self_type_in_trait_where_clause() { impl Foo for Bar { ^^^ The trait bound `_: Trait` is not satisfied - ~~~ The trait `Trait` is not implemented for `_ + ~~~ The trait `Trait` is not implemented for `_` } @@ -2735,25 +2780,27 @@ fn as_trait_path_syntax_no_impl() { #[test] fn do_not_infer_globals_to_u32_from_type_use() { - // TODO: improve the error location (maybe it should be on the global name) let src = r#" global ARRAY_LEN = 3; - ^ Globals must have a specified type + ^^^^^^^^^ Globals must have a specified type ~ Inferred type is `Field` global STR_LEN: _ = 2; - ^ Globals must have a specified type + ^^^^^^^ Globals must have a specified type ~ Inferred type is `Field` global FMT_STR_LEN = 2; - ^ Globals must have a specified type + ^^^^^^^^^^^ Globals must have a specified type ~ Inferred type is `Field` fn main() { let _a: [u32; ARRAY_LEN] = [1, 2, 3]; - ^^^^^^^^^^^^^^^^ Expected kind numeric u32, found kind numeric Field + ^^^^^^^^^^^^^^^^ The numeric generic is not of type `u32` + ~~~~~~~~~~~~~~~~ expected `u32`, found `Field` let _b: str = "hi"; - ^^^^^^^^^^^^ Expected kind numeric u32, found kind numeric Field + ^^^^^^^^^^^^ The numeric generic is not of type `u32` + ~~~~~~~~~~~~ expected `u32`, found `Field` let _c: fmtstr = f"hi"; - ^^^^^^^^^^^^^^^^^^^^^^ Expected kind numeric u32, found kind numeric Field + ^^^^^^^^^^^^^^^^^^^^^^ The numeric generic is not of type `u32` + ~~~~~~~~~~~~~~~~~~~~~~ expected `u32`, found `Field` } "#; check_errors(src); @@ -2763,25 +2810,25 @@ fn do_not_infer_globals_to_u32_from_type_use() { fn do_not_infer_partial_global_types() { let src = r#" pub global ARRAY: [Field; _] = [0; 3]; - ^^^^^^ Globals must have a specified type + ^^^^^ Globals must have a specified type ~~~~~~ Inferred type is `[Field; 3]` pub global NESTED_ARRAY: [[Field; _]; 3] = [[]; 3]; - ^^^^^^^ Globals must have a specified type + ^^^^^^^^^^^^ Globals must have a specified type ~~~~~~~ Inferred type is `[[Field; 0]; 3]` pub global STR: str<_> = "hi"; - ^^^^ Globals must have a specified type + ^^^ Globals must have a specified type ~~~~ Inferred type is `str<2>` pub global NESTED_STR: [str<_>] = &["hi"]; - ^^^^^^^ Globals must have a specified type + ^^^^^^^^^^ Globals must have a specified type ~~~~~~~ Inferred type is `[str<2>]` pub global FORMATTED_VALUE: str<5> = "there"; pub global FMT_STR: fmtstr<_, _> = f"hi {FORMATTED_VALUE}"; - ^^^^^^^^^^^^^^^^^^^^^^^ Globals must have a specified type + ^^^^^^^ Globals must have a specified type ~~~~~~~~~~~~~~~~~~~~~~~ Inferred type is `fmtstr<20, (str<5>)>` pub global TUPLE_WITH_MULTIPLE: ([str<_>], [[Field; _]; 3]) = + ^^^^^^^^^^^^^^^^^^^ Globals must have a specified type (&["hi"], [[]; 3]); - ^^^^^^^^^^^^^^^^^^ Globals must have a specified type ~~~~~~~~~~~~~~~~~~ Inferred type is `([str<2>], [[Field; 0]; 3])` fn main() { } @@ -2839,7 +2886,8 @@ fn non_u32_as_array_length() { fn main() { let 
_a: [u32; ARRAY_LEN] = [1, 2, 3]; - ^^^^^^^^^^^^^^^^ Expected kind numeric u32, found kind numeric u8 + ^^^^^^^^^^^^^^^^ The numeric generic is not of type `u32` + ~~~~~~~~~~~~~~~~ expected `u32`, found `u8` } "#; check_errors(src); @@ -3850,15 +3898,14 @@ fn errors_if_while_body_type_is_not_unit() { #[test] fn check_impl_duplicate_method_without_self() { - // TODO: the primary error location should be n the second `foo` let src = " pub struct Foo {} impl Foo { fn foo() {} - ^^^ duplicate definitions of foo found ~~~ first definition found here fn foo() {} + ^^^ duplicate definitions of foo found ~~~ second definition found here } @@ -3896,3 +3943,206 @@ fn subtract_to_int_min() { let errors = get_program_errors(src); assert_eq!(errors.len(), 0); } + +#[test] +fn mutate_with_reference_in_lambda() { + let src = r#" + fn main() { + let x = &mut 3; + let f = || { + *x += 2; + }; + f(); + assert(*x == 5); + } + "#; + + assert_no_errors(src); +} + +#[test] +fn mutate_with_reference_marked_mutable_in_lambda() { + let src = r#" + fn main() { + let mut x = &mut 3; + let f = || { + *x += 2; + }; + f(); + assert(*x == 5); + } + "#; + assert_no_errors(src); +} + +#[test] +fn deny_capturing_mut_variable_without_reference_in_lambda() { + let src = r#" + fn main() { + let mut x = 3; + let f = || { + x += 2; + ^ Mutable variable x captured in lambda must be a mutable reference + ~ Use '&mut' instead of 'mut' to capture a mutable variable. + }; + f(); + assert(x == 5); + } + "#; + check_errors(src); +} + +#[test] +fn deny_capturing_mut_variable_without_reference_in_nested_lambda() { + let src = r#" + fn main() { + let mut x = 3; + let f = || { + let inner = || { + x += 2; + ^ Mutable variable x captured in lambda must be a mutable reference + ~ Use '&mut' instead of 'mut' to capture a mutable variable. + }; + inner(); + }; + f(); + assert(x == 5); + } + "#; + check_errors(src); +} + +#[test] +fn allow_capturing_mut_variable_only_used_immutably() { + let src = r#" + fn main() { + let mut x = 3; + let f = || x; + let _x2 = f(); + assert(x == 3); + } + "#; + assert_no_errors(src); +} + +#[test] +fn deny_capturing_mut_var_as_param_to_function() { + let src = r#" + fn main() { + let mut x = 3; + let f = || mutate(&mut x); + ^ Mutable variable x captured in lambda must be a mutable reference + ~ Use '&mut' instead of 'mut' to capture a mutable variable. + f(); + assert(x == 3); + } + + fn mutate(x: &mut Field) { + *x = 5; + } + "#; + check_errors(src); +} + +#[test] +fn deny_capturing_mut_var_as_param_to_function_in_nested_lambda() { + let src = r#" + fn main() { + let mut x = 3; + let f = || { + let inner = || mutate(&mut x); + ^ Mutable variable x captured in lambda must be a mutable reference + ~ Use '&mut' instead of 'mut' to capture a mutable variable. + inner(); + }; + f(); + assert(x == 3); + } + + fn mutate(x: &mut Field) { + *x = 5; + } + "#; + check_errors(src); +} + +#[test] +fn deny_capturing_mut_var_as_param_to_impl_method() { + let src = r#" + struct Foo { + value: Field, + } + + impl Foo { + fn mutate(&mut self) { + self.value = 2; + } + } + + fn main() { + let mut foo = Foo { value: 1 }; + let f = || foo.mutate(); + ^^^ Mutable variable foo captured in lambda must be a mutable reference + ~~~ Use '&mut' instead of 'mut' to capture a mutable variable. 
+ f(); + assert(foo.value == 2); + } + "#; + check_errors(src); +} + +#[test] +fn deny_attaching_mut_ref_to_immutable_object() { + let src = r#" + struct Foo { + value: Field, + } + + impl Foo { + fn mutate(&mut self) { + self.value = 2; + } + } + + fn main() { + let foo = Foo { value: 1 }; + let f = || foo.mutate(); + ^^^ Cannot mutate immutable variable `foo` + f(); + assert(foo.value == 2); + } + "#; + check_errors(src); +} + +#[test] +fn immutable_references_with_ownership_feature() { + let src = r#" + unconstrained fn main() { + let mut array = [1, 2, 3]; + borrow(&array); + } + + fn borrow(_array: &[Field; 3]) {} + "#; + + let (_, _, errors) = get_program_using_features(src, &[UnstableFeature::Ownership]); + assert_eq!(errors.len(), 0); +} + +#[test] +fn immutable_references_without_ownership_feature() { + let src = r#" + fn main() { + let mut array = [1, 2, 3]; + borrow(&array); + ^^^^^^ This requires the unstable feature 'ownership' which is not enabled + ~~~~~~ Pass -Zownership to nargo to enable this feature at your own risk. + } + + fn borrow(_array: &[Field; 3]) {} + ^^^^^^^^^^^ This requires the unstable feature 'ownership' which is not enabled + ~~~~~~~~~~~ Pass -Zownership to nargo to enable this feature at your own risk. + "#; + check_errors(src); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/enums.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/enums.rs index 78f0442bc9f..d9da717dc56 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/enums.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/enums.rs @@ -8,13 +8,12 @@ use super::{check_errors, check_errors_using_features}; #[test] fn error_with_duplicate_enum_variant() { - // TODO: the primary error should be on the second `Bar` let src = r#" pub enum Foo { Bar(i32), - ^^^ Duplicate definitions of enum variant with name Bar found ~~~ First enum variant found here Bar(u8), + ^^^ Duplicate definitions of enum variant with name Bar found ~~~ Second enum variant found here } @@ -200,3 +199,170 @@ fn constructor_arg_arity_mismatch_in_pattern() { "#; check_errors(src); } + +#[test] +fn unreachable_match_case() { + check_errors( + r#" + fn main() { + match Opt::Some(Opt::Some(3)) { + Opt::Some(_) => (), + Opt::None => (), + Opt::Some(Opt::Some(_)) => (), + ^^^^^^^^^^^^^^^^^^^^^^^ Unreachable match case + ~~~~~~~~~~~~~~~~~~~~~~~ This pattern is redundant with one or more prior patterns + } + } + + enum Opt { + None, + Some(T), + } + "#, + ); +} + +#[test] +fn match_reachability_errors_ignored_when_there_is_a_type_error() { + // No comment on the second `None` case. + // Type errors in general mess up reachability errors in match cases. + // If we naively change to catch this case (which is easy) we also end up + // erroring that the `3 => ()` case is unreachable as well, which is true + // but we don't want to annoy users with an extra obvious error. This + // behavior matches Rust as well. 
+ check_errors( + " + fn main() { + match Opt::Some(3) { + Opt::None => (), + Opt::Some(_) => {}, + Opt::None => (), + 3 => (), + ^ Expected type Opt, found type Field + } + } + + enum Opt { + None, + Some(T), + } + ", + ); +} + +#[test] +fn missing_single_case() { + check_errors( + " + fn main() { + match Opt::Some(3) { + ^^^^^^^^^^^^ Missing case: `Some(_)` + Opt::None => (), + } + } + + enum Opt { + None, + Some(T), + } + ", + ); +} + +#[test] +fn missing_many_cases() { + check_errors( + " + fn main() { + match Abc::A { + ^^^^^^ Missing cases: `C`, `D`, `E`, and 21 more not shown + Abc::A => (), + Abc::B => (), + } + } + + enum Abc { + A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z + } + ", + ); +} + +#[test] +fn missing_int_ranges() { + check_errors( + " + fn main() { + let x: i8 = 3; + match Opt::Some(x) { + ^^^^^^^^^^^^ Missing cases: `None`, `Some(-128..=3)`, `Some(5)`, and 1 more not shown + Opt::Some(4) => (), + Opt::Some(6) => (), + } + } + + enum Opt { + None, + Some(T), + } + ", + ); +} + +#[test] +fn missing_int_ranges_with_negatives() { + check_errors( + " + fn main() { + let x: i32 = -4; + match x { + ^ Missing cases: `-2147483648..=-6`, `-4..=-1`, `1..=2`, and 1 more not shown + -5 => (), + 0 => (), + 3 => (), + } + } + ", + ); +} + +#[test] +fn missing_cases_with_empty_match() { + check_errors( + " + fn main() { + match Abc::A {} + ^^^^^^ Missing cases: `A`, `B`, `C`, and 23 more not shown + } + + enum Abc { + A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z + } + ", + ); +} + +#[test] +fn missing_integer_cases_with_empty_match() { + check_errors( + " + fn main() { + let x: i8 = 3; + match x {} + ^ Missing cases: `i8` is non-empty + ~ Try adding a match-all pattern: `_` + } + ", + ); +} + +#[test] +fn match_on_empty_enum() { + check_errors( + " + pub fn foo(v: Void) { + match v {} + } + pub enum Void {}", + ); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs index 5ba63bc6a29..d2f9d9a9672 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/traits.rs @@ -102,11 +102,10 @@ fn trait_inheritance_with_generics_4() { #[test] fn trait_inheritance_dependency_cycle() { - // TODO: maybe the error location should be just on Foo let src = r#" trait Foo: Bar {} - ^^^^^^^^^^^^^^^^^ Dependency cycle found - ~~~~~~~~~~~~~~~~~ 'Foo' recursively depends on itself: Foo -> Bar -> Foo + ^^^ Dependency cycle found + ~~~ 'Foo' recursively depends on itself: Foo -> Bar -> Foo trait Bar: Foo {} fn main() {} "#; @@ -115,18 +114,17 @@ fn trait_inheritance_dependency_cycle() { #[test] fn trait_inheritance_missing_parent_implementation() { - // TODO: the secondary errors are missing a closing backtick let src = r#" pub trait Foo {} pub trait Bar: Foo {} - ~~~ required by this bound in `Bar + ~~~ required by this bound in `Bar` pub struct Struct {} impl Bar for Struct {} ^^^^^^ The trait bound `Struct: Foo` is not satisfied - ~~~~~~ The trait `Foo` is not implemented for `Struct + ~~~~~~ The trait `Foo` is not implemented for `Struct` fn main() { let _ = Struct {}; // silence Struct never constructed warning @@ -214,7 +212,7 @@ fn errors_if_impl_trait_constraint_is_not_satisfied() { pub trait Foo where T: Greeter, - ~~~~~~~ required by this bound in `Foo + ~~~~~~~ required by this bound in `Foo` { fn greet(object: U) where @@ -230,7 +228,7 @@ fn errors_if_impl_trait_constraint_is_not_satisfied() { impl Foo for Bar {} ^^^ The trait 
bound `SomeGreeter: Greeter` is not satisfied - ~~~ The trait `Greeter` is not implemented for `SomeGreeter + ~~~ The trait `Greeter` is not implemented for `SomeGreeter` fn main() {} "#; @@ -1214,3 +1212,36 @@ fn calls_trait_method_using_struct_name_when_multiple_impls_exist_and_errors_tur "#; check_errors(src); } + +#[test] +fn as_trait_path_in_expression() { + let src = r#" + fn main() { + cursed::(); + } + + fn cursed() + where T: Foo + Foo2 + { + ::bar(1); + ::bar(()); + + // Use each function with different generic arguments + ::bar(()); + } + + trait Foo { fn bar(x: U); } + trait Foo2 { fn bar(x: U); } + + pub struct S {} + + impl Foo for S { + fn bar(_x: Z) {} + } + + impl Foo2 for S { + fn bar(_x: Z) {} + } + "#; + assert_no_errors(src); +} diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 6ff211cac2a..8e25de58b43 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -49,8 +49,9 @@ pub enum PrintableType { env: Box, unconstrained: bool, }, - MutableReference { + Reference { typ: Box, + mutable: bool, }, Unit, } @@ -127,7 +128,10 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Op (PrintableValue::Field(_), PrintableType::Function { arguments, return_type, .. }) => { output.push_str(&format!("< {:?}>>", arguments, return_type,)); } - (_, PrintableType::MutableReference { .. }) => { + (_, PrintableType::Reference { mutable: false, .. }) => { + output.push_str("<>"); + } + (_, PrintableType::Reference { mutable: true, .. }) => { output.push_str("<>"); } (PrintableValue::Vec { array_elements, is_slice }, PrintableType::Array { typ, .. }) diff --git a/noir/noir-repo/compiler/wasm/src/compile.rs b/noir/noir-repo/compiler/wasm/src/compile.rs index 021462d9f46..8c0359bbced 100644 --- a/noir/noir-repo/compiler/wasm/src/compile.rs +++ b/noir/noir-repo/compiler/wasm/src/compile.rs @@ -130,7 +130,7 @@ pub(crate) struct DependencyGraph { pub(crate) root_dependencies: Vec, pub(crate) library_dependencies: BTreeMap>, } -/// This is map contains the paths of all of the files in the entry-point crate and +/// This map contains the paths of all of the files in the entry-point crate and /// the transitive dependencies of the entry-point crate. 
///
/// This is for all intents and purposes the file system that the compiler will use to resolve/compile
@@ -176,7 +176,7 @@ pub fn compile_program(
     let compiled_program =
         noirc_driver::compile_main(&mut context, crate_id, &compile_options, None)
             .map_err(|errs| {
-            CompileError::with_file_diagnostics(
+            CompileError::with_custom_diagnostics(
                 "Failed to compile program",
                 errs,
                 &context.file_manager,
@@ -186,7 +186,7 @@ pub fn compile_program(
     let optimized_program = nargo::ops::transform_program(compiled_program, expression_width);
     nargo::ops::check_program(&optimized_program).map_err(|errs| {
-        CompileError::with_file_diagnostics(
+        CompileError::with_custom_diagnostics(
             "Compiled program is not solvable",
             errs,
             &context.file_manager,
@@ -212,8 +212,8 @@ pub fn compile_contract(
     let compiled_contract =
         noirc_driver::compile_contract(&mut context, crate_id, &compile_options)
-            .map_err(|errs: Vec<FileDiagnostic>| {
-                CompileError::with_file_diagnostics(
+            .map_err(|errs: Vec<CustomDiagnostic>| {
+                CompileError::with_custom_diagnostics(
                     "Failed to compile contract",
                     errs,
                     &context.file_manager,
diff --git a/noir/noir-repo/compiler/wasm/src/compile_new.rs b/noir/noir-repo/compiler/wasm/src/compile_new.rs
index e7c2e94cd84..37065c8f825 100644
--- a/noir/noir-repo/compiler/wasm/src/compile_new.rs
+++ b/noir/noir-repo/compiler/wasm/src/compile_new.rs
@@ -109,7 +109,7 @@ impl CompilerContext {
         let compiled_program =
             compile_main(&mut self.context, root_crate_id, &compile_options, None)
                 .map_err(|errs| {
-                CompileError::with_file_diagnostics(
+                CompileError::with_custom_diagnostics(
                     "Failed to compile program",
                     errs,
                     &self.context.file_manager,
@@ -119,7 +119,7 @@ impl CompilerContext {
         let optimized_program = nargo::ops::transform_program(compiled_program, expression_width);
         nargo::ops::check_program(&optimized_program).map_err(|errs| {
-            CompileError::with_file_diagnostics(
+            CompileError::with_custom_diagnostics(
                 "Compiled program is not solvable",
                 errs,
                 &self.context.file_manager,
@@ -148,7 +148,7 @@ impl CompilerContext {
         let compiled_contract =
             compile_contract(&mut self.context, root_crate_id, &compile_options)
                 .map_err(|errs| {
-                CompileError::with_file_diagnostics(
+                CompileError::with_custom_diagnostics(
                     "Failed to compile contract",
                     errs,
                     &self.context.file_manager,
diff --git a/noir/noir-repo/compiler/wasm/src/errors.rs b/noir/noir-repo/compiler/wasm/src/errors.rs
index c2e51162d3f..47927df1056 100644
--- a/noir/noir-repo/compiler/wasm/src/errors.rs
+++ b/noir/noir-repo/compiler/wasm/src/errors.rs
@@ -4,7 +4,7 @@ use serde::Serialize;
 use wasm_bindgen::prelude::*;
 
 use fm::FileManager;
-use noirc_errors::FileDiagnostic;
+use noirc_errors::CustomDiagnostic;
 
 #[wasm_bindgen(typescript_custom_section)]
 const DIAGNOSTICS: &'static str = r#"
@@ -87,8 +87,7 @@ pub struct Diagnostic {
 }
 
 impl Diagnostic {
-    fn new(file_diagnostic: &FileDiagnostic, file: String) -> Diagnostic {
-        let diagnostic = &file_diagnostic.diagnostic;
+    fn new(diagnostic: &CustomDiagnostic, file: String) -> Diagnostic {
         let message = diagnostic.message.clone();
 
         let secondaries = diagnostic
@@ -116,16 +115,16 @@ impl CompileError {
         CompileError { message: message.to_string(), diagnostics: vec![] }
     }
 
-    pub fn with_file_diagnostics(
+    pub fn with_custom_diagnostics(
         message: &str,
-        file_diagnostics: Vec<FileDiagnostic>,
+        custom_diagnostics: Vec<CustomDiagnostic>,
         file_manager: &FileManager,
     ) -> CompileError {
-        let diagnostics: Vec<_> = file_diagnostics
+        let diagnostics: Vec<_> = custom_diagnostics
             .iter()
             .map(|err| {
                 let file_path = file_manager
-                    .path(err.file_id)
+                    .path(err.file)
.expect("File must exist to have caused diagnostics"); Diagnostic::new(err, file_path.to_str().unwrap().to_string()) }) diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 3bbbede78cc..31c81657144 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -173,6 +173,7 @@ "noncanonical", "nouner", "oneshot", + "oneof", "overflowing", "pedersen", "peekable", @@ -188,6 +189,11 @@ "printstd", "proptest", "proptests", + "prost", + "proto", + "protobuf", + "protoc", + "protos", "pseudocode", "pubkey", "quantile", diff --git a/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx b/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx deleted file mode 100644 index 0a128adb2de..00000000000 --- a/noir/noir-repo/docs/docs/how_to/merkle-proof.mdx +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Prove Merkle Tree Membership -description: - Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a - merkle tree with a specified root, at a given index. -keywords: - [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] -sidebar_position: 4 ---- - -Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is -in a merkle tree. - -```rust - -fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { - let leaf = std::hash::hash_to_field(message.as_slice()); - let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); - assert(merkle_root == root); -} - -``` - -The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen -by the backend. The only requirement is that this hash function can heuristically be used as a -random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` -instead. - -```rust -let leaf = std::hash::hash_to_field(message.as_slice()); -``` - -The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. - -```rust -let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); -assert (merkle_root == root); -``` - -> **Note:** It is possible to re-implement the merkle tree implementation without standard library. -> However, for most usecases, it is enough. In general, the standard library will always opt to be -> as conservative as possible, while striking a balance with efficiency. - -An example, the merkle membership proof, only requires a hash function that has collision -resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient -than the even more conservative sha256. - -[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index b8a5d498029..ff3fafa1f90 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -58,54 +58,6 @@ fn main(x: i16, y: i16) { Modulo operation is defined for negative integers thanks to integer division, so that the equality `x = (x/y)*y + (x%y)` holds. -## 128 bits Unsigned Integers - -The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. 
However, there are some differences to keep in mind: -- You cannot cast between a native integer and `U128` -- There is a higher performance cost when using `U128`, compared to a native type. - -Conversion between unsigned integer types and U128 are done through the use of `from_integer` and `to_integer` functions. `from_integer` also accepts the `Field` type as input. - -```rust -fn main() { - let x = U128::from_integer(23); - let y = U128::from_hex("0x7"); - let z = x + y; - assert(z.to_integer() == 30); -} -``` - -`U128` is implemented with two 64 bits limbs, representing the low and high bits, which explains the performance cost. You should expect `U128` to be twice more costly for addition and four times more costly for multiplication. -You can construct a U128 from its limbs: -```rust -fn main(x: u64, y: u64) { - let z = U128::from_u64s_be(x,y); - assert(z.hi == x as Field); - assert(z.lo == y as Field); -} -``` - -Note that the limbs are stored as Field elements in order to avoid unnecessary conversions. -Apart from this, most operations will work as usual: - -```rust -fn main(x: U128, y: U128) { - // multiplication - let c = x * y; - // addition and subtraction - let c = c - x + y; - // division - let c = x / y; - // bit operation; - let c = x & y | y; - // bit shift - let c = x << y; - // comparisons; - let c = x < y; - let c = x == y; -} -``` - ## Overflows Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: diff --git a/noir/noir-repo/docs/docs/noir/concepts/functions.md b/noir/noir-repo/docs/docs/noir/concepts/functions.md index f656cdfd97a..9f40ea9d1d2 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/functions.md +++ b/noir/noir-repo/docs/docs/noir/concepts/functions.md @@ -34,6 +34,23 @@ is pre-pended with a colon and the parameter type. Multiple parameters are separ fn foo(x : Field, y : Field){} ``` +You can use an underscore `_` as a parameter name when you don't need to use the parameter in the function body. This is useful when you need to satisfy a function signature but don't need to use all the parameters: + +```rust +fn foo(_ : Field, y : Field) { + // Only using y parameter +} +``` + +Alternatively, you can prefix a parameter name with an underscore (e.g. `_x`), which also indicates that the parameter is unused. This approach is often preferred as it preserves the parameter name for documentation purposes: + +```rust +fn foo(_x : Field, y : Field) -> Field { + // Only using y parameter + y +} +``` + The return type of a function can be stated by using the `->` arrow notation. The function below states that the foo function must return a `Field`. If the function returns no value, then the arrow is omitted. diff --git a/noir/noir-repo/docs/docs/noir/concepts/traits.md b/noir/noir-repo/docs/docs/noir/concepts/traits.md index 17cc04a9751..af5b396bfb8 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/traits.md +++ b/noir/noir-repo/docs/docs/noir/concepts/traits.md @@ -153,6 +153,37 @@ fn main() { } ``` +## As Trait Syntax + +Rarely, to call a method it may not be sufficient to use the general method call syntax of `obj.method(args)`. +One case where this may happen is if there are two traits in scope which both define a method with the same name. +For example: + +```rust +trait Foo { fn bar(); } +trait Foo2 { fn bar(); } + +fn example<T>() + where T: Foo + Foo2 +{ + // How to call Foo::bar and Foo2::bar?
+} +``` + +In the above example we have both `Foo` and `Foo2` which define a `bar` method. The normal way to resolve +this would be to use the static method syntax `Foo::bar(object)` but there is no object in this case and +`Self` does not appear in the type signature of `bar` at all so we would not know which impl to choose. +For these situations there is the "as trait" syntax: `<Type as Trait>::method(object, args...)` + +```rust +fn example<T>() + where T: Foo + Foo2 +{ + <T as Foo>::bar(); + <T as Foo2>::bar(); +} +``` + ## Generic Implementations You can add generics to a trait implementation by adding the generic list after the `impl` keyword: diff --git a/noir/noir-repo/docs/docs/noir/modules_packages_crates/dependencies.md b/noir/noir-repo/docs/docs/noir/modules_packages_crates/dependencies.md index 22186b22598..cb876532392 100644 --- a/noir/noir-repo/docs/docs/noir/modules_packages_crates/dependencies.md +++ b/noir/noir-repo/docs/docs/noir/modules_packages_crates/dependencies.md @@ -77,14 +77,14 @@ use lib_a; You can also import only the specific parts of a dependency that you want to use, like so: ```rust -use std::hash::sha256; +use std::hash::blake3; use std::scalar_mul::fixed_base_embedded_curve; ``` Lastly, you can import multiple items in the same line by enclosing them in curly braces: ```rust -use std::hash::{keccak256, sha256}; +use std::hash::{blake2s, blake3}; ``` We don't have a way to consume libraries from inside a [workspace](./workspaces.md) as external dependencies right now. diff --git a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md index e9392b20a92..ed905ecb5c2 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md @@ -23,7 +23,7 @@ Here is a list of the current black box functions: - AND - XOR - RANGE -- [Keccak256](./cryptographic_primitives/hashes.mdx#keccak256) +- [Keccakf1600](./cryptographic_primitives/hashes.mdx#keccakf1600) - [Recursive proof verification](./recursion.mdx) Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx index b7518fa95c1..334873e6863 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx +++ b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -1,8 +1,7 @@ --- title: Hash methods description: - Learn about the cryptographic primitives ready to use for any Noir project, including sha256, - blake2s and pedersen + Learn about the cryptographic primitives ready to use for any Noir project keywords: [cryptographic primitives, Noir project, sha256, blake2s, pedersen, hash] sidebar_position: 0 @@ -10,23 +9,11 @@ sidebar_position: 0 import BlackBoxInfo from '@site/src/components/Notes/_blackbox'; -## sha256 +## sha256 compression -Given an array of bytes, returns the resulting sha256 hash. -Specify a message_size to hash only the first `message_size` bytes of the input.
- -#include_code sha256 noir_stdlib/src/hash/sha256.nr rust - -example: -#include_code sha256_var test_programs/execution_success/sha256/src/main.nr rust - -```rust -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let hash = std::sha256::sha256_var(x, 4); -} -``` +Performs a sha256 compression on an input and initial state, returning the resulting state. +#include_code sha256_compression noir_stdlib/src/hash/mod.nr rust @@ -88,17 +75,11 @@ example: -## keccak256 +## keccakf1600 -Given an array of bytes (`u8`), returns the resulting keccak hash as an array of -32 bytes (`[u8; 32]`). Specify a message_size to hash only the first -`message_size` bytes of the input. - -#include_code keccak256 noir_stdlib/src/hash/mod.nr rust - -example: +Given an initial `[u64; 25]` state, returns the state resulting from applying a keccakf1600 permutation (`[u64; 25]`). -#include_code keccak256 test_programs/execution_success/keccak256/src/main.nr rust +#include_code keccakf1600 noir_stdlib/src/hash/mod.nr rust diff --git a/noir/noir-repo/docs/docs/noir/standard_library/merkle_trees.md b/noir/noir-repo/docs/docs/noir/standard_library/merkle_trees.md deleted file mode 100644 index 6a9ebf72ada..00000000000 --- a/noir/noir-repo/docs/docs/noir/standard_library/merkle_trees.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Merkle Trees -description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. -keywords: - [ - Merkle trees in Noir, - Noir programming language, - check membership, - computing root from leaf, - Noir Merkle tree implementation, - Merkle tree tutorial, - Merkle tree code examples, - Noir libraries, - pedersen hash., - ] ---- - -## compute_merkle_root - -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). - -```rust -fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field -``` - -example: - -```rust -/** - // these values are for this example only - index = "0" - priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" - secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" - note_hash_path = [ - "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" - ] - */ -fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { - - let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; - let note_commitment = std::hash::pedersen(&[pubkey_x, pubkey_y, secret]); - - let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path.as_slice()); - println(root); -} -``` - -To check merkle tree membership: - -1. Include a merkle root as a program input. -2. Compute the merkle root of a given leaf, index and hash path. -3. Assert the merkle roots are equal. - -For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). 
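A usage sketch of the two permutation-level primitives now documented in `hashes.mdx` above (illustrative values only, not taken from this diff; the function paths follow the new `noir_stdlib/src/hash/mod.nr`):

```rust
fn main() {
    // sha256_compression folds one 512-bit message block (16 u32 words)
    // into an 8-word state; seeded here with the standard SHA-256 IV.
    let block: [u32; 16] = [0; 16];
    let iv: [u32; 8] = [
        0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
        0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
    ];
    let state = std::hash::sha256_compression(block, iv);
    assert(state != iv);

    // keccakf1600 applies one Keccak-f[1600] permutation to the 25-lane (u64) state.
    let keccak_state: [u64; 25] = [0; 25];
    let permuted = std::hash::keccakf1600(keccak_state);
    assert(permuted != keccak_state);
}
```

Byte-oriented sha256/keccak256 hashing is no longer in the stdlib; the deprecation notes in the removed code below point to the external noir-lang/sha256 and noir-lang/keccak256 libraries.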
diff --git a/noir/noir-repo/docs/docs/noir/standard_library/traits.md b/noir/noir-repo/docs/docs/noir/standard_library/traits.md index e6f6f80ff03..ed923c0707a 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/traits.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/traits.md @@ -71,7 +71,7 @@ As a general rule of thumb, `From` may be implemented in the [situations where i - The conversion is *infallible*: Noir does not provide an equivalent to Rust's `TryFrom`; if the conversion can fail, provide a named method instead. - The conversion is *lossless*: semantically, it should not lose or discard information. For example, `u32: From<u16>` can losslessly convert any `u16` into a valid `u32` such that the original `u16` can be recovered. On the other hand, `u16: From<u32>` should not be implemented as `2**16` is a `u32` which cannot be losslessly converted into a `u16`. - The conversion is *value-preserving*: the conceptual kind and meaning of the resulting value is the same, even though the Noir type and technical representation might be different. While it's possible to infallibly and losslessly convert a `u8` into a `str<2>` hex representation, `4u8` and `"04"` are too different for `str<2>: From<u8>` to be implemented. -- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `U128: From<[u8; 16]>`, the methods `U128::from_le_bytes` and `U128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `U128` from the same byte array. +- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `u128: From<[u8; 16]>`, the methods `u128::from_le_bytes` and `u128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `u128` from the same byte array. One additional recommendation specific to Noir is: - The conversion is *efficient*: it's relatively cheap to convert between the two types. Due to being a ZK DSL, it's more important to avoid unnecessary computation compared to Rust. If the implementation of `From` would encourage users to perform unnecessary conversion, resulting in additional proving time, then it may be preferable to expose functionality such that this conversion may be avoided. diff --git a/noir/noir-repo/docs/docs/tooling/security.md b/noir/noir-repo/docs/docs/tooling/security.md index e14481efc31..8a09d231a7d 100644 --- a/noir/noir-repo/docs/docs/tooling/security.md +++ b/noir/noir-repo/docs/docs/tooling/security.md @@ -39,7 +39,7 @@ Here, the results of `factor` are two elements of the returned array. The value This pass checks if the constraint coverage of Brillig calls is sufficient in these terms. -The check is at the moment disabled by default due to performance concerns and can be enabled by passing the `--enable-brillig-constraints-check` option to `nargo`. +The check is enabled by default and can be disabled by passing the `--skip-brillig-constraints-check` option to `nargo`.
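As a sketch of what the pass looks for, following the `factor` example referenced above (the body of `factor` here is hypothetical; only the call shape matters):

```rust
// Brillig (unconstrained) results are untrusted witness values.
unconstrained fn factor(n: Field) -> [Field; 2] {
    // Hypothetical witness search: return some pair whose product is n.
    [1, n]
}

fn main(n: Field) {
    // Safety: both results are constrained by the assertion below.
    let factors = unsafe { factor(n) };
    // Each element of the Brillig output takes part in a constraint,
    // which is the coverage this check verifies.
    assert_eq(factors[0] * factors[1], n);
}
```

Dropping the final `assert_eq` would leave both outputs unconstrained, which is exactly the situation the (now default-on) check reports.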
#### Lookback option diff --git a/noir/noir-repo/examples/oracle_transcript/Nargo.toml b/noir/noir-repo/examples/oracle_transcript/Nargo.toml new file mode 100644 index 00000000000..3f333c912b0 --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "oracle_transcript" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git a/noir/noir-repo/examples/oracle_transcript/Oracle.jsonl b/noir/noir-repo/examples/oracle_transcript/Oracle.jsonl new file mode 100644 index 00000000000..570e9590761 --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/Oracle.jsonl @@ -0,0 +1,5 @@ +{"call":{"function":"void_field","inputs":[]},"result":{"values":["000000000000000000000000000000000000000000000000000000000000000a"]}} +{"call":{"function":"void_field","inputs":[]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000014"]}} +{"call":{"function":"field_field","inputs":["0000000000000000000000000000000000000000000000000000000000000002"]},"result":{"values":["000000000000000000000000000000000000000000000000000000000000001e"]}} +{"call":{"function":"field_field","inputs":["0000000000000000000000000000000000000000000000000000000000000003"]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000028"]}} +{"call":{"function":"struct_field","inputs":["000000000000000000000000000000000000000000000000000000000000012c","0000000000000000000000000000000000000000000000000000000000000320",["000000000000000000000000000000000000000000000000000000000000000a","0000000000000000000000000000000000000000000000000000000000000014","000000000000000000000000000000000000000000000000000000000000001e","0000000000000000000000000000000000000000000000000000000000000028"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000064"]}} diff --git a/noir/noir-repo/examples/oracle_transcript/Oracle.test.jsonl b/noir/noir-repo/examples/oracle_transcript/Oracle.test.jsonl new file mode 100644 index 00000000000..cebe10d307f --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/Oracle.test.jsonl @@ -0,0 +1,20 @@ +{"call":{"function":"create_mock","inputs":[["0000000000000000000000000000000000000000000000000000000000000076","000000000000000000000000000000000000000000000000000000000000006f","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000064","000000000000000000000000000000000000000000000000000000000000005f","0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000000"]}} +{"call":{"function":"set_mock_returns","inputs":["0000000000000000000000000000000000000000000000000000000000000000","000000000000000000000000000000000000000000000000000000000000000a"]},"result":{"values":[]}} +{"call":{"function":"set_mock_times","inputs":["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000001"]},"result":{"values":[]}} 
+{"call":{"function":"create_mock","inputs":[["0000000000000000000000000000000000000000000000000000000000000076","000000000000000000000000000000000000000000000000000000000000006f","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000064","000000000000000000000000000000000000000000000000000000000000005f","0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000001"]}} +{"call":{"function":"set_mock_returns","inputs":["0000000000000000000000000000000000000000000000000000000000000001","0000000000000000000000000000000000000000000000000000000000000014"]},"result":{"values":[]}} +{"call":{"function":"set_mock_times","inputs":["0000000000000000000000000000000000000000000000000000000000000001","0000000000000000000000000000000000000000000000000000000000000001"]},"result":{"values":[]}} +{"call":{"function":"create_mock","inputs":[["0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064","000000000000000000000000000000000000000000000000000000000000005f","0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000002"]}} +{"call":{"function":"set_mock_params","inputs":["0000000000000000000000000000000000000000000000000000000000000002","0000000000000000000000000000000000000000000000000000000000000002"]},"result":{"values":[]}} +{"call":{"function":"set_mock_returns","inputs":["0000000000000000000000000000000000000000000000000000000000000002","000000000000000000000000000000000000000000000000000000000000001e"]},"result":{"values":[]}} +{"call":{"function":"create_mock","inputs":[["0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064","000000000000000000000000000000000000000000000000000000000000005f","0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000003"]}} 
+{"call":{"function":"set_mock_params","inputs":["0000000000000000000000000000000000000000000000000000000000000003","0000000000000000000000000000000000000000000000000000000000000003"]},"result":{"values":[]}} +{"call":{"function":"set_mock_returns","inputs":["0000000000000000000000000000000000000000000000000000000000000003","0000000000000000000000000000000000000000000000000000000000000028"]},"result":{"values":[]}} +{"call":{"function":"create_mock","inputs":[["0000000000000000000000000000000000000000000000000000000000000073","0000000000000000000000000000000000000000000000000000000000000074","0000000000000000000000000000000000000000000000000000000000000072","0000000000000000000000000000000000000000000000000000000000000075","0000000000000000000000000000000000000000000000000000000000000063","0000000000000000000000000000000000000000000000000000000000000074","000000000000000000000000000000000000000000000000000000000000005f","0000000000000000000000000000000000000000000000000000000000000066","0000000000000000000000000000000000000000000000000000000000000069","0000000000000000000000000000000000000000000000000000000000000065","000000000000000000000000000000000000000000000000000000000000006c","0000000000000000000000000000000000000000000000000000000000000064"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000004"]}} +{"call":{"function":"set_mock_params","inputs":["0000000000000000000000000000000000000000000000000000000000000004","000000000000000000000000000000000000000000000000000000000000012c","0000000000000000000000000000000000000000000000000000000000000320",["000000000000000000000000000000000000000000000000000000000000000a","0000000000000000000000000000000000000000000000000000000000000014","000000000000000000000000000000000000000000000000000000000000001e","0000000000000000000000000000000000000000000000000000000000000028"]]},"result":{"values":[]}} +{"call":{"function":"set_mock_returns","inputs":["0000000000000000000000000000000000000000000000000000000000000004","0000000000000000000000000000000000000000000000000000000000000064"]},"result":{"values":[]}} +{"call":{"function":"void_field","inputs":[]},"result":{"values":["000000000000000000000000000000000000000000000000000000000000000a"]}} +{"call":{"function":"void_field","inputs":[]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000014"]}} +{"call":{"function":"field_field","inputs":["0000000000000000000000000000000000000000000000000000000000000002"]},"result":{"values":["000000000000000000000000000000000000000000000000000000000000001e"]}} +{"call":{"function":"field_field","inputs":["0000000000000000000000000000000000000000000000000000000000000003"]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000028"]}} +{"call":{"function":"struct_field","inputs":["000000000000000000000000000000000000000000000000000000000000012c","0000000000000000000000000000000000000000000000000000000000000320",["000000000000000000000000000000000000000000000000000000000000000a","0000000000000000000000000000000000000000000000000000000000000014","000000000000000000000000000000000000000000000000000000000000001e","0000000000000000000000000000000000000000000000000000000000000028"]]},"result":{"values":["0000000000000000000000000000000000000000000000000000000000000064"]}} diff --git a/noir/noir-repo/examples/oracle_transcript/Prover.toml b/noir/noir-repo/examples/oracle_transcript/Prover.toml new file mode 100644 index 00000000000..eb8504c2b0c --- /dev/null +++ 
b/noir/noir-repo/examples/oracle_transcript/Prover.toml @@ -0,0 +1,6 @@ + +[input] +x = 2 +y = 3 + +return = 100 diff --git a/noir/noir-repo/examples/oracle_transcript/log_and_exec_transcript.sh b/noir/noir-repo/examples/oracle_transcript/log_and_exec_transcript.sh new file mode 100755 index 00000000000..c6e5066f158 --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/log_and_exec_transcript.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -eu + +cd $(dirname $0) + +# Execute the test to capture oracle calls. +NARGO_TEST_FOREIGN_CALL_LOG=Oracle.test.jsonl nargo test + +# Get rid of the mock setup calls +cat Oracle.test.jsonl \ + | jq --slurp -r -c '.[] | select(.call.function | contains("mock") | not)' \ + > Oracle.jsonl + +# Execute `main` with the Prover.toml and Oracle.jsonl files. +nargo execute --skip-underconstrained-check --oracle-file Oracle.jsonl + +# Also execute through `noir-execute` +noir-execute \ + --artifact-path target/oracle_transcript.json \ + --oracle-file Oracle.jsonl \ + --prover-file Prover.toml \ + --output-dir target diff --git a/noir/noir-repo/examples/oracle_transcript/src/main.nr b/noir/noir-repo/examples/oracle_transcript/src/main.nr new file mode 100644 index 00000000000..585ff2af2b2 --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/src/main.nr @@ -0,0 +1,63 @@ +use std::test::OracleMock; + +struct Point { + x: Field, + y: Field, +} + +impl Eq for Point { + fn eq(self, other: Point) -> bool { + (self.x == other.x) & (self.y == other.y) + } +} + +#[oracle(void_field)] +unconstrained fn void_field_oracle() -> Field {} + +unconstrained fn void_field() -> Field { + void_field_oracle() +} + +#[oracle(field_field)] +unconstrained fn field_field_oracle(_x: Field) -> Field {} + +unconstrained fn field_field(x: Field) -> Field { + field_field_oracle(x) +} + +#[oracle(struct_field)] +unconstrained fn struct_field_oracle(_point: Point, _array: [Field; 4]) -> Field {} + +unconstrained fn struct_field(point: Point, array: [Field; 4]) -> Field { + struct_field_oracle(point, array) +} + +fn main(input: Point) -> pub Field { + // Safety: testing context + unsafe { + let a = void_field(); + let b = void_field(); + let c = field_field(input.x); + let d = field_field(input.y); + let p = Point { x: a * c, y: b * d }; + struct_field(p, [a, b, c, d]) + } +} + +/// This test is used to capture an oracle transcript, which can then be replayed +/// during execution. +#[test] +fn test_main() { + // Safety: testing context + unsafe { + let _ = OracleMock::mock("void_field").returns(10).times(1); + let _ = OracleMock::mock("void_field").returns(20).times(1); + let _ = OracleMock::mock("field_field").with_params((2,)).returns(30); + let _ = OracleMock::mock("field_field").with_params((3,)).returns(40); + let _ = OracleMock::mock("struct_field") + .with_params((Point { x: 300, y: 800 }, [10, 20, 30, 40])) + .returns(100); + } + let output = main(Point { x: 2, y: 3 }); + assert_eq(output, 100) +} diff --git a/noir/noir-repo/examples/oracle_transcript/test.sh b/noir/noir-repo/examples/oracle_transcript/test.sh new file mode 100755 index 00000000000..8f43c3b8bb9 --- /dev/null +++ b/noir/noir-repo/examples/oracle_transcript/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +cd $(dirname $0) + +# This file is used for Noir CI and is not required. 
+ +rm -f ./Oracle.* + +./log_and_exec_transcript.sh diff --git a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr b/noir/noir-repo/noir_stdlib/src/hash/keccak.nr deleted file mode 100644 index 75be7982e66..00000000000 --- a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr +++ /dev/null @@ -1,155 +0,0 @@ -use crate::runtime::is_unconstrained; - -global BLOCK_SIZE_IN_BYTES: u32 = 136; //(1600 - BITS * 2) / WORD_SIZE; -global WORD_SIZE: u32 = 8; // Limbs are made up of u64s so 8 bytes each. -global LIMBS_PER_BLOCK: u32 = BLOCK_SIZE_IN_BYTES / WORD_SIZE; -global NUM_KECCAK_LANES: u32 = 25; - -#[foreign(keccakf1600)] -pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {} - -#[no_predicates] -#[deprecated("keccak256 is being deprecated from the stdlib, use https://github.com/noir-lang/keccak256 instead")] -pub(crate) fn keccak256<let N: u32>(input: [u8; N], message_size: u32) -> [u8; 32] { - assert(N >= message_size); - - // Copy input to block bytes. For that we'll need at least input bytes (N) - // but we want it to be padded to a multiple of BLOCK_SIZE_IN_BYTES. - let mut block_bytes = [0; ((N / BLOCK_SIZE_IN_BYTES) + 1) * BLOCK_SIZE_IN_BYTES]; - if is_unconstrained() { - for i in 0..message_size { - block_bytes[i] = input[i]; - } - } else { - for i in 0..N { - if i < message_size { - block_bytes[i] = input[i]; - } - } - } - - //1. format_input_lanes - let max_blocks = (N + BLOCK_SIZE_IN_BYTES) / BLOCK_SIZE_IN_BYTES; - //maximum number of bytes to hash - let real_max_blocks = (message_size + BLOCK_SIZE_IN_BYTES) / BLOCK_SIZE_IN_BYTES; - let real_blocks_bytes = real_max_blocks * BLOCK_SIZE_IN_BYTES; - - block_bytes[message_size] = 1; - block_bytes[real_blocks_bytes - 1] = 0x80; - - // populate a vector of 64-bit limbs from our byte array - let mut sliced_buffer = - [0; (((N / BLOCK_SIZE_IN_BYTES) + 1) * BLOCK_SIZE_IN_BYTES) / WORD_SIZE]; - for i in 0..sliced_buffer.len() { - let limb_start = WORD_SIZE * i; - - let mut sliced = 0; - let mut v = 1; - for k in 0..WORD_SIZE { - sliced += v * (block_bytes[limb_start + k] as Field); - v *= 256; - } - - sliced_buffer[i] = sliced as u64; - } - - //2. sponge_absorb - let mut state: [u64; NUM_KECCAK_LANES] = [0; NUM_KECCAK_LANES]; - // When in an unconstrained runtime we can take advantage of runtime loop bounds, - // thus allowing us to simplify the loop body. - if is_unconstrained() { - for i in 0..real_max_blocks { - if (i == 0) { - for j in 0..LIMBS_PER_BLOCK { - state[j] = sliced_buffer[j]; - } - } else { - for j in 0..LIMBS_PER_BLOCK { - state[j] = state[j] ^ sliced_buffer[i * LIMBS_PER_BLOCK + j]; - } - } - state = keccakf1600(state); - } - } else { - // `real_max_blocks` is guaranteed to at least be `1` - // We peel out the first block as to avoid a conditional inside of the loop. - // Otherwise, a dynamic predicate can cause a blowup in a constrained runtime. - for j in 0..LIMBS_PER_BLOCK { - state[j] = sliced_buffer[j]; - } - state = keccakf1600(state); - for i in 1..max_blocks { - if i < real_max_blocks { - for j in 0..LIMBS_PER_BLOCK { - state[j] = state[j] ^ sliced_buffer[i * LIMBS_PER_BLOCK + j]; - } - state = keccakf1600(state); - } - } - } - - //3.
sponge_squeeze - let mut result = [0; 32]; - for i in 0..4 { - let lane = state[i] as Field; - let lane_le: [u8; 8] = lane.to_le_bytes(); - for j in 0..8 { - result[8 * i + j] = lane_le[j]; - } - } - result -} - -mod tests { - use super::keccak256; - - #[test] - fn smoke_test() { - let input = [0xbd]; - let result = [ - 0x5a, 0x50, 0x2f, 0x9f, 0xca, 0x46, 0x7b, 0x26, 0x6d, 0x5b, 0x78, 0x33, 0x65, 0x19, - 0x37, 0xe8, 0x05, 0x27, 0x0c, 0xa3, 0xf3, 0xaf, 0x1c, 0x0d, 0xd2, 0x46, 0x2d, 0xca, - 0x4b, 0x3b, 0x1a, 0xbf, - ]; - assert_eq(keccak256(input, input.len()), result); - } - - #[test] - fn hash_hello_world() { - let input = "Hello world!".as_bytes(); - let result = [ - 0xec, 0xd0, 0xe1, 0x8, 0xa9, 0x8e, 0x19, 0x2a, 0xf1, 0xd2, 0xc2, 0x50, 0x55, 0xf4, 0xe3, - 0xbe, 0xd7, 0x84, 0xb5, 0xc8, 0x77, 0x20, 0x4e, 0x73, 0x21, 0x9a, 0x52, 0x3, 0x25, 0x1f, - 0xea, 0xab, - ]; - assert_eq(keccak256(input, input.len()), result); - } - - #[test] - fn var_size_hash() { - let input = [ - 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, - 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, - 223, - ]; - let result = [ - 226, 37, 115, 94, 94, 196, 72, 116, 194, 105, 79, 233, 65, 12, 30, 94, 181, 131, 170, - 219, 171, 166, 236, 88, 143, 67, 255, 160, 248, 214, 39, 129, - ]; - assert_eq(keccak256(input, 13), result); - } - - #[test] - fn hash_longer_than_136_bytes() { - let input = "123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789" - .as_bytes(); - assert(input.len() > 136); - - let result = [ - 0x1d, 0xca, 0xeb, 0xdf, 0xd9, 0xd6, 0x24, 0x67, 0x1c, 0x18, 0x16, 0xda, 0xd, 0x8a, 0xeb, - 0xa8, 0x75, 0x71, 0x2c, 0xc, 0x89, 0xe0, 0x25, 0x2, 0xe8, 0xb6, 0x5e, 0x16, 0x5, 0x55, - 0xe4, 0x40, - ]; - assert_eq(keccak256(input, input.len()), result); - } -} diff --git a/noir/noir-repo/noir_stdlib/src/hash/mod.nr b/noir/noir-repo/noir_stdlib/src/hash/mod.nr index 1ded89ec80d..7a492d373cc 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/mod.nr @@ -1,18 +1,28 @@ pub mod poseidon; pub mod poseidon2; -pub mod keccak; -pub mod sha256; -pub mod sha512; use crate::default::Default; use crate::embedded_curve_ops::{ EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_array_return, }; use crate::meta::derive_via; -use crate::uint128::U128; -// Kept for backwards compatibility -pub use sha256::{digest, sha256, sha256_compression, sha256_var}; +#[foreign(sha256_compression)] +// docs:start:sha256_compression +pub fn sha256_compression(input: [u32; 16], state: [u32; 8]) -> [u32; 8] {} +// docs:end:sha256_compression + +#[foreign(keccakf1600)] +// docs:start:keccakf1600 +pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] {} +// docs:end:keccakf1600 + +pub mod keccak { + #[deprecated("This function has been moved to std::hash::keccakf1600")] + pub fn keccakf1600(input: [u64; 25]) -> [u64; 25] { + super::keccakf1600(input) + } +} #[foreign(blake2s)] // docs:start:blake2s @@ -114,13 +124,6 @@ pub fn hash_to_field(inputs: [Field]) -> Field { sum } -// docs:start:keccak256 -pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] -// docs:end:keccak256 -{ - crate::hash::keccak::keccak256(input, message_size) -} - #[foreign(poseidon2_permutation)] pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} @@ -302,16 +305,6 @@ impl Hash for () { {} } -impl Hash for U128 { - fn 
hash(self, state: &mut H) - where - H: Hasher, - { - H::write(state, self.lo as Field); - H::write(state, self.hi as Field); - } -} - impl Hash for [T; N] where T: Hash, diff --git a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr b/noir/noir-repo/noir_stdlib/src/hash/sha256.nr deleted file mode 100644 index a8bd71a2111..00000000000 --- a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr +++ /dev/null @@ -1,845 +0,0 @@ -use crate::runtime::is_unconstrained; - -// Implementation of SHA-256 mapping a byte array of variable length to -// 32 bytes. - -// A message block is up to 64 bytes taken from the input. -global BLOCK_SIZE: u32 = 64; - -// The first index in the block where the 8 byte message size will be written. -global MSG_SIZE_PTR: u32 = 56; - -// Size of the message block when packed as 4-byte integer array. -global INT_BLOCK_SIZE: u32 = 16; - -// A `u32` integer consists of 4 bytes. -global INT_SIZE: u32 = 4; - -// Index of the integer in the `INT_BLOCK` where the length is written. -global INT_SIZE_PTR: u32 = MSG_SIZE_PTR / INT_SIZE; - -// Magic numbers for bit shifting. -// Works with actual bit shifting as well as the compiler turns them into * and / -// but circuit execution appears to be 10% faster this way. -global TWO_POW_8: u32 = 256; -global TWO_POW_16: u32 = TWO_POW_8 * 256; -global TWO_POW_24: u32 = TWO_POW_16 * 256; -global TWO_POW_32: u64 = TWO_POW_24 as u64 * 256; - -// Index of a byte in a 64 byte block; ie. 0..=63 -type BLOCK_BYTE_PTR = u32; - -// The foreign function to compress blocks works on 16 pieces of 4-byte integers, instead of 64 bytes. -type INT_BLOCK = [u32; INT_BLOCK_SIZE]; - -// A message block is a slice of the original message of a fixed size, -// potentially padded with zeros, with neighbouring 4 bytes packed into integers. -type MSG_BLOCK = INT_BLOCK; - -// The hash is 32 bytes. -type HASH = [u8; 32]; - -// The state accumulates the blocks. -// Its overall size is the same as the `HASH`. 
-type STATE = [u32; 8]; - -// docs:start:sha256 -#[deprecated("sha256 is being deprecated from the stdlib, use https://github.com/noir-lang/sha256 instead")] -pub fn sha256(input: [u8; N]) -> HASH -// docs:end:sha256 -{ - digest(input) -} - -#[foreign(sha256_compression)] -pub fn sha256_compression(_input: INT_BLOCK, _state: STATE) -> STATE {} - -// SHA-256 hash function -#[no_predicates] -#[deprecated("sha256 is being deprecated from the stdlib, use https://github.com/noir-lang/sha256 instead")] -pub fn digest(msg: [u8; N]) -> HASH { - sha256_var(msg, N as u64) -} - -// Variable size SHA-256 hash -#[deprecated("sha256 is being deprecated from the stdlib, use https://github.com/noir-lang/sha256 instead")] -pub fn sha256_var(msg: [u8; N], message_size: u64) -> HASH { - let message_size = message_size as u32; - let num_blocks = N / BLOCK_SIZE; - let mut msg_block: MSG_BLOCK = [0; INT_BLOCK_SIZE]; - // Intermediate hash, starting with the canonical initial value - let mut h: STATE = [ - 1779033703, 3144134277, 1013904242, 2773480762, 1359893119, 2600822924, 528734635, - 1541459225, - ]; - // Pointer into msg_block on a 64 byte scale - let mut msg_byte_ptr = 0; - for i in 0..num_blocks { - let msg_start = BLOCK_SIZE * i; - // Safety: the msg_block is checked below in verify_msg_block - let (new_msg_block, new_msg_byte_ptr) = - unsafe { build_msg_block(msg, message_size, msg_start) }; - - if msg_start < message_size { - msg_block = new_msg_block; - } - - if !is_unconstrained() { - // Verify the block we are compressing was appropriately constructed - let new_msg_byte_ptr = verify_msg_block(msg, message_size, msg_block, msg_start); - if msg_start < message_size { - msg_byte_ptr = new_msg_byte_ptr; - } - } else if msg_start < message_size { - msg_byte_ptr = new_msg_byte_ptr; - } - - // If the block is filled, compress it. - // An un-filled block is handled after this loop. - if (msg_start < message_size) & (msg_byte_ptr == BLOCK_SIZE) { - h = sha256_compression(msg_block, h); - } - } - - let modulo = N % BLOCK_SIZE; - // Handle setup of the final msg block. - // This case is only hit if the msg is less than the block size, - // or our message cannot be evenly split into blocks. - if modulo != 0 { - let msg_start = BLOCK_SIZE * num_blocks; - // Safety: the msg_block is checked below in verify_msg_block - let (new_msg_block, new_msg_byte_ptr) = - unsafe { build_msg_block(msg, message_size, msg_start) }; - - if msg_start < message_size { - msg_block = new_msg_block; - } - - if !is_unconstrained() { - let new_msg_byte_ptr = verify_msg_block(msg, message_size, msg_block, msg_start); - if msg_start < message_size { - msg_byte_ptr = new_msg_byte_ptr; - verify_msg_block_padding(msg_block, msg_byte_ptr); - } - } else if msg_start < message_size { - msg_byte_ptr = new_msg_byte_ptr; - } - } - - // If we had modulo == 0 then it means the last block was full, - // and we can reset the pointer to zero to overwrite it. - if msg_byte_ptr == BLOCK_SIZE { - msg_byte_ptr = 0; - } - - // Pad the rest such that we have a [u32; 2] block at the end representing the length - // of the message, and a block of 1 0 ... 0 following the message (i.e. [1 << 7, 0, ..., 0]). - // Here we rely on the fact that everything beyond the available input is set to 0. 
- msg_block = update_block_item( - msg_block, - msg_byte_ptr, - |msg_item| set_item_byte_then_zeros(msg_item, msg_byte_ptr, 1 << 7), - ); - msg_byte_ptr = msg_byte_ptr + 1; - let last_block = msg_block; - - // If we don't have room to write the size, compress the block and reset it. - if msg_byte_ptr > MSG_SIZE_PTR { - h = sha256_compression(msg_block, h); - // `attach_len_to_msg_block` will zero out everything after the `msg_byte_ptr`. - msg_byte_ptr = 0; - } - - // Safety: the msg_len is checked below in verify_msg_len - msg_block = unsafe { attach_len_to_msg_block(msg_block, msg_byte_ptr, message_size) }; - - if !is_unconstrained() { - verify_msg_len(msg_block, last_block, msg_byte_ptr, message_size); - } - - hash_final_block(msg_block, h) -} - -// Take `BLOCK_SIZE` number of bytes from `msg` starting at `msg_start`. -// Returns the block and the length that has been copied rather than padded with zeros. -unconstrained fn build_msg_block( - msg: [u8; N], - message_size: u32, - msg_start: u32, -) -> (MSG_BLOCK, BLOCK_BYTE_PTR) { - let mut msg_block: MSG_BLOCK = [0; INT_BLOCK_SIZE]; - - // We insert `BLOCK_SIZE` bytes (or up to the end of the message) - let block_input = if msg_start + BLOCK_SIZE > message_size { - if message_size < msg_start { - // This function is sometimes called with `msg_start` past the end of the message. - // In this case we return an empty block and zero pointer to signal that the result should be ignored. - 0 - } else { - message_size - msg_start - } - } else { - BLOCK_SIZE - }; - - // Figure out the number of items in the int array that we have to pack. - // e.g. if the input is [0,1,2,3,4,5] then we need to pack it as 2 items: [0123, 4500] - let mut int_input = block_input / INT_SIZE; - if block_input % INT_SIZE != 0 { - int_input = int_input + 1; - }; - - for i in 0..int_input { - let mut msg_item: u32 = 0; - // Always construct the integer as 4 bytes, even if it means going beyond the input. - for j in 0..INT_SIZE { - let k = i * INT_SIZE + j; - let msg_byte = if k < block_input { - msg[msg_start + k] - } else { - 0 - }; - msg_item = lshift8(msg_item, 1) + msg_byte as u32; - } - msg_block[i] = msg_item; - } - - // Returning the index as if it was a 64 byte array. - // We have to project it down to 16 items and bit shifting to get a byte back if we need it. - (msg_block, block_input) -} - -// Verify the block we are compressing was appropriately constructed by `build_msg_block` -// and matches the input data. Returns the index of the first unset item. -// If `message_size` is less than `msg_start` then this is called with the old non-empty block; -// in that case we can skip verification, ie. no need to check that everything is zero. -fn verify_msg_block( - msg: [u8; N], - message_size: u32, - msg_block: MSG_BLOCK, - msg_start: u32, -) -> BLOCK_BYTE_PTR { - let mut msg_byte_ptr = 0; - let mut msg_end = msg_start + BLOCK_SIZE; - if msg_end > N { - msg_end = N; - } - // We might have to go beyond the input to pad the fields. - if msg_end % INT_SIZE != 0 { - msg_end = msg_end + INT_SIZE - msg_end % INT_SIZE; - } - - // Reconstructed packed item. - let mut msg_item: u32 = 0; - - // Inclusive at the end so that we can compare the last item. - let mut i: u32 = 0; - for k in msg_start..=msg_end { - if k % INT_SIZE == 0 { - // If we consumed some input we can compare against the block. 
- if (msg_start < message_size) & (k > msg_start) { - assert_eq(msg_block[i], msg_item as u32); - i = i + 1; - msg_item = 0; - } - } - // Shift the accumulator - msg_item = lshift8(msg_item, 1); - // If we have input to consume, add it at the rightmost position. - if k < message_size & k < msg_end { - msg_item = msg_item + msg[k] as u32; - msg_byte_ptr = msg_byte_ptr + 1; - } - } - - msg_byte_ptr -} - -// Verify the block we are compressing was appropriately padded with zeros by `build_msg_block`. -// This is only relevant for the last, potentially partially filled block. -fn verify_msg_block_padding(msg_block: MSG_BLOCK, msg_byte_ptr: BLOCK_BYTE_PTR) { - // Check all the way to the end of the block. - verify_msg_block_zeros(msg_block, msg_byte_ptr, INT_BLOCK_SIZE); -} - -// Verify that a region of ints in the message block are (partially) zeroed, -// up to an (exclusive) maximum which can either be the end of the block -// or just where the size is to be written. -fn verify_msg_block_zeros( - msg_block: MSG_BLOCK, - mut msg_byte_ptr: BLOCK_BYTE_PTR, - max_int_byte_ptr: u32, -) { - // This variable is used to get around the compiler under-constrained check giving a warning. - // We want to check against a constant zero, but if it does not come from the circuit inputs - // or return values the compiler check will issue a warning. - let zero = msg_block[0] - msg_block[0]; - - // First integer which is supposed to be (partially) zero. - let mut int_byte_ptr = msg_byte_ptr / INT_SIZE; - - // Check partial zeros. - let modulo = msg_byte_ptr % INT_SIZE; - if modulo != 0 { - let zeros = INT_SIZE - modulo; - let mask = if zeros == 3 { - TWO_POW_24 - } else if zeros == 2 { - TWO_POW_16 - } else { - TWO_POW_8 - }; - assert_eq(msg_block[int_byte_ptr] % mask, zero); - int_byte_ptr = int_byte_ptr + 1; - } - - // Check the rest of the items. - for i in 0..max_int_byte_ptr { - if i >= int_byte_ptr { - assert_eq(msg_block[i], zero); - } - } -} - -// Verify that up to the byte pointer the two blocks are equal. -// At the byte pointer the new block can be partially zeroed. -fn verify_msg_block_equals_last( - msg_block: MSG_BLOCK, - last_block: MSG_BLOCK, - mut msg_byte_ptr: BLOCK_BYTE_PTR, -) { - // msg_byte_ptr is the position at which they are no longer have to be the same. - // First integer which is supposed to be (partially) zero contains that pointer. - let mut int_byte_ptr = msg_byte_ptr / INT_SIZE; - - // Check partial zeros. - let modulo = msg_byte_ptr % INT_SIZE; - if modulo != 0 { - // Reconstruct the partially zero item from the last block. - let last_field = last_block[int_byte_ptr]; - let mut msg_item: u32 = 0; - // Reset to where they are still equal. - msg_byte_ptr = msg_byte_ptr - modulo; - for i in 0..INT_SIZE { - msg_item = lshift8(msg_item, 1); - if i < modulo { - msg_item = msg_item + get_item_byte(last_field, msg_byte_ptr) as u32; - msg_byte_ptr = msg_byte_ptr + 1; - } - } - assert_eq(msg_block[int_byte_ptr], msg_item); - } - - for i in 0..INT_SIZE_PTR { - if i < int_byte_ptr { - assert_eq(msg_block[i], last_block[i]); - } - } -} - -// Apply a function on the block item which the pointer indicates. -fn update_block_item( - mut msg_block: MSG_BLOCK, - msg_byte_ptr: BLOCK_BYTE_PTR, - f: fn[Env](u32) -> u32, -) -> MSG_BLOCK { - let i = msg_byte_ptr / INT_SIZE; - msg_block[i] = f(msg_block[i]); - msg_block -} - -// Set the rightmost `zeros` number of bytes to 0. 
-fn set_item_zeros(item: u32, zeros: u8) -> u32 { - lshift8(rshift8(item, zeros), zeros) -} - -// Replace one byte in the item with a value, and set everything after it to zero. -fn set_item_byte_then_zeros(msg_item: u32, msg_byte_ptr: BLOCK_BYTE_PTR, msg_byte: u8) -> u32 { - let zeros = INT_SIZE - msg_byte_ptr % INT_SIZE; - let zeroed_item = set_item_zeros(msg_item, zeros as u8); - let new_item = byte_into_item(msg_byte, msg_byte_ptr); - zeroed_item + new_item -} - -// Get a byte of a message item according to its overall position in the `BLOCK_SIZE` space. -fn get_item_byte(mut msg_item: u32, msg_byte_ptr: BLOCK_BYTE_PTR) -> u8 { - // How many times do we have to shift to the right to get to the position we want? - let max_shifts = INT_SIZE - 1; - let shifts = max_shifts - msg_byte_ptr % INT_SIZE; - msg_item = rshift8(msg_item, shifts as u8); - // At this point the byte we want is in the rightmost position. - msg_item as u8 -} - -// Project a byte into a position in a field based on the overall block pointer. -// For example putting 1 into pointer 5 would be 100, because overall we would -// have [____, 0100] with indexes [0123,4567]. -fn byte_into_item(msg_byte: u8, msg_byte_ptr: BLOCK_BYTE_PTR) -> u32 { - let mut msg_item = msg_byte as u32; - // How many times do we have to shift to the left to get to the position we want? - let max_shifts = INT_SIZE - 1; - let shifts = max_shifts - msg_byte_ptr % INT_SIZE; - lshift8(msg_item, shifts as u8) -} - -// Construct a field out of 4 bytes. -fn make_item(b0: u8, b1: u8, b2: u8, b3: u8) -> u32 { - let mut item = b0 as u32; - item = lshift8(item, 1) + b1 as u32; - item = lshift8(item, 1) + b2 as u32; - item = lshift8(item, 1) + b3 as u32; - item -} - -// Shift by 8 bits to the left between 0 and 4 times. -// Checks `is_unconstrained()` to just use a bitshift if we're running in an unconstrained context, -// otherwise multiplies by 256. -fn lshift8(item: u32, shifts: u8) -> u32 { - if is_unconstrained() { - if item == 0 { - 0 - } else { - // Brillig wouldn't shift 0<<4 without overflow. - item << (8 * shifts) - } - } else { - // We can do a for loop up to INT_SIZE or an if-else. - if shifts == 0 { - item - } else if shifts == 1 { - item * TWO_POW_8 - } else if shifts == 2 { - item * TWO_POW_16 - } else if shifts == 3 { - item * TWO_POW_24 - } else { - // Doesn't make sense, but it's most likely called on 0 anyway. - 0 - } - } -} - -// Shift by 8 bits to the right between 0 and 4 times. -// Checks `is_unconstrained()` to just use a bitshift if we're running in an unconstrained context, -// otherwise divides by 256. -fn rshift8(item: u32, shifts: u8) -> u32 { - if is_unconstrained() { - item >> (8 * shifts) - } else { - // Division wouldn't work on `Field`. - if shifts == 0 { - item - } else if shifts == 1 { - item / TWO_POW_8 - } else if shifts == 2 { - item / TWO_POW_16 - } else if shifts == 3 { - item / TWO_POW_24 - } else { - 0 - } - } -} - -// Zero out all bytes between the end of the message and where the length is appended, -// then write the length into the last 8 bytes of the block. -unconstrained fn attach_len_to_msg_block( - mut msg_block: MSG_BLOCK, - mut msg_byte_ptr: BLOCK_BYTE_PTR, - message_size: u32, -) -> MSG_BLOCK { - // We assume that `msg_byte_ptr` is less than 57 because if not then it is reset to zero before calling this function. - // In any case, fill blocks up with zeros until the last 64 bits (i.e. until msg_byte_ptr = 56). - // There can be one item which has to be partially zeroed. 
- let modulo = msg_byte_ptr % INT_SIZE; - if modulo != 0 { - // Index of the block in which we find the item we need to partially zero. - let i = msg_byte_ptr / INT_SIZE; - let zeros = INT_SIZE - modulo; - msg_block[i] = set_item_zeros(msg_block[i], zeros as u8); - msg_byte_ptr = msg_byte_ptr + zeros; - } - - // The rest can be zeroed without bit shifting anything. - for i in (msg_byte_ptr / INT_SIZE)..INT_SIZE_PTR { - msg_block[i] = 0; - } - - // Set the last two 4 byte ints as the first/second half of the 8 bytes of the length. - let len = 8 * message_size; - let len_bytes: [u8; 8] = (len as Field).to_be_bytes(); - for i in 0..=1 { - let shift = i * 4; - msg_block[INT_SIZE_PTR + i] = make_item( - len_bytes[shift], - len_bytes[shift + 1], - len_bytes[shift + 2], - len_bytes[shift + 3], - ); - } - msg_block -} - -// Verify that the message length was correctly written by `attach_len_to_msg_block`, -// and that everything between the byte pointer and the size pointer was zeroed, -// and that everything before the byte pointer was untouched. -fn verify_msg_len( - msg_block: MSG_BLOCK, - last_block: MSG_BLOCK, - msg_byte_ptr: BLOCK_BYTE_PTR, - message_size: u32, -) { - // Check zeros up to the size pointer. - verify_msg_block_zeros(msg_block, msg_byte_ptr, INT_SIZE_PTR); - - // Check that up to the pointer we match the last block. - verify_msg_block_equals_last(msg_block, last_block, msg_byte_ptr); - - // We verify the message length was inserted correctly by reversing the byte decomposition. - let mut reconstructed_len: u64 = 0; - for i in INT_SIZE_PTR..INT_BLOCK_SIZE { - reconstructed_len = reconstructed_len * TWO_POW_32; - reconstructed_len = reconstructed_len + msg_block[i] as u64; - } - let len = 8 * message_size as u64; - assert_eq(reconstructed_len, len); -} - -// Perform the final compression, then transform the `STATE` into `HASH`. 
-fn hash_final_block(msg_block: MSG_BLOCK, mut state: STATE) -> HASH { - let mut out_h: HASH = [0; 32]; // Digest as sequence of bytes - // Hash final padded block - state = sha256_compression(msg_block, state); - - // Return final hash as byte array - for j in 0..8 { - let h_bytes: [u8; 4] = (state[j] as Field).to_be_bytes(); - for k in 0..4 { - out_h[4 * j + k] = h_bytes[k]; - } - } - - out_h -} - -mod tests { - use super::{ - attach_len_to_msg_block, build_msg_block, byte_into_item, get_item_byte, make_item, - set_item_byte_then_zeros, set_item_zeros, - }; - use super::INT_BLOCK; - use super::sha256_var; - - #[test] - fn smoke_test() { - let input = [0xbd]; - let result = [ - 0x68, 0x32, 0x57, 0x20, 0xaa, 0xbd, 0x7c, 0x82, 0xf3, 0x0f, 0x55, 0x4b, 0x31, 0x3d, - 0x05, 0x70, 0xc9, 0x5a, 0xcc, 0xbb, 0x7d, 0xc4, 0xb5, 0xaa, 0xe1, 0x12, 0x04, 0xc0, - 0x8f, 0xfe, 0x73, 0x2b, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn msg_just_over_block() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, - 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, - ]; - let result = [ - 91, 122, 146, 93, 52, 109, 133, 148, 171, 61, 156, 70, 189, 238, 153, 7, 222, 184, 94, - 24, 65, 114, 192, 244, 207, 199, 87, 232, 192, 224, 171, 207, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn msg_multiple_over_block() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, - 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 115, 45, 97, 115, - 99, 105, 105, 13, 10, 109, 105, 109, 101, 45, 118, 101, 114, 115, 105, 111, 110, 58, 49, - 46, 48, 32, 40, 77, 97, 99, 32, 79, 83, 32, 88, 32, 77, 97, 105, 108, 32, 49, 54, 46, - 48, 32, 92, 40, 51, 55, 51, 49, 46, 53, 48, 48, 46, 50, 51, 49, 92, 41, 41, 13, 10, 115, - 117, 98, 106, 101, 99, 116, 58, 72, 101, 108, 108, 111, 13, 10, 109, 101, 115, 115, 97, - 103, 101, 45, 105, 100, 58, 60, 56, 70, 56, 49, 57, 68, 51, 50, 45, 66, 54, 65, 67, 45, - 52, 56, 57, 68, 45, 57, 55, 55, 70, 45, 52, 51, 56, 66, 66, 67, 52, 67, 65, 66, 50, 55, - 64, 109, 101, 46, 99, 111, 109, 62, 13, 10, 100, 97, 116, 101, 58, 83, 97, 116, 44, 32, - 50, 54, 32, 65, 117, 103, 32, 50, 48, 50, 51, 32, 49, 50, 58, 50, 53, 58, 50, 50, 32, - 43, 48, 52, 48, 48, 13, 10, 116, 111, 58, 122, 107, 101, 119, 116, 101, 115, 116, 64, - 103, 109, 97, 105, 108, 46, 99, 111, 109, 13, 10, 100, 107, 105, 109, 45, 115, 105, 103, - 110, 97, 116, 117, 114, 101, 58, 118, 61, 49, 59, 32, 97, 61, 114, 115, 97, 45, 115, - 104, 97, 50, 53, 54, 59, 32, 99, 61, 114, 101, 108, 97, 120, 101, 100, 47, 114, 101, - 108, 97, 120, 101, 100, 59, 32, 100, 61, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, - 59, 32, 115, 61, 49, 97, 49, 104, 97, 105, 59, 32, 116, 61, 49, 54, 57, 51, 48, 51, 56, - 51, 51, 55, 59, 32, 98, 104, 61, 55, 120, 81, 77, 68, 117, 111, 86, 86, 85, 52, 109, 48, - 87, 48, 87, 82, 86, 83, 114, 86, 88, 77, 101, 71, 83, 73, 65, 83, 115, 110, 117, 99, 75, - 57, 100, 74, 115, 114, 99, 43, 118, 85, 61, 59, 32, 104, 61, 102, 114, 111, 109, 58, 67, - 111, 110, 116, 101, 110, 116, 
45, 84, 121, 112, 101, 58, 77, 105, 109, 101, 45, 86, 101, - 114, 115, 105, 111, 110, 58, 83, 117, 98, 106, 101, 99, - ]; - let result = [ - 116, 90, 151, 31, 78, 22, 138, 180, 211, 189, 69, 76, 227, 200, 155, 29, 59, 123, 154, - 60, 47, 153, 203, 129, 157, 251, 48, 2, 79, 11, 65, 47, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn msg_just_under_block() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, - 108, 97, 105, 110, 59, - ]; - let result = [ - 143, 140, 76, 173, 222, 123, 102, 68, 70, 149, 207, 43, 39, 61, 34, 79, 216, 252, 213, - 165, 74, 16, 110, 74, 29, 64, 138, 167, 30, 1, 9, 119, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn msg_big_not_block_multiple() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, - 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 115, 45, 97, 115, - 99, 105, 105, 13, 10, 109, 105, 109, 101, 45, 118, 101, 114, 115, 105, 111, 110, 58, 49, - 46, 48, 32, 40, 77, 97, 99, 32, 79, 83, 32, 88, 32, 77, 97, 105, 108, 32, 49, 54, 46, - 48, 32, 92, 40, 51, 55, 51, 49, 46, 53, 48, 48, 46, 50, 51, 49, 92, 41, 41, 13, 10, 115, - 117, 98, 106, 101, 99, 116, 58, 72, 101, 108, 108, 111, 13, 10, 109, 101, 115, 115, 97, - 103, 101, 45, 105, 100, 58, 60, 56, 70, 56, 49, 57, 68, 51, 50, 45, 66, 54, 65, 67, 45, - 52, 56, 57, 68, 45, 57, 55, 55, 70, 45, 52, 51, 56, 66, 66, 67, 52, 67, 65, 66, 50, 55, - 64, 109, 101, 46, 99, 111, 109, 62, 13, 10, 100, 97, 116, 101, 58, 83, 97, 116, 44, 32, - 50, 54, 32, 65, 117, 103, 32, 50, 48, 50, 51, 32, 49, 50, 58, 50, 53, 58, 50, 50, 32, - 43, 48, 52, 48, 48, 13, 10, 116, 111, 58, 122, 107, 101, 119, 116, 101, 115, 116, 64, - 103, 109, 97, 105, 108, 46, 99, 111, 109, 13, 10, 100, 107, 105, 109, 45, 115, 105, 103, - 110, 97, 116, 117, 114, 101, 58, 118, 61, 49, 59, 32, 97, 61, 114, 115, 97, 45, 115, - 104, 97, 50, 53, 54, 59, 32, 99, 61, 114, 101, 108, 97, 120, 101, 100, 47, 114, 101, - 108, 97, 120, 101, 100, 59, 32, 100, 61, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, - 59, 32, 115, 61, 49, 97, 49, 104, 97, 105, 59, 32, 116, 61, 49, 54, 57, 51, 48, 51, 56, - 51, 51, 55, 59, 32, 98, 104, 61, 55, 120, 81, 77, 68, 117, 111, 86, 86, 85, 52, 109, 48, - 87, 48, 87, 82, 86, 83, 114, 86, 88, 77, 101, 71, 83, 73, 65, 83, 115, 110, 117, 99, 75, - 57, 100, 74, 115, 114, 99, 43, 118, 85, 61, 59, 32, 104, 61, 102, 114, 111, 109, 58, 67, - 111, 110, 116, 101, 110, 116, 45, 84, 121, 112, 101, 58, 77, 105, 109, 101, 45, 86, 101, - 114, 115, 105, 111, 110, 58, 83, 117, 98, 106, 101, 99, 116, 58, 77, 101, 115, 115, 97, - 103, 101, 45, 73, 100, 58, 68, 97, 116, 101, 58, 116, 111, 59, 32, 98, 61, - ]; - let result = [ - 112, 144, 73, 182, 208, 98, 9, 238, 54, 229, 61, 145, 222, 17, 72, 62, 148, 222, 186, - 55, 192, 82, 220, 35, 66, 47, 193, 200, 22, 38, 26, 186, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn msg_big_with_padding() { - let input = [ - 48, 130, 1, 37, 2, 1, 0, 48, 11, 6, 9, 96, 134, 72, 1, 101, 3, 4, 2, 1, 48, 130, 1, 17, - 48, 37, 2, 1, 1, 4, 32, 
176, 223, 31, 133, 108, 84, 158, 102, 70, 11, 165, 175, 196, 12, - 201, 130, 25, 131, 46, 125, 156, 194, 28, 23, 55, 133, 157, 164, 135, 136, 220, 78, 48, - 37, 2, 1, 2, 4, 32, 190, 82, 180, 235, 222, 33, 79, 50, 152, 136, 142, 35, 116, 224, 6, - 242, 156, 141, 128, 248, 10, 61, 98, 86, 248, 45, 207, 210, 90, 232, 175, 38, 48, 37, 2, - 1, 3, 4, 32, 0, 194, 104, 108, 237, 246, 97, 230, 116, 198, 69, 110, 26, 87, 17, 89, - 110, 199, 108, 250, 36, 21, 39, 87, 110, 102, 250, 213, 174, 131, 171, 174, 48, 37, 2, - 1, 11, 4, 32, 136, 155, 87, 144, 111, 15, 152, 127, 85, 25, 154, 81, 20, 58, 51, 75, - 193, 116, 234, 0, 60, 30, 29, 30, 183, 141, 72, 247, 255, 203, 100, 124, 48, 37, 2, 1, - 12, 4, 32, 41, 234, 106, 78, 31, 11, 114, 137, 237, 17, 92, 71, 134, 47, 62, 78, 189, - 233, 201, 214, 53, 4, 47, 189, 201, 133, 6, 121, 34, 131, 64, 142, 48, 37, 2, 1, 13, 4, - 32, 91, 222, 210, 193, 62, 222, 104, 82, 36, 41, 138, 253, 70, 15, 148, 208, 156, 45, - 105, 171, 241, 195, 185, 43, 217, 162, 146, 201, 222, 89, 238, 38, 48, 37, 2, 1, 14, 4, - 32, 76, 123, 216, 13, 51, 227, 72, 245, 59, 193, 238, 166, 103, 49, 23, 164, 171, 188, - 194, 197, 156, 187, 249, 28, 198, 95, 69, 15, 182, 56, 54, 38, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - ]; - let result = [ - 32, 85, 108, 174, 127, 112, 178, 182, 8, 43, 134, 123, 192, 211, 131, 66, 184, 240, 212, - 181, 240, 180, 106, 195, 24, 117, 54, 129, 19, 10, 250, 53, - ]; - let message_size = 297; - assert_eq(sha256_var(input, message_size), result); - } - - #[test] - fn msg_big_no_padding() { - let input = [ - 48, 130, 1, 37, 2, 1, 0, 48, 11, 6, 9, 96, 134, 72, 1, 101, 3, 4, 2, 1, 48, 130, 1, 17, - 48, 37, 2, 1, 1, 4, 32, 176, 223, 31, 133, 108, 84, 158, 102, 70, 11, 165, 175, 196, 12, - 201, 130, 25, 131, 46, 125, 156, 194, 28, 23, 55, 133, 157, 164, 135, 136, 220, 78, 48, - 37, 2, 1, 2, 4, 32, 190, 82, 180, 235, 222, 33, 79, 50, 152, 136, 142, 35, 116, 224, 6, - 242, 156, 141, 128, 248, 10, 61, 98, 86, 248, 45, 207, 210, 90, 232, 175, 38, 48, 37, 2, - 1, 3, 4, 32, 0, 194, 104, 108, 237, 246, 97, 230, 116, 198, 69, 110, 26, 87, 17, 89, - 110, 199, 108, 250, 36, 21, 39, 87, 110, 102, 250, 213, 174, 131, 171, 174, 48, 37, 2, - 1, 11, 4, 32, 136, 155, 87, 144, 111, 15, 152, 127, 85, 25, 154, 81, 20, 58, 51, 75, - 193, 116, 234, 0, 60, 30, 29, 30, 183, 141, 72, 247, 255, 203, 100, 124, 48, 37, 
2, 1, - 12, 4, 32, 41, 234, 106, 78, 31, 11, 114, 137, 237, 17, 92, 71, 134, 47, 62, 78, 189, - 233, 201, 214, 53, 4, 47, 189, 201, 133, 6, 121, 34, 131, 64, 142, 48, 37, 2, 1, 13, 4, - 32, 91, 222, 210, 193, 62, 222, 104, 82, 36, 41, 138, 253, 70, 15, 148, 208, 156, 45, - 105, 171, 241, 195, 185, 43, 217, 162, 146, 201, 222, 89, 238, 38, 48, 37, 2, 1, 14, 4, - 32, 76, 123, 216, 13, 51, 227, 72, 245, 59, 193, 238, 166, 103, 49, 23, 164, 171, 188, - 194, 197, 156, 187, 249, 28, 198, 95, 69, 15, 182, 56, 54, 38, - ]; - let result = [ - 32, 85, 108, 174, 127, 112, 178, 182, 8, 43, 134, 123, 192, 211, 131, 66, 184, 240, 212, - 181, 240, 180, 106, 195, 24, 117, 54, 129, 19, 10, 250, 53, - ]; - assert_eq(sha256_var(input, input.len() as u64), result); - } - - #[test] - fn same_msg_len_variable_padding() { - let input = [ - 29, 81, 165, 84, 243, 114, 101, 37, 242, 146, 127, 99, 69, 145, 39, 72, 213, 39, 253, - 179, 218, 37, 217, 201, 172, 93, 198, 50, 249, 70, 15, 30, 162, 112, 187, 40, 140, 9, - 236, 53, 32, 44, 38, 163, 113, 254, 192, 197, 44, 89, 71, 130, 169, 242, 17, 211, 214, - 72, 19, 178, 186, 168, 147, 127, 99, 101, 252, 227, 8, 147, 150, 85, 97, 158, 17, 107, - 218, 244, 82, 113, 247, 91, 208, 214, 60, 244, 87, 137, 173, 201, 130, 18, 66, 56, 198, - 149, 207, 189, 175, 120, 123, 224, 177, 167, 251, 159, 143, 110, 68, 183, 189, 70, 126, - 32, 35, 164, 44, 30, 44, 12, 65, 18, 62, 239, 242, 2, 248, 104, 2, 178, 64, 28, 126, 36, - 137, 24, 14, 116, 91, 98, 90, 159, 218, 102, 45, 11, 110, 223, 245, 184, 52, 99, 59, - 245, 136, 175, 3, 72, 164, 146, 145, 116, 22, 66, 24, 49, 193, 121, 3, 60, 37, 41, 97, - 3, 190, 66, 195, 225, 63, 46, 3, 118, 4, 208, 15, 1, 40, 254, 235, 151, 123, 70, 180, - 170, 44, 172, 90, 4, 254, 53, 239, 116, 246, 67, 56, 129, 61, 22, 169, 213, 65, 27, 216, - 116, 162, 239, 214, 207, 126, 177, 20, 100, 25, 48, 143, 84, 215, 70, 197, 53, 65, 70, - 86, 172, 61, 62, 9, 212, 167, 169, 133, 41, 126, 213, 196, 33, 192, 238, 0, 63, 246, - 215, 58, 128, 110, 101, 92, 3, 170, 214, 130, 149, 52, 81, 125, 118, 233, 3, 118, 193, - 104, 207, 120, 115, 77, 253, 191, 122, 0, 107, 164, 207, 113, 81, 169, 36, 201, 228, 74, - 134, 131, 218, 178, 35, 30, 216, 101, 2, 103, 174, 87, 95, 50, 50, 215, 157, 5, 210, - 188, 54, 211, 78, 45, 199, 96, 121, 241, 241, 176, 226, 194, 134, 130, 89, 217, 210, - 186, 32, 140, 39, 91, 103, 212, 26, 87, 32, 72, 144, 228, 230, 117, 99, 188, 50, 15, 69, - 79, 179, 50, 12, 106, 86, 218, 101, 73, 142, 243, 29, 250, 122, 228, 233, 29, 255, 22, - 121, 114, 125, 103, 41, 250, 241, 179, 126, 158, 198, 116, 209, 65, 94, 98, 228, 175, - 169, 96, 3, 9, 233, 133, 214, 55, 161, 164, 103, 80, 85, 24, 186, 64, 167, 92, 131, 53, - 101, 202, 47, 25, 104, 118, 155, 14, 12, 12, 25, 116, 45, 221, 249, 28, 246, 212, 200, - 157, 167, 169, 56, 197, 181, 4, 245, 146, 1, 140, 234, 191, 212, 228, 125, 87, 81, 86, - 119, 30, 63, 129, 143, 32, 96, - ]; - - // Prepare inputs of different lengths - let mut input_511 = [0; 511]; - let mut input_512 = [0; 512]; // Next block - let mut input_575 = [0; 575]; - let mut input_576 = [0; 576]; // Next block - for i in 0..input.len() { - input_511[i] = input[i]; - input_512[i] = input[i]; - input_575[i] = input[i]; - input_576[i] = input[i]; - } - - // Compute hashes of all inputs (with same message length) - let fixed_length_hash = super::sha256(input); - let var_full_length_hash = sha256_var(input, input.len() as u64); - let var_length_hash_511 = sha256_var(input_511, input.len() as u64); - let var_length_hash_512 = sha256_var(input_512, 
input.len() as u64); - let var_length_hash_575 = sha256_var(input_575, input.len() as u64); - let var_length_hash_576 = sha256_var(input_576, input.len() as u64); - - // All of the above should have produced the same hash - assert_eq(var_full_length_hash, fixed_length_hash); - assert_eq(var_length_hash_511, fixed_length_hash); - assert_eq(var_length_hash_512, fixed_length_hash); - assert_eq(var_length_hash_575, fixed_length_hash); - assert_eq(var_length_hash_576, fixed_length_hash); - } - - #[test] - fn test_get_item_byte() { - let fld = make_item(10, 20, 30, 40); - assert_eq(fld, 0x0a141e28); - assert_eq(get_item_byte(fld, 0), 10); - assert_eq(get_item_byte(fld, 4), 10); - assert_eq(get_item_byte(fld, 6), 30); - } - - #[test] - fn test_byte_into_item() { - let fld = make_item(0, 20, 0, 0); - assert_eq(byte_into_item(20, 1), fld); - assert_eq(byte_into_item(20, 5), fld); - } - - #[test] - fn test_set_item_zeros() { - let fld0 = make_item(10, 20, 30, 40); - let fld1 = make_item(10, 0, 0, 0); - assert_eq(set_item_zeros(fld0, 3), fld1); - assert_eq(set_item_zeros(fld0, 4), 0); - assert_eq(set_item_zeros(0, 4), 0); - } - - #[test] - fn test_set_item_byte_then_zeros() { - let fld0 = make_item(10, 20, 30, 40); - let fld1 = make_item(10, 50, 0, 0); - assert_eq(set_item_byte_then_zeros(fld0, 1, 50), fld1); - } - - #[test] - fn test_build_msg_block_start_0() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, - ]; - assert_eq(input.len(), 22); - - // Safety: testing context - let (msg_block, msg_byte_ptr) = unsafe { build_msg_block(input, input.len(), 0) }; - assert_eq(msg_byte_ptr, input.len()); - assert_eq(msg_block[0], make_item(input[0], input[1], input[2], input[3])); - assert_eq(msg_block[1], make_item(input[4], input[5], input[6], input[7])); - assert_eq(msg_block[5], make_item(input[20], input[21], 0, 0)); - assert_eq(msg_block[6], 0); - } - - #[test] - fn test_build_msg_block_start_1() { - let input = [ - 102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, - 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, - 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, - 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, - ]; - assert_eq(input.len(), 68); - // Safety: test context - let (msg_block, msg_byte_ptr) = unsafe { build_msg_block(input, input.len(), 64) }; - assert_eq(msg_byte_ptr, 4); - assert_eq(msg_block[0], make_item(input[64], input[65], input[66], input[67])); - assert_eq(msg_block[1], 0); - } - - #[test] - fn test_attach_len_to_msg_block() { - let input: INT_BLOCK = [ - 2152555847, 1397309779, 1936618851, 1262052426, 1936876331, 1985297723, 543702374, - 1919905082, 1131376244, 1701737517, 1417244773, 978151789, 1697470053, 1920166255, - 1849316213, 1651139939, - ]; - // Safety: testing context - let msg_block = unsafe { attach_len_to_msg_block(input, 1, 448) }; - assert_eq(msg_block[0], ((1 << 7) as u32) * 256 * 256 * 256); - assert_eq(msg_block[1], 0); - assert_eq(msg_block[15], 3584); - } -} diff --git a/noir/noir-repo/noir_stdlib/src/hash/sha512.nr b/noir/noir-repo/noir_stdlib/src/hash/sha512.nr deleted file mode 100644 index 5630139c1f1..00000000000 --- a/noir/noir-repo/noir_stdlib/src/hash/sha512.nr +++ /dev/null @@ -1,165 +0,0 @@ -// Implementation of SHA-512 mapping a byte array of variable length to -// 64 bytes. -// Internal functions act on 64-bit unsigned integers for simplicity. 
-// Auxiliary mappings; names as in FIPS PUB 180-4 -fn rotr64(a: u64, b: u8) -> u64 // 64-bit right rotation -{ - // None of the bits overlap between `(a >> b)` and `(a << (64 - b))` - // Addition is then equivalent to OR, with fewer constraints. - (a >> b) + (a << (64 - b)) -} - -fn sha_ch(x: u64, y: u64, z: u64) -> u64 { - (x & y) ^ (!x & z) -} - -fn sha_maj(x: u64, y: u64, z: u64) -> u64 { - (x & y) ^ (x & z) ^ (y & z) -} - -fn sha_bigma0(x: u64) -> u64 { - rotr64(x, 28) ^ rotr64(x, 34) ^ rotr64(x, 39) -} - -fn sha_bigma1(x: u64) -> u64 { - rotr64(x, 14) ^ rotr64(x, 18) ^ rotr64(x, 41) -} - -fn sha_sigma0(x: u64) -> u64 { - rotr64(x, 1) ^ rotr64(x, 8) ^ (x >> 7) -} - -fn sha_sigma1(x: u64) -> u64 { - rotr64(x, 19) ^ rotr64(x, 61) ^ (x >> 6) -} - -fn sha_w(msg: [u64; 16]) -> [u64; 80] // Expanded message blocks -{ - let mut w: [u64; 80] = [0; 80]; - - for j in 0..16 { - w[j] = msg[j]; - } - - for j in 16..80 { - w[j] = crate::wrapping_add( - crate::wrapping_add(sha_sigma1(w[j - 2]), w[j - 7]), - crate::wrapping_add(sha_sigma0(w[j - 15]), w[j - 16]), - ); - } - w -} - -// SHA-512 compression function -#[no_predicates] -fn sha_c(msg: [u64; 16], hash: [u64; 8]) -> [u64; 8] { - // noir-fmt:ignore - let K: [u64; 80] = [4794697086780616226, 8158064640168781261, 13096744586834688815, 16840607885511220156, 4131703408338449720, 6480981068601479193, 10538285296894168987, 12329834152419229976, 15566598209576043074, 1334009975649890238, 2608012711638119052, 6128411473006802146, 8268148722764581231, 9286055187155687089, 11230858885718282805, 13951009754708518548, 16472876342353939154, 17275323862435702243, 1135362057144423861, 2597628984639134821, 3308224258029322869, 5365058923640841347, 6679025012923562964, 8573033837759648693, 10970295158949994411, 12119686244451234320, 12683024718118986047, 13788192230050041572, 14330467153632333762, 15395433587784984357, 489312712824947311, 1452737877330783856, 2861767655752347644, 3322285676063803686, 5560940570517711597, 5996557281743188959, 7280758554555802590, 8532644243296465576, 9350256976987008742, 10552545826968843579, 11727347734174303076, 12113106623233404929, 14000437183269869457, 14369950271660146224, 15101387698204529176, 15463397548674623760, 17586052441742319658, 1182934255886127544, 1847814050463011016, 2177327727835720531, 2830643537854262169, 3796741975233480872, 4115178125766777443, 5681478168544905931, 6601373596472566643, 7507060721942968483, 8399075790359081724, 8693463985226723168, 9568029438360202098, 10144078919501101548, 10430055236837252648, 11840083180663258601, 13761210420658862357, 14299343276471374635, 14566680578165727644, 15097957966210449927, 16922976911328602910, 17689382322260857208, 500013540394364858, 748580250866718886, 1242879168328830382, 1977374033974150939, 2944078676154940804, 3659926193048069267, 4368137639120453308, 4836135668995329356, 5532061633213252278, 6448918945643986474, 6902733635092675308, 7801388544844847127]; // first 64 bits of fractional parts of cube roots of first 80 primes - let mut out_h: [u64; 8] = hash; - let w = sha_w(msg); - for j in 0..80 { - let out1 = crate::wrapping_add(out_h[7], sha_bigma1(out_h[4])); - let out2 = crate::wrapping_add(out1, sha_ch(out_h[4], out_h[5], out_h[6])); - let t1 = crate::wrapping_add(crate::wrapping_add(out2, K[j]), w[j]); - let t2 = crate::wrapping_add(sha_bigma0(out_h[0]), sha_maj(out_h[0], out_h[1], out_h[2])); - out_h[7] = out_h[6]; - out_h[6] = out_h[5]; - out_h[5] = out_h[4]; - out_h[4] = crate::wrapping_add(out_h[3], t1); - out_h[3] = out_h[2]; - out_h[2] = 
out_h[1]; - out_h[1] = out_h[0]; - out_h[0] = crate::wrapping_add(t1, t2); - } - - out_h -} -// Convert 128-byte array to array of 16 u64s -fn msg_u8_to_u64(msg: [u8; 128]) -> [u64; 16] { - let mut msg64: [u64; 16] = [0; 16]; - - for i in 0..16 { - let mut msg_field: Field = 0; - for j in 0..8 { - msg_field = msg_field * 256 + msg[128 - 8 * (i + 1) + j] as Field; - } - msg64[15 - i] = msg_field as u64; - } - - msg64 -} -// SHA-512 hash function -pub fn digest(msg: [u8; N]) -> [u8; 64] { - let mut msg_block: [u8; 128] = [0; 128]; - // noir-fmt:ignore - let mut h: [u64; 8] = [7640891576956012808, 13503953896175478587, 4354685564936845355, 11912009170470909681, 5840696475078001361, 11170449401992604703, 2270897969802886507, 6620516959819538809]; // Intermediate hash, starting with the canonical initial value - let mut c: [u64; 8] = [0; 8]; // Compression of current message block as sequence of u64 - let mut out_h: [u8; 64] = [0; 64]; // Digest as sequence of bytes - let mut i: u64 = 0; // Message byte pointer - for k in 0..msg.len() { - // Populate msg_block - msg_block[i] = msg[k]; - i = i + 1; - if i == 128 { - // Enough to hash block - c = sha_c(msg_u8_to_u64(msg_block), h); - for j in 0..8 { - h[j] = crate::wrapping_add(h[j], c[j]); - } - - i = 0; - } - } - // Pad the rest such that we have a [u64; 2] block at the end representing the length - // of the message, and a block of 1 0 ... 0 following the message (i.e. [1 << 7, 0, ..., 0]). - msg_block[i] = 1 << 7; - i += 1; - // If i >= 113, there aren't enough bits in the current message block to accomplish this, so - // the 1 and 0s fill up the current block, which we then compress accordingly. - if i >= 113 { - // Not enough bits (128) to store length. Fill up with zeros. - if i < 128 { - for _i in 113..128 { - if i <= 127 { - msg_block[i] = 0; - i += 1; - } - } - } - c = sha_c(msg_u8_to_u64(msg_block), h); - for j in 0..8 { - h[j] = crate::wrapping_add(h[j], c[j]); - } - - i = 0; - } - - let len = 8 * msg.len(); - let len_bytes: [u8; 16] = (len as Field).to_le_bytes(); - for _i in 0..128 { - // In any case, fill blocks up with zeros until the last 128 (i.e. until i = 112). - if i < 112 { - msg_block[i] = 0; - i += 1; - } else if i < 128 { - for j in 0..16 { - msg_block[127 - j] = len_bytes[j]; - } - i += 16; // Done. 
- } - } - // Hash final padded block - c = sha_c(msg_u8_to_u64(msg_block), h); - for j in 0..8 { - h[j] = crate::wrapping_add(h[j], c[j]); - } - // Return final hash as byte array - for j in 0..8 { - let h_bytes: [u8; 8] = (h[7 - j] as Field).to_le_bytes(); - for k in 0..8 { - out_h[63 - 8 * j - k] = h_bytes[k]; - } - } - - out_h -} diff --git a/noir/noir-repo/noir_stdlib/src/lib.nr b/noir/noir-repo/noir_stdlib/src/lib.nr index d5c360792d9..cd54162a504 100644 --- a/noir/noir-repo/noir_stdlib/src/lib.nr +++ b/noir/noir-repo/noir_stdlib/src/lib.nr @@ -2,12 +2,9 @@ pub mod hash; pub mod aes128; pub mod array; pub mod slice; -pub mod merkle; pub mod ecdsa_secp256k1; pub mod ecdsa_secp256r1; pub mod embedded_curve_ops; -pub mod sha256; -pub mod sha512; pub mod field; pub mod collections; pub mod compat; @@ -19,7 +16,6 @@ pub mod cmp; pub mod ops; pub mod default; pub mod prelude; -pub mod uint128; pub mod runtime; pub mod meta; pub mod append; @@ -121,8 +117,46 @@ where pub fn as_witness(x: Field) {} mod tests { + use super::wrapping_mul; + #[test(should_fail_with = "custom message")] fn test_static_assert_custom_message() { super::static_assert(1 == 2, "custom message"); } + + #[test(should_fail)] + fn test_wrapping_mul() { + // This currently fails. + // See: https://github.com/noir-lang/noir/issues/7528 + let zero: u128 = 0; + let one: u128 = 1; + let two_pow_64: u128 = 0x10000000000000000; + let u128_max: u128 = 0xffffffffffffffffffffffffffffffff; + + // 1*0==0 + assert_eq(zero, wrapping_mul(zero, one)); + + // 0*1==0 + assert_eq(zero, wrapping_mul(one, zero)); + + // 1*1==1 + assert_eq(one, wrapping_mul(one, one)); + + // 0 * ( 1 << 64 ) == 0 + assert_eq(zero, wrapping_mul(zero, two_pow_64)); + + // ( 1 << 64 ) * 0 == 0 + assert_eq(zero, wrapping_mul(two_pow_64, zero)); + + // 1 * ( 1 << 64 ) == 1 << 64 + assert_eq(two_pow_64, wrapping_mul(two_pow_64, one)); + + // ( 1 << 64 ) * 1 == 1 << 64 + assert_eq(two_pow_64, wrapping_mul(one, two_pow_64)); + + // ( 1 << 64 ) * ( 1 << 64 ) == 1 << 64 + assert_eq(zero, wrapping_mul(two_pow_64, two_pow_64)); + // -1 * -1 == 1 + assert_eq(one, wrapping_mul(u128_max, u128_max)); + } } diff --git a/noir/noir-repo/noir_stdlib/src/merkle.nr b/noir/noir-repo/noir_stdlib/src/merkle.nr deleted file mode 100644 index 34cfcdb1787..00000000000 --- a/noir/noir-repo/noir_stdlib/src/merkle.nr +++ /dev/null @@ -1,19 +0,0 @@ -// Regular merkle tree means a append-only merkle tree (Explain why this is the only way to have privacy and alternatives if you don't want it) -// Currently we assume that it is a binary tree, so depth k implies a width of 2^k -// XXX: In the future we can add an arity parameter -// Returns the merkle root of the tree from the provided leaf, its hashpath, using a pedersen hash function. 
-#[deprecated("This function will be removed from the stdlib in version 1.0.0-beta.4")] -pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { - let index_bits: [u1; N] = index.to_le_bits(); - let mut current = leaf; - for i in 0..N { - let path_bit = index_bits[i] as bool; - let (hash_left, hash_right) = if path_bit { - (hash_path[i], current) - } else { - (current, hash_path[i]) - }; - current = crate::hash::pedersen_hash([hash_left, hash_right]); - } - current -} diff --git a/noir/noir-repo/noir_stdlib/src/prelude.nr b/noir/noir-repo/noir_stdlib/src/prelude.nr index a4a6c35b615..7aa60456b6d 100644 --- a/noir/noir-repo/noir_stdlib/src/prelude.nr +++ b/noir/noir-repo/noir_stdlib/src/prelude.nr @@ -7,4 +7,3 @@ pub use crate::default::Default; pub use crate::meta::{derive, derive_via}; pub use crate::option::Option; pub use crate::panic::panic; -pub use crate::uint128::U128; diff --git a/noir/noir-repo/noir_stdlib/src/sha256.nr b/noir/noir-repo/noir_stdlib/src/sha256.nr deleted file mode 100644 index 534c954d3dc..00000000000 --- a/noir/noir-repo/noir_stdlib/src/sha256.nr +++ /dev/null @@ -1,10 +0,0 @@ -// This file is kept for backwards compatibility. -#[deprecated("sha256 is being deprecated from the stdlib, use https://github.com/noir-lang/sha256 instead")] -pub fn digest(msg: [u8; N]) -> [u8; 32] { - crate::hash::sha256::digest(msg) -} - -#[deprecated("sha256 is being deprecated from the stdlib, use https://github.com/noir-lang/sha256 instead")] -pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { - crate::hash::sha256::sha256_var(msg, message_size) -} diff --git a/noir/noir-repo/noir_stdlib/src/sha512.nr b/noir/noir-repo/noir_stdlib/src/sha512.nr deleted file mode 100644 index 27b53f4395f..00000000000 --- a/noir/noir-repo/noir_stdlib/src/sha512.nr +++ /dev/null @@ -1,5 +0,0 @@ -// This file is kept for backwards compatibility. 
-#[deprecated] -pub fn digest(msg: [u8; N]) -> [u8; 64] { - crate::hash::sha512::digest(msg) -} diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr deleted file mode 100644 index f41958e0e30..00000000000 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ /dev/null @@ -1,572 +0,0 @@ -use crate::cmp::{Eq, Ord, Ordering}; -use crate::ops::{Add, BitAnd, BitOr, BitXor, Div, Mul, Not, Rem, Shl, Shr, Sub}; -use crate::static_assert; -use super::{convert::AsPrimitive, default::Default}; - -global pow64: Field = 18446744073709551616; //2^64; -global pow63: Field = 9223372036854775808; // 2^63; -pub struct U128 { - pub(crate) lo: Field, - pub(crate) hi: Field, -} - -impl U128 { - - pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { - // in order to handle multiplication, we need to represent the product of two u64 without overflow - assert(crate::field::modulus_num_bits() as u32 > 128); - U128 { lo: lo as Field, hi: hi as Field } - } - - pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { - U128::from_u64s_le(lo, hi) - } - - pub fn zero() -> U128 { - U128 { lo: 0, hi: 0 } - } - - pub fn one() -> U128 { - U128 { lo: 1, hi: 0 } - } - pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { - let mut lo = 0; - let mut base = 1; - for i in 0..8 { - lo += (bytes[i] as Field) * base; - base *= 256; - } - let mut hi = 0; - base = 1; - for i in 8..16 { - hi += (bytes[i] as Field) * base; - base *= 256; - } - U128 { lo, hi } - } - - pub fn to_be_bytes(self: Self) -> [u8; 16] { - let lo: [u8; 8] = self.lo.to_be_bytes(); - let hi: [u8; 8] = self.hi.to_be_bytes(); - let mut bytes = [0; 16]; - for i in 0..8 { - bytes[i] = hi[i]; - bytes[i + 8] = lo[i]; - } - bytes - } - - pub fn to_le_bytes(self: Self) -> [u8; 16] { - let lo: [u8; 8] = self.lo.to_le_bytes(); - let hi: [u8; 8] = self.hi.to_le_bytes(); - let mut bytes = [0; 16]; - for i in 0..8 { - bytes[i] = lo[i]; - bytes[i + 8] = hi[i]; - } - bytes - } - - pub fn from_hex(hex: str) -> U128 { - let bytes = hex.as_bytes(); - // string must starts with "0x" - assert((bytes[0] == 48) & (bytes[1] == 120), "Invalid hexadecimal string"); - static_assert(N < 35, "Input does not fit into a U128"); - - let mut lo = 0; - let mut hi = 0; - let mut base = 1; - if N <= 18 { - for i in 0..N - 2 { - lo += U128::decode_ascii(bytes[N - i - 1]) * base; - base = base * 16; - } - } else { - for i in 0..16 { - lo += U128::decode_ascii(bytes[N - i - 1]) * base; - base = base * 16; - } - base = 1; - for i in 17..N - 1 { - hi += U128::decode_ascii(bytes[N - i]) * base; - base = base * 16; - } - } - U128 { lo: lo as Field, hi: hi as Field } - } - - unconstrained fn unconstrained_check_is_upper_ascii(ascii: u8) -> bool { - ((ascii >= 65) & (ascii <= 90)) // Between 'A' and 'Z' - } - - pub(crate) fn decode_ascii(ascii: u8) -> Field { - ( - if ascii < 58 { - ascii - 48 - } else { - // Safety: optionally adds 32 and then check (below) the result is in 'a..f' range - let ascii = - ascii + 32 * (unsafe { U128::unconstrained_check_is_upper_ascii(ascii) as u8 }); - assert(ascii >= 97); // enforce >= 'a' - assert(ascii <= 102); // enforce <= 'f' - ascii - 87 - } - ) as Field - } - - // TODO: Replace with a faster version. 
- // A circuit that uses this function can be slow to compute - // (we're doing up to 127 calls to compute the quotient) - unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { - if b == U128::zero() { - // Return 0,0 to avoid eternal loop - (U128::zero(), U128::zero()) - } else if self < b { - (U128::zero(), self) - } else if self == b { - (U128::one(), U128::zero()) - } else { - let (q, r) = if b.hi as u64 >= pow63 as u64 { - // The result of multiplication by 2 would overflow - (U128::zero(), self) - } else { - self.unconstrained_div(b * U128::from_u64s_le(2, 0)) - }; - let q_mul_2 = q * U128::from_u64s_le(2, 0); - if r < b { - (q_mul_2, r) - } else { - (q_mul_2 + U128::one(), r - b) - } - } - } - - pub fn from_integer(i: T) -> U128 - where - T: AsPrimitive, - { - let f = i.as_(); - // Reject values which would overflow a u128 - f.assert_max_bit_size::<128>(); - let lo = f as u64 as Field; - let hi = (f - lo) / pow64; - U128 { lo, hi } - } - - pub fn to_integer(self) -> T - where - Field: AsPrimitive, - { - AsPrimitive::as_(self.lo + self.hi * pow64) - } - - fn wrapping_mul(self: Self, b: U128) -> U128 { - let low = self.lo * b.lo; - let lo = low as u64 as Field; - let carry = (low - lo) / pow64; - let high = self.lo * b.hi + self.hi * b.lo + carry; - let hi = high as u64 as Field; - U128 { lo, hi } - } -} - -impl Add for U128 { - fn add(self: Self, b: U128) -> U128 { - let low = self.lo + b.lo; - let lo = low as u64 as Field; - let carry = (low - lo) / pow64; - let high = self.hi + b.hi + carry; - let hi = high as u64 as Field; - assert(hi == high, "attempt to add with overflow"); - U128 { lo, hi } - } -} - -impl Sub for U128 { - fn sub(self: Self, b: U128) -> U128 { - let low = pow64 + self.lo - b.lo; - let lo = low as u64 as Field; - let borrow = (low == lo) as Field; - let high = self.hi - b.hi - borrow; - let hi = high as u64 as Field; - assert(hi == high, "attempt to subtract with underflow"); - U128 { lo, hi } - } -} - -impl Mul for U128 { - fn mul(self: Self, b: U128) -> U128 { - assert(self.hi * b.hi == 0, "attempt to multiply with overflow"); - let low = self.lo * b.lo; - let lo = low as u64 as Field; - let carry = (low - lo) / pow64; - let high = if crate::field::modulus_num_bits() as u32 > 196 { - (self.lo + self.hi) * (b.lo + b.hi) - low + carry - } else { - self.lo * b.hi + self.hi * b.lo + carry - }; - let hi = high as u64 as Field; - assert(hi == high, "attempt to multiply with overflow"); - U128 { lo, hi } - } -} - -impl Div for U128 { - fn div(self: Self, b: U128) -> U128 { - // Safety: euclidian division is asserted to be correct: assert(a == b * q + r); and assert(r < b); - // Furthermore, U128 addition and multiplication ensures that b * q + r does not overflow - unsafe { - let (q, r) = self.unconstrained_div(b); - let a = b * q + r; - assert_eq(self, a); - assert(r < b); - q - } - } -} - -impl Rem for U128 { - fn rem(self: Self, b: U128) -> U128 { - // Safety: cf div() above - unsafe { - let (q, r) = self.unconstrained_div(b); - let a = b * q + r; - assert_eq(self, a); - assert(r < b); - - r - } - } -} - -impl Eq for U128 { - fn eq(self: Self, b: U128) -> bool { - (self.lo == b.lo) & (self.hi == b.hi) - } -} - -impl Ord for U128 { - fn cmp(self, other: Self) -> Ordering { - let hi_ordering = (self.hi as u64).cmp((other.hi as u64)); - let lo_ordering = (self.lo as u64).cmp((other.lo as u64)); - - if hi_ordering == Ordering::equal() { - lo_ordering - } else { - hi_ordering - } - } -} - -impl Not for U128 { - fn not(self) -> U128 { - U128 { lo: 
(!(self.lo as u64)) as Field, hi: (!(self.hi as u64)) as Field } - } -} - -impl BitOr for U128 { - fn bitor(self, other: U128) -> U128 { - U128 { - lo: ((self.lo as u64) | (other.lo as u64)) as Field, - hi: ((self.hi as u64) | (other.hi as u64)) as Field, - } - } -} - -impl BitAnd for U128 { - fn bitand(self, other: U128) -> U128 { - U128 { - lo: ((self.lo as u64) & (other.lo as u64)) as Field, - hi: ((self.hi as u64) & (other.hi as u64)) as Field, - } - } -} - -impl BitXor for U128 { - fn bitxor(self, other: U128) -> U128 { - U128 { - lo: ((self.lo as u64) ^ (other.lo as u64)) as Field, - hi: ((self.hi as u64) ^ (other.hi as u64)) as Field, - } - } -} - -impl Shl for U128 { - fn shl(self, other: u8) -> U128 { - assert(other < 128, "attempt to shift left with overflow"); - let exp_bits: [u1; 7] = (other as Field).to_be_bits(); - - let mut r: Field = 2; - let mut y: Field = 1; - for i in 1..8 { - let bit = exp_bits[7 - i] as Field; - y = bit * (r * y) + (1 - bit) * y; - r *= r; - } - self.wrapping_mul(U128::from_integer(y)) - } -} - -impl Shr for U128 { - fn shr(self, other: u8) -> U128 { - assert(other < 128, "attempt to shift right with overflow"); - let exp_bits: [u1; 7] = (other as Field).to_be_bits(); - - let mut r: Field = 2; - let mut y: Field = 1; - for i in 1..8 { - let bit = exp_bits[7 - i] as Field; - y = bit * (r * y) + (1 - bit) * y; - r *= r; - } - self / U128::from_integer(y) - } -} - -impl Default for U128 { - fn default() -> Self { - U128::zero() - } -} - -mod tests { - use crate::default::Default; - use crate::ops::Not; - use crate::uint128::{pow63, pow64, U128}; - - #[test] - fn test_not(lo: u64, hi: u64) { - let num = U128::from_u64s_le(lo, hi); - let not_num = num.not(); - - assert_eq(not_num.hi, (hi.not() as Field)); - assert_eq(not_num.lo, (lo.not() as Field)); - - let not_not_num = not_num.not(); - assert_eq(num, not_not_num); - } - #[test] - fn test_construction() { - // Check little-endian u64 is inversed with big-endian u64 construction - let a = U128::from_u64s_le(2, 1); - let b = U128::from_u64s_be(1, 2); - assert_eq(a, b); - // Check byte construction is equivalent - let c = U128::from_le_bytes([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); - let d = U128::from_u64s_le(0x0706050403020100, 0x0f0e0d0c0b0a0908); - assert_eq(c, d); - } - #[test] - fn test_byte_decomposition() { - let a = U128::from_u64s_le(0x0706050403020100, 0x0f0e0d0c0b0a0908); - // Get big-endian and little-endian byte decompostions - let le_bytes_a = a.to_le_bytes(); - let be_bytes_a = a.to_be_bytes(); - - // Check equivalence - for i in 0..16 { - assert_eq(le_bytes_a[i], be_bytes_a[15 - i]); - } - // Reconstruct U128 from byte decomposition - let b = U128::from_le_bytes(le_bytes_a); - // Check that it's the same element - assert_eq(a, b); - } - #[test] - fn test_hex_constuction() { - let a = U128::from_u64s_le(0x1, 0x2); - let b = U128::from_hex("0x20000000000000001"); - assert_eq(a, b); - - let c = U128::from_hex("0xffffffffffffffffffffffffffffffff"); - let d = U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff); - assert_eq(c, d); - - let e = U128::from_hex("0x00000000000000000000000000000000"); - let f = U128::from_u64s_le(0, 0); - assert_eq(e, f); - } - - // Ascii decode tests - - #[test] - fn test_ascii_decode_correct_range() { - // '0'..'9' range - for i in 0..10 { - let decoded = U128::decode_ascii(48 + i); - assert_eq(decoded, i as Field); - } - // 'A'..'F' range - for i in 0..6 { - let decoded = U128::decode_ascii(65 + i); - assert_eq(decoded, (i + 10) as Field); - } 
- // 'a'..'f' range - for i in 0..6 { - let decoded = U128::decode_ascii(97 + i); - assert_eq(decoded, (i + 10) as Field); - } - } - - #[test(should_fail)] - fn test_ascii_decode_range_less_than_48_fails_0() { - crate::println(U128::decode_ascii(0)); - } - #[test(should_fail)] - fn test_ascii_decode_range_less_than_48_fails_1() { - crate::println(U128::decode_ascii(47)); - } - - #[test(should_fail)] - fn test_ascii_decode_range_58_64_fails_0() { - let _ = U128::decode_ascii(58); - } - #[test(should_fail)] - fn test_ascii_decode_range_58_64_fails_1() { - let _ = U128::decode_ascii(64); - } - #[test(should_fail)] - fn test_ascii_decode_range_71_96_fails_0() { - let _ = U128::decode_ascii(71); - } - #[test(should_fail)] - fn test_ascii_decode_range_71_96_fails_1() { - let _ = U128::decode_ascii(96); - } - #[test(should_fail)] - fn test_ascii_decode_range_greater_than_102_fails() { - let _ = U128::decode_ascii(103); - } - - #[test(should_fail)] - fn test_ascii_decode_regression() { - // This code will actually fail because of ascii_decode, - // but in the past it was possible to create a value > (1<<128) - let a = U128::from_hex("0x~fffffffffffffffffffffffffffffff"); - let b: Field = a.to_integer(); - let c: [u8; 17] = b.to_le_bytes(); - assert(c[16] != 0); - } - - #[test] - fn test_unconstrained_div() { - // Test the potential overflow case - let a = U128::from_u64s_le(0x0, 0xffffffffffffffff); - let b = U128::from_u64s_le(0x0, 0xfffffffffffffffe); - let c = U128::one(); - let d = U128::from_u64s_le(0x0, 0x1); - // Safety: testing context - unsafe { - let (q, r) = a.unconstrained_div(b); - assert_eq(q, c); - assert_eq(r, d); - } - - let a = U128::from_u64s_le(2, 0); - let b = U128::one(); - // Check the case where a is a multiple of b - // Safety: testing context - unsafe { - let (c, d) = a.unconstrained_div(b); - assert_eq((c, d), (a, U128::zero())); - } - - // Check where b is a multiple of a - // Safety: testing context - unsafe { - let (c, d) = b.unconstrained_div(a); - assert_eq((c, d), (U128::zero(), b)); - } - - // Dividing by zero returns 0,0 - let a = U128::from_u64s_le(0x1, 0x0); - let b = U128::zero(); - // Safety: testing context - unsafe { - let (c, d) = a.unconstrained_div(b); - assert_eq((c, d), (U128::zero(), U128::zero())); - } - // Dividing 1<<127 by 1<<127 (special case) - let a = U128::from_u64s_le(0x0, pow63 as u64); - let b = U128::from_u64s_le(0x0, pow63 as u64); - // Safety: testing context - unsafe { - let (c, d) = a.unconstrained_div(b); - assert_eq((c, d), (U128::one(), U128::zero())); - } - } - - #[test] - fn integer_conversions() { - // Maximum - let start: Field = 0xffffffffffffffffffffffffffffffff; - let a = U128::from_integer(start); - let end = a.to_integer(); - assert_eq(start, end); - - // Minimum - let start: Field = 0x0; - let a = U128::from_integer(start); - let end = a.to_integer(); - assert_eq(start, end); - - // Low limb - let start: Field = 0xffffffffffffffff; - let a = U128::from_integer(start); - let end = a.to_integer(); - assert_eq(start, end); - - // High limb - let start: Field = 0xffffffffffffffff0000000000000000; - let a = U128::from_integer(start); - let end = a.to_integer(); - assert_eq(start, end); - } - - #[test] - fn integer_conversions_fuzz(lo: u64, hi: u64) { - let start: Field = (lo as Field) + pow64 * (hi as Field); - let a = U128::from_integer(start); - let end = a.to_integer(); - assert_eq(start, end); - } - - #[test] - fn test_wrapping_mul() { - // 1*0==0 - assert_eq(U128::zero(), U128::zero().wrapping_mul(U128::one())); - - // 
0*1==0
-        assert_eq(U128::zero(), U128::one().wrapping_mul(U128::zero()));
-
-        // 1*1==1
-        assert_eq(U128::one(), U128::one().wrapping_mul(U128::one()));
-
-        // 0 * ( 1 << 64 ) == 0
-        assert_eq(U128::zero(), U128::zero().wrapping_mul(U128::from_u64s_le(0, 1)));
-
-        // ( 1 << 64 ) * 0 == 0
-        assert_eq(U128::zero(), U128::from_u64s_le(0, 1).wrapping_mul(U128::zero()));
-
-        // 1 * ( 1 << 64 ) == 1 << 64
-        assert_eq(U128::from_u64s_le(0, 1), U128::from_u64s_le(0, 1).wrapping_mul(U128::one()));
-
-        // ( 1 << 64 ) * 1 == 1 << 64
-        assert_eq(U128::from_u64s_le(0, 1), U128::one().wrapping_mul(U128::from_u64s_le(0, 1)));
-
-        // ( 1 << 64 ) * ( 1 << 64 ) == 1 << 64
-        assert_eq(U128::zero(), U128::from_u64s_le(0, 1).wrapping_mul(U128::from_u64s_le(0, 1)));
-        // -1 * -1 == 1
-        assert_eq(
-            U128::one(),
-            U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff).wrapping_mul(
-                U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff),
-            ),
-        );
-    }
-
-    #[test]
-    fn test_default() {
-        assert_eq(U128::default(), U128::zero());
-    }
-}
diff --git a/noir/noir-repo/scripts/bytecode-sizes/README.md b/noir/noir-repo/scripts/bytecode-sizes/README.md
new file mode 100644
index 00000000000..d860e843428
--- /dev/null
+++ b/noir/noir-repo/scripts/bytecode-sizes/README.md
@@ -0,0 +1,41 @@
+# Bytecode Size Comparison
+
+These scripts can be used to compare the bytecode size of circuits in `aztec-packages` between two different versions of `nargo`.
+For example, we can see what happens if we change the serialization format from `bincode` to `protobuf` in https://github.com/noir-lang/noir/pull/7513
+
+## Compiling contracts
+
+Run these commands to compile Noir protocol circuits and contracts in `aztec-packages` after rebuilding `nargo`:
+
+```shell
+cargo build -p nargo_cli --release
+./target/release/nargo --program-dir ../aztec-packages/noir-projects/noir-protocol-circuits compile --force --silence-warnings --skip-underconstrained-check
+./target/release/nargo --program-dir ../aztec-packages/noir-projects/noir-contracts compile --force --silence-warnings --skip-underconstrained-check
+```
+
+## Baseline
+
+Record the baseline bytecode size before switching to other implementations:
+```shell
+./scripts/bytecode-sizes/print-bytecode-size.sh ../aztec-packages > ./scripts/bytecode-sizes/baseline.jsonl
+```
+
+## Alternative
+
+After making some changes to `nargo`, compile the contracts again with the commands above, then run the following
+commands to record a new measurement and compare it against the baseline recorded earlier.
+
+```shell
+BASELINE=baseline
+ALTERNATIVE=alternative
+./scripts/bytecode-sizes/print-bytecode-size.sh ../aztec-packages \
+  > ./scripts/bytecode-sizes/$ALTERNATIVE.jsonl
+./scripts/bytecode-sizes/compare-bytecode-size.sh \
+  ./scripts/bytecode-sizes/$BASELINE.jsonl \
+  ./scripts/bytecode-sizes/$ALTERNATIVE.jsonl \
+  > ./scripts/bytecode-sizes/$BASELINE-vs-$ALTERNATIVE.jsonl
+./scripts/bytecode-sizes/plot-bytecode-size.sh \
+  ./scripts/bytecode-sizes/$BASELINE-vs-$ALTERNATIVE.jsonl
+```
+
+You can look at the impact in `./scripts/bytecode-sizes/$BASELINE-vs-$ALTERNATIVE.png`.
\ No newline at end of file
diff --git a/noir/noir-repo/scripts/bytecode-sizes/bytecode-size-scatter.plt b/noir/noir-repo/scripts/bytecode-sizes/bytecode-size-scatter.plt
new file mode 100644
index 00000000000..713d66db8b8
--- /dev/null
+++ b/noir/noir-repo/scripts/bytecode-sizes/bytecode-size-scatter.plt
@@ -0,0 +1,9 @@
+set term png size 1200,800;
+set output FILEOUT;
+unset key;
+set title NAME;
+set logscale x;
+set xlabel "Base Bytecode Size (Log)";
+set ylabel "Alt Bytecode Ratio";
+
+plot FILEIN using 2:4 with points;
diff --git a/noir/noir-repo/scripts/bytecode-sizes/compare-bytecode-size.sh b/noir/noir-repo/scripts/bytecode-sizes/compare-bytecode-size.sh
new file mode 100755
index 00000000000..12c0bdace38
--- /dev/null
+++ b/noir/noir-repo/scripts/bytecode-sizes/compare-bytecode-size.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+set -eu
+
+IN1=$1
+IN2=$2
+
+jq --slurp -c '
+. as $top |
+($top[] | select(.encoding == "base") | .data[]) as $base |
+($top[] | select(.encoding == "alt") | .data[] | select(.name == $base.name)) as $alt |
+{
+  name: $base.name,
+  base_size: $base.bytecode_size,
+  alt_size: $alt.bytecode_size,
+  ratio: ($alt.bytecode_size / $base.bytecode_size)
+}
+' \
+  <(cat $IN1 | jq --slurp '{encoding: "base", data: .}') \
+  <(cat $IN2 | jq --slurp '{encoding: "alt", data: .}')
diff --git a/noir/noir-repo/scripts/bytecode-sizes/plot-bytecode-size.sh b/noir/noir-repo/scripts/bytecode-sizes/plot-bytecode-size.sh
new file mode 100755
index 00000000000..39d62876171
--- /dev/null
+++ b/noir/noir-repo/scripts/bytecode-sizes/plot-bytecode-size.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -eu
+
+IN=$1
+NAME=$(basename $IN .jsonl)
+DAT=$(dirname $IN)/$NAME.dat
+PNG=$(dirname $IN)/$NAME.png
+PLT=$(dirname $0)/bytecode-size-scatter.plt
+
+cat $IN | jq -r '[.name, .base_size, .alt_size, .ratio] | @tsv' > $DAT
+
+gnuplot \
+  -e "NAME='$(echo $NAME | tr _ - )'" \
+  -e "FILEIN='$DAT'" \
+  -e "FILEOUT='$PNG'" \
+  $PLT
+
+rm $DAT
diff --git a/noir/noir-repo/scripts/bytecode-sizes/print-bytecode-size.sh b/noir/noir-repo/scripts/bytecode-sizes/print-bytecode-size.sh
new file mode 100755
index 00000000000..8b68c5907ef
--- /dev/null
+++ b/noir/noir-repo/scripts/bytecode-sizes/print-bytecode-size.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -eu
+
+AZTEC_PACKAGES_DIR=$1
+
+for file in $AZTEC_PACKAGES_DIR/noir-projects/noir-protocol-circuits/target/*.json; do
+    PROGRAM=$(basename $file .json)
+    cat $file \
+        | jq --arg PROGRAM $PROGRAM \
+            -c '{name: $PROGRAM, bytecode_size: .bytecode | @base64d | length}'
+done
+
+for file in $AZTEC_PACKAGES_DIR/noir-projects/noir-contracts/target/*.json; do
+    CONTRACT=$(basename $file .json)
+    cat $file \
+        | jq --arg CONTRACT $CONTRACT \
+            -c '.functions | sort_by(.name) | .[] | {name: ($CONTRACT + "::" + .name), "bytecode_size": .bytecode | @base64d | length}'
+done
diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh
index 72170af78d8..e95bd50a0f0 100755
--- a/noir/noir-repo/scripts/install_bb.sh
+++ b/noir/noir-repo/scripts/install_bb.sh
@@ -1,11 +1,11 @@
 #!/bin/bash
 
-VERSION="0.72.1"
+VERSION="0.77.1"
 
 BBUP_PATH=~/.bb/bbup
 
 if !
[ -f $BBUP_PATH ]; then - curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash + curl -L https://bbup.aztec.network | bash fi $BBUP_PATH -v $VERSION diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Nargo.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256/Nargo.toml deleted file mode 100644 index 488b94ca858..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "bench_sha256" -version = "0.1.0" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml deleted file mode 100644 index 66779dea9d7..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -input = [1,2] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_sha256/src/main.nr deleted file mode 100644 index c94d359239d..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256/src/main.nr +++ /dev/null @@ -1,4 +0,0 @@ - -fn main(input: [u8; 2]) -> pub [u8; 32] { - std::hash::sha256(input) -} \ No newline at end of file diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Nargo.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Nargo.toml deleted file mode 100644 index d0c90d75088..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "bench_sha256_100" -version = "0.1.0" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Prover.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Prover.toml deleted file mode 100644 index 542b4a08dd6..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/Prover.toml +++ /dev/null @@ -1,102 +0,0 @@ -input = [ - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], -] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr deleted file mode 100644 index 6e4bfc27c8f..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr +++ /dev/null @@ -1,10 +0,0 @@ -global SIZE: u32 = 100; - -fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { - let mut results: [[u8; 32]; SIZE] = [[0; 32]; SIZE]; - for i in 0..SIZE { - results[i] = std::hash::sha256(input[i]); - } - - results -} 
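As a worked example of the bytecode-sizes tooling added above (a minimal sketch: the program name `demo` and both sizes are invented, and the paths assume the scripts are invoked from the repo root), `compare-bytecode-size.sh` tags each input stream as `base` or `alt`, joins records by `name`, and emits one comparison object per program:

```shell
# Hypothetical single-record measurements; real ones come from print-bytecode-size.sh.
echo '{"name":"demo","bytecode_size":100}' > /tmp/base.jsonl
echo '{"name":"demo","bytecode_size":90}' > /tmp/alt.jsonl

# Join base vs. alt by program name and compute the size ratio:
./scripts/bytecode-sizes/compare-bytecode-size.sh /tmp/base.jsonl /tmp/alt.jsonl
# Expected output:
# {"name":"demo","base_size":100,"alt_size":90,"ratio":0.9}
```

Since `plot-bytecode-size.sh` plots column 4 (`ratio`) against column 2 (`base_size`), points below 1 on the y-axis indicate programs whose bytecode shrank under the alternative encoding.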
diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Nargo.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Nargo.toml deleted file mode 100644 index c1dc76df394..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "bench_sha256_30" -version = "0.1.0" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Prover.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Prover.toml deleted file mode 100644 index 7792a9ab8e3..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/Prover.toml +++ /dev/null @@ -1,32 +0,0 @@ -input = [ - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], - [1,2], -] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr deleted file mode 100644 index 0a4288114e3..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr +++ /dev/null @@ -1,10 +0,0 @@ -global SIZE: u32 = 30; - -fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { - let mut results: [[u8; 32]; SIZE] = [[0; 32]; SIZE]; - for i in 0..SIZE { - results[i] = std::hash::sha256(input[i]); - } - - results -} diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Nargo.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Nargo.toml deleted file mode 100644 index ae66d7ed5a6..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "bench_sha256_long" -version = "0.1.0" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Prover.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Prover.toml deleted file mode 100644 index ba4bbc1540d..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/Prover.toml +++ /dev/null @@ -1,191 +0,0 @@ -# 2*64+60=188 bytes -input = [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 64, - 65, - 66, - 67, - 68, - 69, - 70, - 71, - 72, - 73, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 82, - 83, - 84, - 85, - 86, - 87, - 88, - 89, - 90, - 91, - 92, - 93, - 94, - 95, - 96, - 97, - 98, - 99, - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, - 63, - 64, - 65, - 66, - 67, - 68, - 69, - 70, - 71, - 72, - 73, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 82, - 83, - 84, - 85, - 86, - 87, -] diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/src/main.nr 
b/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/src/main.nr deleted file mode 100644 index c47bdc2a561..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_long/src/main.nr +++ /dev/null @@ -1,7 +0,0 @@ -// Input size long enough that we have to compress a few times -// and then pad the last block out. -global INPUT_SIZE: u32 = 2 * 64 + 60; - -fn main(input: [u8; INPUT_SIZE]) -> pub [u8; 32] { - std::hash::sha256(input) -} diff --git a/noir/noir-repo/test_programs/compilation_report.sh b/noir/noir-repo/test_programs/compilation_report.sh index 6f7ef254477..aa1bbef39de 100755 --- a/noir/noir-repo/test_programs/compilation_report.sh +++ b/noir/noir-repo/test_programs/compilation_report.sh @@ -6,7 +6,7 @@ current_dir=$(pwd) base_path="$current_dir/execution_success" # Tests to be profiled for compilation report -tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") +tests_to_profile=("regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") echo "[ " > $current_dir/compilation_report.json diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_as_field/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_as_field/src/main.nr deleted file mode 100644 index f5871bbed81..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_as_field/src/main.nr +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - comptime { - let _: U128 = U128::from_integer(1); - } -} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_as_field/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_as_primitive/Nargo.toml similarity index 100% rename from noir/noir-repo/test_programs/compile_success_empty/comptime_as_field/Nargo.toml rename to noir/noir-repo/test_programs/compile_success_empty/comptime_as_primitive/Nargo.toml diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_as_primitive/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_as_primitive/src/main.nr new file mode 100644 index 00000000000..392ccf2ebfc --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_as_primitive/src/main.nr @@ -0,0 +1,7 @@ +fn main() { + comptime { + let x: u64 = 1; + let y = x as Field; + let _ = y as u128; + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/Nargo.toml deleted file mode 100644 index 38a46ba0dbe..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "comptime_from_field" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/src/main.nr deleted file mode 100644 index 028722b94b2..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_from_field/src/main.nr +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - comptime { - let _: Field = U128::from_hex("0x0").to_integer(); - } -} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_function_definition/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_function_definition/src/main.nr index 57bcd2ba90e..59b0ee70f7a 100644 --- 
a/noir/noir-repo/test_programs/compile_success_empty/comptime_function_definition/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_function_definition/src/main.nr @@ -208,3 +208,33 @@ mod test_as_typed_expr_4 { } } } + +mod test_as_typed_expr_5 { + trait Trait {} + + impl Trait for i32 {} + + trait Packable { + fn pack(self); + } + + pub struct Foo {} + + impl Packable<10> for Foo + where + T: Trait, + { + fn pack(self) {} + } + + fn foo() { + comptime { + let foo = quote { Foo }.as_type(); + let t = quote { Packable<10> }.as_trait_constraint(); + let _ = foo.get_trait_impl(t).unwrap().methods().filter(|method| { + method.name() == quote { pack } + })[0] + .as_typed_expr(); + } + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/Nargo.toml deleted file mode 100644 index 47c8654804d..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "comptime_keccak" -type = "bin" -authors = [""] -compiler_version = ">=0.33.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/src/main.nr deleted file mode 100644 index dc4e88b7ab2..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_keccak/src/main.nr +++ /dev/null @@ -1,32 +0,0 @@ -// Tests a very simple program. -// -// The features being tested is keccak256 in brillig -fn main() { - comptime { - let x = 0xbd; - let result = [ - 0x5a, 0x50, 0x2f, 0x9f, 0xca, 0x46, 0x7b, 0x26, 0x6d, 0x5b, 0x78, 0x33, 0x65, 0x19, - 0x37, 0xe8, 0x05, 0x27, 0x0c, 0xa3, 0xf3, 0xaf, 0x1c, 0x0d, 0xd2, 0x46, 0x2d, 0xca, - 0x4b, 0x3b, 0x1a, 0xbf, - ]; - // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field - // The padding is taken care of by the program - let digest = keccak256([x as u8], 1); - assert(digest == result); - //#1399: variable message size - let message_size = 4; - let hash_a = keccak256([1, 2, 3, 4], message_size); - let hash_b = keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size); - - assert(hash_a == hash_b); - - let message_size_big = 8; - let hash_c = keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big); - - assert(hash_a != hash_c); - } -} - -comptime fn keccak256(data: [u8; N], msg_len: u32) -> [u8; 32] { - std::hash::keccak256(data, msg_len) -} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/Nargo.toml new file mode 100644 index 00000000000..67172a1cc99 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_quoted" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/src/main.nr new file mode 100644 index 00000000000..a620d66d86a --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_quoted/src/main.nr @@ -0,0 +1,8 @@ +fn main() { + comptime { + let array = quote { [1, 2, 3] }.as_expr().unwrap(); + let expr1 = quote { [1, 2, 3]}; + let expr2 = quote { $array }; + assert_eq(expr1, expr2); + 
} +} diff --git a/noir/noir-repo/test_programs/execution_report.sh b/noir/noir-repo/test_programs/execution_report.sh index 5c916ef6bd7..c7b12d99681 100755 --- a/noir/noir-repo/test_programs/execution_report.sh +++ b/noir/noir-repo/test_programs/execution_report.sh @@ -6,7 +6,7 @@ current_dir=$(pwd) base_path="$current_dir/execution_success" # Tests to be profiled for execution report -tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") +tests_to_profile=("regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") echo "[" > $current_dir/execution_report.json diff --git a/noir/noir-repo/test_programs/execution_success/6/Prover.toml b/noir/noir-repo/test_programs/execution_success/6/Prover.toml index 1c52aef063c..d370032bd53 100644 --- a/noir/noir-repo/test_programs/execution_success/6/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/6/Prover.toml @@ -3,37 +3,4 @@ # used : https://emn178.github.io/online-tools/sha256.html x = [104, 101, 108, 108, 111] -result = [ - 0x2c, - 0xf2, - 0x4d, - 0xba, - 0x5f, - 0xb0, - 0xa3, - 0x0e, - 0x26, - 0xe8, - 0x3b, - 0x2a, - 0xc5, - 0xb9, - 0xe2, - 0x9e, - 0x1b, - 0x16, - 0x1e, - 0x5c, - 0x1f, - 0xa7, - 0x42, - 0x5e, - 0x73, - 0x04, - 0x33, - 0x62, - 0x93, - 0x8b, - 0x98, - 0x24, -] +result = [234, 143, 22, 61, 179, 134, 130, 146, 94, 68, 145, 197, 229, 141, 75, 179, 80, 110, 248, 193, 78, 183, 138, 134, 233, 8, 197, 98, 74, 103, 32, 15] diff --git a/noir/noir-repo/test_programs/execution_success/6/src/main.nr b/noir/noir-repo/test_programs/execution_success/6/src/main.nr index 5b71174614f..e950e309df2 100644 --- a/noir/noir-repo/test_programs/execution_success/6/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/6/src/main.nr @@ -1,11 +1,11 @@ -// Sha256 circuit where the input is 5 bytes -// not five field elements since sha256 operates over +// blake3 circuit where the input is 5 bytes +// not five field elements since blake3 operates over // bytes. // // If you do not cast, it will take all the bytes from the field element! 
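+// Note: blake3, like sha256, yields a 32-byte digest, so `result: pub [u8; 32]` is unchanged.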
fn main(x: [u8; 5], result: pub [u8; 32]) { - let mut digest = std::hash::sha256(x); + let mut digest = std::hash::blake3(x); digest[0] = 5 as u8; - digest = std::hash::sha256(x); + digest = std::hash::blake3(x); assert(digest == result); } diff --git a/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml index cc60eb8a8ba..f852a79a103 100644 --- a/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml @@ -1,4 +1,4 @@ index = "1" leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"] path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"] -root = [79, 230, 126, 184, 98, 125, 226, 58, 117, 45, 140, 15, 72, 118, 89, 173, 117, 161, 166, 0, 214, 125, 13, 16, 113, 81, 173, 156, 97, 15, 57, 216] +root = [186, 47, 168, 70, 152, 149, 203, 90, 138, 188, 96, 15, 111, 179, 82, 106, 198, 166, 172, 38, 110, 187, 182, 64, 29, 101, 171, 221, 89, 105, 243, 22] diff --git a/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr index 260d609928b..603ae704d70 100644 --- a/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -18,7 +18,7 @@ fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { hash_input[j + b] = path[offset + j]; } - current = std::hash::sha256(hash_input); + current = std::hash::blake3(hash_input); index = index >> 1; } diff --git a/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml index 1f291532414..85b480415b1 100644 --- a/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml @@ -1,5 +1,5 @@ y = "3" -hash_result = [50, 53, 90, 252, 105, 236, 223, 30, 135, 229, 193, 172, 51, 139, 8, 32, 188, 104, 151, 115, 129, 168, 27, 71, 203, 47, 40, 228, 89, 177, 129, 100] +hash_result = [77, 43, 36, 42, 132, 232, 186, 191, 119, 43, 192, 121, 66, 137, 143, 205, 13, 23, 80, 25, 162, 45, 100, 31, 178, 150, 138, 4, 72, 73, 120, 70] [[x]] a = "1" @@ -20,4 +20,4 @@ a = "7" b = ["8", "9", "22"] [x.bar] -inner = ["106", "107", "108"] \ No newline at end of file +inner = ["106", "107", "108"] diff --git a/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr index 15a2747eaa9..14f110a23a0 100644 --- 
a/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr @@ -15,6 +15,6 @@ fn main(mut x: [Foo; 3], y: pub Field, hash_result: pub [u8; 32]) { // Make sure that we are passing a dynamic array to the black box function call // by setting the array using a dynamic index here hash_input[y - 1] = 0; - let hash = std::hash::sha256(hash_input); + let hash = std::hash::blake3(hash_input); assert_eq(hash, hash_result); } diff --git a/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr index 69273bc3dca..4b70f2961b6 100644 --- a/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -172,6 +172,6 @@ unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA } } - let sha_digest = std::hash::sha256(hash_input_flattened); - U256::from_bytes32(sha_digest).to_u128_limbs() + let blake3_digest = std::hash::blake3(hash_input_flattened); + U256::from_bytes32(blake3_digest).to_u128_limbs() } diff --git a/noir/noir-repo/test_programs/execution_success/conditional_1/Prover.toml b/noir/noir-repo/test_programs/execution_success/conditional_1/Prover.toml index baad8be126a..b06d750fda0 100644 --- a/noir/noir-repo/test_programs/execution_success/conditional_1/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/conditional_1/Prover.toml @@ -3,7 +3,7 @@ a=0 x = [104, 101, 108, 108, 111] result = [ - 0x2c, + 234, 0xf2, 0x4d, 0xba, diff --git a/noir/noir-repo/test_programs/execution_success/conditional_1/src/main.nr b/noir/noir-repo/test_programs/execution_success/conditional_1/src/main.nr index eedb8a697d1..e0292c733bd 100644 --- a/noir/noir-repo/test_programs/execution_success/conditional_1/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/conditional_1/src/main.nr @@ -53,7 +53,7 @@ fn main(a: u32, mut c: [u32; 4], x: [u8; 5], result: pub [u8; 32]) { let mut y = 0; if a == 0 { - let digest = std::hash::sha256(x); + let digest = std::hash::blake3(x); y = digest[0]; } else { y = 5; diff --git a/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/Prover.toml b/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/Prover.toml index baad8be126a..5f098ae39d8 100644 --- a/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/Prover.toml @@ -2,37 +2,4 @@ c=[2, 4, 3, 0, ] a=0 x = [104, 101, 108, 108, 111] -result = [ - 0x2c, - 0xf2, - 0x4d, - 0xba, - 0x5f, - 0xb0, - 0xa3, - 0x0e, - 0x26, - 0xe8, - 0x3b, - 0x2a, - 0xc5, - 0xb9, - 0xe2, - 0x9e, - 0x1b, - 0x16, - 0x1e, - 0x5c, - 0x1f, - 0xa7, - 0x42, - 0x5e, - 0x73, - 0x04, - 0x33, - 0x62, - 0x93, - 0x8b, - 0x98, - 0x24, -] +result = [234, 143, 22, 61, 179, 134, 130, 146, 94, 68, 145, 197, 229, 141, 75, 179, 80, 110, 248, 193, 78, 183, 138, 134, 233, 8, 197, 98, 74, 103, 32, 15] diff --git a/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/src/main.nr b/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/src/main.nr index de5ad20a642..9312655c8f3 100644 --- 
a/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/conditional_regression_short_circuit/src/main.nr @@ -24,9 +24,9 @@ fn bar(x: Field) { } fn call_intrinsic(x: [u8; 5], result: [u8; 32]) { - let mut digest = std::hash::sha256(x); + let mut digest = std::hash::blake3(x); digest[0] = 5 as u8; - digest = std::hash::sha256(x); + digest = std::hash::blake3(x); assert(digest == result); } diff --git a/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/Prover.toml b/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/Prover.toml index 412c7b36e4c..e78fc19cb71 100644 --- a/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/Prover.toml @@ -33,46 +33,6 @@ hashed_message = [ 0xc1, 0xe2, ] -message = [ - 0x49, - 0x6e, - 0x73, - 0x74, - 0x72, - 0x75, - 0x63, - 0x74, - 0x69, - 0x6f, - 0x6e, - 0x73, - 0x20, - 0x75, - 0x6e, - 0x63, - 0x6c, - 0x65, - 0x61, - 0x72, - 0x2c, - 0x20, - 0x61, - 0x73, - 0x6b, - 0x20, - 0x61, - 0x67, - 0x61, - 0x69, - 0x6e, - 0x20, - 0x6c, - 0x61, - 0x74, - 0x65, - 0x72, - 0x2e, -] pub_key_x = [ 0xa0, 0x43, diff --git a/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/src/main.nr b/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/src/main.nr index 00d420089fc..1c94bf8961e 100644 --- a/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/ecdsa_secp256k1/src/main.nr @@ -1,14 +1,4 @@ -fn main( - message: [u8; 38], - hashed_message: [u8; 32], - pub_key_x: [u8; 32], - pub_key_y: [u8; 32], - signature: [u8; 64], -) { - // Hash the message, since secp256k1 expects a hashed_message - let expected = std::hash::sha256(message); - assert(hashed_message == expected); - +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); assert(valid_signature); diff --git a/noir/noir-repo/test_programs/execution_success/keccak256/Prover.toml b/noir/noir-repo/test_programs/execution_success/keccak256/Prover.toml deleted file mode 100644 index d65c4011d3f..00000000000 --- a/noir/noir-repo/test_programs/execution_success/keccak256/Prover.toml +++ /dev/null @@ -1,35 +0,0 @@ -x = 0xbd -result = [ - 0x5a, - 0x50, - 0x2f, - 0x9f, - 0xca, - 0x46, - 0x7b, - 0x26, - 0x6d, - 0x5b, - 0x78, - 0x33, - 0x65, - 0x19, - 0x37, - 0xe8, - 0x05, - 0x27, - 0x0c, - 0xa3, - 0xf3, - 0xaf, - 0x1c, - 0x0d, - 0xd2, - 0x46, - 0x2d, - 0xca, - 0x4b, - 0x3b, - 0x1a, - 0xbf, -] diff --git a/noir/noir-repo/test_programs/execution_success/keccak256/src/main.nr b/noir/noir-repo/test_programs/execution_success/keccak256/src/main.nr deleted file mode 100644 index 1e13fa028b7..00000000000 --- a/noir/noir-repo/test_programs/execution_success/keccak256/src/main.nr +++ /dev/null @@ -1,20 +0,0 @@ -// docs:start:keccak256 -fn main(x: Field, result: [u8; 32]) { - // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field - // The padding is taken care of by the program - let digest = std::hash::keccak256([x as u8], 1); - assert(digest == result); - - //#1399: variable message size - let message_size = 4; - let hash_a = std::hash::keccak256([1, 2, 3, 4], message_size); - let hash_b = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], 
message_size);
-
-    assert(hash_a == hash_b);
-
-    let message_size_big = 8;
-    let hash_c = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big);
-
-    assert(hash_a != hash_c);
-}
-// docs:end:keccak256
diff --git a/noir/noir-repo/test_programs/execution_success/merkle_insert/src/main.nr b/noir/noir-repo/test_programs/execution_success/merkle_insert/src/main.nr
index 25a455c90b8..42a261ae6c6 100644
--- a/noir/noir-repo/test_programs/execution_success/merkle_insert/src/main.nr
+++ b/noir/noir-repo/test_programs/execution_success/merkle_insert/src/main.nr
@@ -6,8 +6,23 @@ fn main(
    leaf: Field,
    index: Field,
) {
-    assert(old_root == std::merkle::compute_merkle_root(old_leaf, index, old_hash_path));
+    assert(old_root == compute_merkle_root(old_leaf, index, old_hash_path));

-    let calculated_root = std::merkle::compute_merkle_root(leaf, index, old_hash_path);
+    let calculated_root = compute_merkle_root(leaf, index, old_hash_path);
    assert(new_root == calculated_root);
}
+
+fn compute_merkle_root<let N: u32>(leaf: Field, index: Field, hash_path: [Field; N]) -> Field {
+    let index_bits: [u1; N] = index.to_le_bits();
+    let mut current = leaf;
+    for i in 0..N {
+        let path_bit = index_bits[i] as bool;
+        let (hash_left, hash_right) = if path_bit {
+            (hash_path[i], current)
+        } else {
+            (current, hash_path[i])
+        };
+        current = std::hash::pedersen_hash([hash_left, hash_right]);
+    }
+    current
+}
diff --git a/noir/noir-repo/test_programs/execution_success/ram_blowup_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/ram_blowup_regression/src/main.nr
index 6deb54dd21d..5f63bf03e55 100644
--- a/noir/noir-repo/test_programs/execution_success/ram_blowup_regression/src/main.nr
+++ b/noir/noir-repo/test_programs/execution_success/ram_blowup_regression/src/main.nr
@@ -20,9 +20,9 @@ pub fn field_from_bytes_32_trunc(bytes32: [u8; 32]) -> Field {
    low + high * v
}

-pub fn sha256_to_field<let N: u32>(bytes_to_hash: [u8; N]) -> Field {
-    let sha256_hashed = std::hash::sha256(bytes_to_hash);
-    let hash_in_a_field = field_from_bytes_32_trunc(sha256_hashed);
+pub fn blake3_to_field<let N: u32>(bytes_to_hash: [u8; N]) -> Field {
+    let blake3_hashed = std::hash::blake3(bytes_to_hash);
+    let hash_in_a_field = field_from_bytes_32_trunc(blake3_hashed);

    hash_in_a_field
}
@@ -36,6 +36,6 @@ fn main(tx_effects_hash_input: [Field; TX_EFFECTS_HASH_INPUT_FIELDS]) -> pub Fie
        }
    }

-    let sha_digest = sha256_to_field(hash_input_flattened);
-    sha_digest
+    let blake3_digest = blake3_to_field(hash_input_flattened);
+    blake3_digest
}
diff --git a/noir/noir-repo/test_programs/execution_success/regression_4449/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_4449/Prover.toml
index 81af476bcc9..12b95c0dbfa 100644
--- a/noir/noir-repo/test_programs/execution_success/regression_4449/Prover.toml
+++ b/noir/noir-repo/test_programs/execution_success/regression_4449/Prover.toml
@@ -1,3 +1,3 @@
x = 0xbd
-result = [204, 59, 83, 197, 18, 1, 128, 43, 247, 28, 104, 225, 106, 13, 20, 187, 42, 26, 67, 150, 48, 75, 238, 168, 121, 247, 142, 160, 71, 222, 97, 188]
\ No newline at end of file
+result = [3, 128, 126, 121, 21, 242, 202, 74, 58, 183, 180, 171, 169, 186, 245, 81, 206, 26, 69, 29, 25, 207, 152, 152, 52, 33, 40, 106, 200, 237, 90, 156]
diff --git a/noir/noir-repo/test_programs/execution_success/regression_4449/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_4449/src/main.nr
index 3fda39bd874..88b80dabc7f 100644
--- a/noir/noir-repo/test_programs/execution_success/regression_4449/src/main.nr
+++
b/noir/noir-repo/test_programs/execution_success/regression_4449/src/main.nr
@@ -5,7 +5,7 @@ fn main(x: u8, result: [u8; 32]) {
    for i in 0..70 {
        let y = x + i;
        let a = [y, x, 32, 0, y + 1, y - 1, y - 2, 5];
-        digest = std::sha256::digest(a);
+        digest = std::hash::blake3(a);
    }

    assert(digest == result);
diff --git a/noir/noir-repo/test_programs/execution_success/sha2_byte/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_7323/Nargo.toml
similarity index 68%
rename from noir/noir-repo/test_programs/execution_success/sha2_byte/Nargo.toml
rename to noir/noir-repo/test_programs/execution_success/regression_7323/Nargo.toml
index efd691fce58..588887df5bb 100644
--- a/noir/noir-repo/test_programs/execution_success/sha2_byte/Nargo.toml
+++ b/noir/noir-repo/test_programs/execution_success/regression_7323/Nargo.toml
@@ -1,5 +1,5 @@
[package]
-name = "sha2_byte"
+name = "regression_7323"
type = "bin"
authors = [""]
diff --git a/noir/noir-repo/test_programs/execution_success/regression_7323/Prover.toml b/noir/noir-repo/test_programs/execution_success/regression_7323/Prover.toml
new file mode 100644
index 00000000000..fed79d05f4a
--- /dev/null
+++ b/noir/noir-repo/test_programs/execution_success/regression_7323/Prover.toml
@@ -0,0 +1 @@
+x = 5
diff --git a/noir/noir-repo/test_programs/execution_success/regression_7323/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_7323/src/main.nr
new file mode 100644
index 00000000000..9a7bcd6f941
--- /dev/null
+++ b/noir/noir-repo/test_programs/execution_success/regression_7323/src/main.nr
@@ -0,0 +1,11 @@
+// This program previously led to panics because the compiler lowered it to multiple blocks
+// which all jumped to the same end block. It now runs because the compiler lowers it to the
+// equivalent of a nested series of if-else expressions instead.
+fn main(x: Field) {
+    match x {
+        1 => panic(f"Branch 1 should not be taken"),
+        2 => panic(f"Branch 2 should not be taken"),
+        3 => panic(f"Branch 3 should not be taken"),
+        _ => (),
+    }
+}
diff --git a/noir/noir-repo/test_programs/execution_success/sha256/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256/Nargo.toml
deleted file mode 100644
index 255d2156ef6..00000000000
--- a/noir/noir-repo/test_programs/execution_success/sha256/Nargo.toml
+++ /dev/null
@@ -1,6 +0,0 @@
-[package]
-name = "sha256"
-type = "bin"
-authors = [""]
-
-[dependencies]
diff --git a/noir/noir-repo/test_programs/execution_success/sha256/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256/Prover.toml
deleted file mode 100644
index b4bf4162370..00000000000
--- a/noir/noir-repo/test_programs/execution_success/sha256/Prover.toml
+++ /dev/null
@@ -1,38 +0,0 @@
-
-x = 0xbd
-result = [
-    0x68,
-    0x32,
-    0x57,
-    0x20,
-    0xaa,
-    0xbd,
-    0x7c,
-    0x82,
-    0xf3,
-    0x0f,
-    0x55,
-    0x4b,
-    0x31,
-    0x3d,
-    0x05,
-    0x70,
-    0xc9,
-    0x5a,
-    0xcc,
-    0xbb,
-    0x7d,
-    0xc4,
-    0xb5,
-    0xaa,
-    0xe1,
-    0x12,
-    0x04,
-    0xc0,
-    0x8f,
-    0xfe,
-    0x73,
-    0x2b,
-]
-input = [0, 0]
-toggle = false
\ No newline at end of file
diff --git a/noir/noir-repo/test_programs/execution_success/sha256/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256/src/main.nr
deleted file mode 100644
index 8e5e46b9837..00000000000
--- a/noir/noir-repo/test_programs/execution_success/sha256/src/main.nr
+++ /dev/null
@@ -1,27 +0,0 @@
-// Sha256 example
-//
-// Calls Sha256 from the standard library.
-// -// The Compiler sees this special function and creates an ACIR gate -// -// The ACIR SHA256 gate is passed to PLONK who should -// know how to create the necessary constraints. -// -// Not yet here: For R1CS, it is more about manipulating arithmetic gates to get performance -// This can be done in ACIR! -fn main(x: Field, result: [u8; 32], input: [u8; 2], toggle: bool) { - // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field - // The padding is taken care of by the program - // docs:start:sha256_var - let digest = std::hash::sha256_var([x as u8], 1); - // docs:end:sha256_var - assert(digest == result); - - let digest = std::hash::sha256([x as u8]); - assert(digest == result); - - // variable size - let size: Field = 1 + toggle as Field; - let var_sha = std::hash::sha256_var(input, size as u64); - assert(var_sha == std::hash::sha256_var(input, 1)); -} diff --git a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Nargo.toml deleted file mode 100644 index f7076311e1d..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "sha256_brillig_performance_regression" -type = "bin" -authors = [""] -compiler_version = ">=0.33.0" - -[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Prover.toml deleted file mode 100644 index 5bb7f354257..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/Prover.toml +++ /dev/null @@ -1,16 +0,0 @@ -input_amount = "1" -minimum_output_amount = "2" -secret_hash_for_L1_to_l2_message = "3" -uniswap_fee_tier = "4" - -[aztec_recipient] -inner = "5" - -[caller_on_L1] -inner = "6" - -[input_asset_bridge_portal_address] -inner = "7" - -[output_asset_bridge_portal_address] -inner = "8" diff --git a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/src/main.nr deleted file mode 100644 index 42cc6d4ff3b..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_brillig_performance_regression/src/main.nr +++ /dev/null @@ -1,104 +0,0 @@ -// Performance regression extracted from an aztec protocol contract. 
-
-unconstrained fn main(
-    input_asset_bridge_portal_address: EthAddress,
-    input_amount: Field,
-    uniswap_fee_tier: Field,
-    output_asset_bridge_portal_address: EthAddress,
-    minimum_output_amount: Field,
-    aztec_recipient: AztecAddress,
-    secret_hash_for_L1_to_l2_message: Field,
-    caller_on_L1: EthAddress,
-) -> pub Field {
-    let mut hash_bytes = [0; 260]; // 8 fields of 32 bytes each + 4 bytes fn selector
-    let input_token_portal_bytes: [u8; 32] =
-        input_asset_bridge_portal_address.to_field().to_be_bytes();
-    let in_amount_bytes: [u8; 32] = input_amount.to_be_bytes();
-    let uniswap_fee_tier_bytes: [u8; 32] = uniswap_fee_tier.to_be_bytes();
-    let output_token_portal_bytes: [u8; 32] =
-        output_asset_bridge_portal_address.to_field().to_be_bytes();
-    let amount_out_min_bytes: [u8; 32] = minimum_output_amount.to_be_bytes();
-    let aztec_recipient_bytes: [u8; 32] = aztec_recipient.to_field().to_be_bytes();
-    let secret_hash_for_L1_to_l2_message_bytes: [u8; 32] =
-        secret_hash_for_L1_to_l2_message.to_be_bytes();
-    let caller_on_L1_bytes: [u8; 32] = caller_on_L1.to_field().to_be_bytes();
-
-    // The purpose of including the following selector is to make the message unique to that specific call. Note that
-    // it has nothing to do with calling the function.
-    let selector = comptime {
-        std::hash::keccak256(
-            "swap_public(address,uint256,uint24,address,uint256,bytes32,bytes32,address)".as_bytes(),
-            75,
-        )
-    };
-
-    hash_bytes[0] = selector[0];
-    hash_bytes[1] = selector[1];
-    hash_bytes[2] = selector[2];
-    hash_bytes[3] = selector[3];
-
-    for i in 0..32 {
-        hash_bytes[i + 4] = input_token_portal_bytes[i];
-        hash_bytes[i + 36] = in_amount_bytes[i];
-        hash_bytes[i + 68] = uniswap_fee_tier_bytes[i];
-        hash_bytes[i + 100] = output_token_portal_bytes[i];
-        hash_bytes[i + 132] = amount_out_min_bytes[i];
-        hash_bytes[i + 164] = aztec_recipient_bytes[i];
-        hash_bytes[i + 196] = secret_hash_for_L1_to_l2_message_bytes[i];
-        hash_bytes[i + 228] = caller_on_L1_bytes[i];
-    }
-
-    let content_hash = sha256_to_field(hash_bytes);
-    content_hash
-}
-
-// Convert a 32 byte array to a field element by truncating the final byte
-pub fn field_from_bytes_32_trunc(bytes32: [u8; 32]) -> Field {
-    // Convert it to a field element
-    let mut v = 1;
-    let mut high = 0 as Field;
-    let mut low = 0 as Field;
-
-    for i in 0..15 {
-        // covers bytes 16..30 (31 is truncated and ignored)
-        low = low + (bytes32[15 + 15 - i] as Field) * v;
-        v = v * 256;
-        // covers bytes 0..14
-        high = high + (bytes32[14 - i] as Field) * v;
-    }
-    // covers byte 15
-    low = low + (bytes32[15] as Field) * v;
-
-    low + high * v
-}
-
-pub fn sha256_to_field<let N: u32>(bytes_to_hash: [u8; N]) -> Field {
-    let sha256_hashed = std::hash::sha256(bytes_to_hash);
-    let hash_in_a_field = field_from_bytes_32_trunc(sha256_hashed);
-
-    hash_in_a_field
-}
-
-pub trait ToField {
-    fn to_field(self) -> Field;
-}
-
-pub struct EthAddress {
-    inner: Field,
-}
-
-impl ToField for EthAddress {
-    fn to_field(self) -> Field {
-        self.inner
-    }
-}
-
-pub struct AztecAddress {
-    pub inner: Field,
-}
-
-impl ToField for AztecAddress {
-    fn to_field(self) -> Field {
-        self.inner
-    }
-}
diff --git a/noir/noir-repo/test_programs/execution_success/sha256_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256_regression/Nargo.toml
deleted file mode 100644
index ce98d000bcb..00000000000
--- a/noir/noir-repo/test_programs/execution_success/sha256_regression/Nargo.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[package]
-name = "sha256_regression"
-type = "bin"
-authors = [""]
-compiler_version = ">=0.33.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256_regression/Prover.toml deleted file mode 100644 index ea0a0f2e8a7..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_regression/Prover.toml +++ /dev/null @@ -1,14 +0,0 @@ -msg_just_over_block = [102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116] -msg_multiple_of_block = [102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 115, 45, 97, 115, 99, 105, 105, 13, 10, 109, 105, 109, 101, 45, 118, 101, 114, 115, 105, 111, 110, 58, 49, 46, 48, 32, 40, 77, 97, 99, 32, 79, 83, 32, 88, 32, 77, 97, 105, 108, 32, 49, 54, 46, 48, 32, 92, 40, 51, 55, 51, 49, 46, 53, 48, 48, 46, 50, 51, 49, 92, 41, 41, 13, 10, 115, 117, 98, 106, 101, 99, 116, 58, 72, 101, 108, 108, 111, 13, 10, 109, 101, 115, 115, 97, 103, 101, 45, 105, 100, 58, 60, 56, 70, 56, 49, 57, 68, 51, 50, 45, 66, 54, 65, 67, 45, 52, 56, 57, 68, 45, 57, 55, 55, 70, 45, 52, 51, 56, 66, 66, 67, 52, 67, 65, 66, 50, 55, 64, 109, 101, 46, 99, 111, 109, 62, 13, 10, 100, 97, 116, 101, 58, 83, 97, 116, 44, 32, 50, 54, 32, 65, 117, 103, 32, 50, 48, 50, 51, 32, 49, 50, 58, 50, 53, 58, 50, 50, 32, 43, 48, 52, 48, 48, 13, 10, 116, 111, 58, 122, 107, 101, 119, 116, 101, 115, 116, 64, 103, 109, 97, 105, 108, 46, 99, 111, 109, 13, 10, 100, 107, 105, 109, 45, 115, 105, 103, 110, 97, 116, 117, 114, 101, 58, 118, 61, 49, 59, 32, 97, 61, 114, 115, 97, 45, 115, 104, 97, 50, 53, 54, 59, 32, 99, 61, 114, 101, 108, 97, 120, 101, 100, 47, 114, 101, 108, 97, 120, 101, 100, 59, 32, 100, 61, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 59, 32, 115, 61, 49, 97, 49, 104, 97, 105, 59, 32, 116, 61, 49, 54, 57, 51, 48, 51, 56, 51, 51, 55, 59, 32, 98, 104, 61, 55, 120, 81, 77, 68, 117, 111, 86, 86, 85, 52, 109, 48, 87, 48, 87, 82, 86, 83, 114, 86, 88, 77, 101, 71, 83, 73, 65, 83, 115, 110, 117, 99, 75, 57, 100, 74, 115, 114, 99, 43, 118, 85, 61, 59, 32, 104, 61, 102, 114, 111, 109, 58, 67, 111, 110, 116, 101, 110, 116, 45, 84, 121, 112, 101, 58, 77, 105, 109, 101, 45, 86, 101, 114, 115, 105, 111, 110, 58, 83, 117, 98, 106, 101, 99] -msg_just_under_block = [102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, 108, 97, 105, 110, 59] -msg_big_not_block_multiple = [102, 114, 111, 109, 58, 114, 117, 110, 110, 105, 101, 114, 46, 108, 101, 97, 103, 117, 101, 115, 46, 48, 106, 64, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 13, 10, 99, 111, 110, 116, 101, 110, 116, 45, 116, 121, 112, 101, 58, 116, 101, 120, 116, 47, 112, 108, 97, 105, 110, 59, 32, 99, 104, 97, 114, 115, 101, 116, 61, 117, 115, 45, 97, 115, 99, 105, 105, 13, 10, 109, 105, 109, 101, 45, 118, 101, 114, 115, 105, 111, 110, 58, 49, 46, 48, 32, 
40, 77, 97, 99, 32, 79, 83, 32, 88, 32, 77, 97, 105, 108, 32, 49, 54, 46, 48, 32, 92, 40, 51, 55, 51, 49, 46, 53, 48, 48, 46, 50, 51, 49, 92, 41, 41, 13, 10, 115, 117, 98, 106, 101, 99, 116, 58, 72, 101, 108, 108, 111, 13, 10, 109, 101, 115, 115, 97, 103, 101, 45, 105, 100, 58, 60, 56, 70, 56, 49, 57, 68, 51, 50, 45, 66, 54, 65, 67, 45, 52, 56, 57, 68, 45, 57, 55, 55, 70, 45, 52, 51, 56, 66, 66, 67, 52, 67, 65, 66, 50, 55, 64, 109, 101, 46, 99, 111, 109, 62, 13, 10, 100, 97, 116, 101, 58, 83, 97, 116, 44, 32, 50, 54, 32, 65, 117, 103, 32, 50, 48, 50, 51, 32, 49, 50, 58, 50, 53, 58, 50, 50, 32, 43, 48, 52, 48, 48, 13, 10, 116, 111, 58, 122, 107, 101, 119, 116, 101, 115, 116, 64, 103, 109, 97, 105, 108, 46, 99, 111, 109, 13, 10, 100, 107, 105, 109, 45, 115, 105, 103, 110, 97, 116, 117, 114, 101, 58, 118, 61, 49, 59, 32, 97, 61, 114, 115, 97, 45, 115, 104, 97, 50, 53, 54, 59, 32, 99, 61, 114, 101, 108, 97, 120, 101, 100, 47, 114, 101, 108, 97, 120, 101, 100, 59, 32, 100, 61, 105, 99, 108, 111, 117, 100, 46, 99, 111, 109, 59, 32, 115, 61, 49, 97, 49, 104, 97, 105, 59, 32, 116, 61, 49, 54, 57, 51, 48, 51, 56, 51, 51, 55, 59, 32, 98, 104, 61, 55, 120, 81, 77, 68, 117, 111, 86, 86, 85, 52, 109, 48, 87, 48, 87, 82, 86, 83, 114, 86, 88, 77, 101, 71, 83, 73, 65, 83, 115, 110, 117, 99, 75, 57, 100, 74, 115, 114, 99, 43, 118, 85, 61, 59, 32, 104, 61, 102, 114, 111, 109, 58, 67, 111, 110, 116, 101, 110, 116, 45, 84, 121, 112, 101, 58, 77, 105, 109, 101, 45, 86, 101, 114, 115, 105, 111, 110, 58, 83, 117, 98, 106, 101, 99, 116, 58, 77, 101, 115, 115, 97, 103, 101, 45, 73, 100, 58, 68, 97, 116, 101, 58, 116, 111, 59, 32, 98, 61] -msg_big_with_padding = [48, 130, 1, 37, 2, 1, 0, 48, 11, 6, 9, 96, 134, 72, 1, 101, 3, 4, 2, 1, 48, 130, 1, 17, 48, 37, 2, 1, 1, 4, 32, 176, 223, 31, 133, 108, 84, 158, 102, 70, 11, 165, 175, 196, 12, 201, 130, 25, 131, 46, 125, 156, 194, 28, 23, 55, 133, 157, 164, 135, 136, 220, 78, 48, 37, 2, 1, 2, 4, 32, 190, 82, 180, 235, 222, 33, 79, 50, 152, 136, 142, 35, 116, 224, 6, 242, 156, 141, 128, 248, 10, 61, 98, 86, 248, 45, 207, 210, 90, 232, 175, 38, 48, 37, 2, 1, 3, 4, 32, 0, 194, 104, 108, 237, 246, 97, 230, 116, 198, 69, 110, 26, 87, 17, 89, 110, 199, 108, 250, 36, 21, 39, 87, 110, 102, 250, 213, 174, 131, 171, 174, 48, 37, 2, 1, 11, 4, 32, 136, 155, 87, 144, 111, 15, 152, 127, 85, 25, 154, 81, 20, 58, 51, 75, 193, 116, 234, 0, 60, 30, 29, 30, 183, 141, 72, 247, 255, 203, 100, 124, 48, 37, 2, 1, 12, 4, 32, 41, 234, 106, 78, 31, 11, 114, 137, 237, 17, 92, 71, 134, 47, 62, 78, 189, 233, 201, 214, 53, 4, 47, 189, 201, 133, 6, 121, 34, 131, 64, 142, 48, 37, 2, 1, 13, 4, 32, 91, 222, 210, 193, 62, 222, 104, 82, 36, 41, 138, 253, 70, 15, 148, 208, 156, 45, 105, 171, 241, 195, 185, 43, 217, 162, 146, 201, 222, 89, 238, 38, 48, 37, 2, 1, 14, 4, 32, 76, 123, 216, 13, 51, 227, 72, 245, 59, 193, 238, 166, 103, 49, 23, 164, 171, 188, 194, 197, 156, 187, 249, 28, 198, 95, 69, 15, 182, 56, 54, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] -msg_big_no_padding = [48, 130, 1, 37, 2, 1, 0, 48, 11, 6, 9, 96, 134, 72, 1, 101, 3, 4, 2, 1, 48, 130, 1, 17, 48, 37, 2, 1, 1, 4, 32, 176, 223, 31, 133, 108, 84, 158, 102, 70, 11, 165, 175, 196, 12, 201, 130, 25, 131, 46, 125, 156, 194, 28, 23, 55, 133, 157, 164, 135, 136, 220, 78, 48, 37, 2, 1, 2, 4, 32, 190, 82, 180, 235, 222, 33, 79, 50, 152, 136, 142, 35, 116, 224, 6, 242, 156, 141, 128, 248, 10, 61, 98, 86, 248, 45, 207, 210, 90, 232, 175, 38, 48, 37, 2, 1, 3, 4, 32, 0, 194, 104, 108, 237, 246, 97, 230, 116, 198, 69, 110, 26, 87, 17, 89, 110, 199, 108, 250, 36, 21, 39, 87, 110, 102, 250, 213, 174, 131, 171, 174, 48, 37, 2, 1, 11, 4, 32, 136, 155, 87, 144, 111, 15, 152, 127, 85, 25, 154, 81, 20, 58, 51, 75, 193, 116, 234, 0, 60, 30, 29, 30, 183, 141, 72, 247, 255, 203, 100, 124, 48, 37, 2, 1, 12, 4, 32, 41, 234, 106, 78, 31, 11, 114, 137, 237, 17, 92, 71, 134, 47, 62, 78, 189, 233, 201, 214, 53, 4, 47, 189, 201, 133, 6, 121, 34, 131, 64, 142, 48, 37, 2, 1, 13, 4, 32, 91, 222, 210, 193, 62, 222, 104, 82, 36, 41, 138, 253, 70, 15, 148, 208, 156, 45, 105, 171, 241, 195, 185, 43, 217, 162, 146, 201, 222, 89, 238, 38, 48, 37, 2, 1, 14, 4, 32, 76, 123, 216, 13, 51, 227, 72, 245, 59, 193, 238, 166, 103, 49, 23, 164, 171, 188, 194, 197, 156, 187, 249, 28, 198, 95, 69, 15, 182, 56, 54, 38] -message_size = 297 - -# Results matched against ethers library -result_just_over_block = [91, 122, 146, 93, 52, 109, 133, 148, 171, 61, 156, 70, 189, 238, 153, 7, 222, 184, 94, 24, 65, 114, 192, 244, 207, 199, 87, 232, 192, 224, 171, 207] -result_multiple_of_block = [116, 90, 151, 31, 78, 22, 138, 180, 211, 189, 69, 76, 227, 200, 155, 29, 59, 123, 154, 60, 47, 153, 203, 129, 157, 251, 48, 2, 79, 11, 65, 47] -result_just_under_block = [143, 140, 76, 173, 222, 123, 102, 68, 70, 149, 207, 43, 39, 61, 34, 79, 216, 252, 213, 165, 74, 16, 110, 74, 29, 64, 138, 167, 30, 1, 9, 119] -result_big = [112, 144, 73, 182, 208, 98, 9, 238, 54, 229, 61, 145, 222, 17, 72, 62, 148, 222, 186, 55, 192, 82, 220, 35, 66, 47, 193, 200, 22, 38, 26, 186] -result_big_with_padding = [32, 85, 108, 174, 127, 112, 178, 182, 8, 43, 134, 123, 192, 211, 131, 66, 184, 240, 212, 181, 240, 180, 106, 195, 24, 117, 54, 129, 19, 10, 250, 53] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256_regression/src/main.nr deleted file mode 100644 index dbbcc07e501..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_regression/src/main.nr +++ /dev/null @@ -1,39 +0,0 @@ -// A bunch of different test cases for sha256_var in the stdlib -fn main( - msg_just_over_block: [u8; 68], - result_just_over_block: pub [u8; 32], - msg_multiple_of_block: [u8; 448], - result_multiple_of_block: pub [u8; 32], - // We want to make sure we are testing a message with a size >= 57 but < 64 - msg_just_under_block: [u8; 60], - 
result_just_under_block: pub [u8; 32], - msg_big_not_block_multiple: [u8; 472], - result_big: pub [u8; 32], - // This message is only 297 elements and we want to hash only a variable amount - msg_big_with_padding: [u8; 700], - // This is the same as `msg_big_with_padding` but with no padding - msg_big_no_padding: [u8; 297], - message_size: u64, - result_big_with_padding: pub [u8; 32], -) { - let hash = std::hash::sha256_var(msg_just_over_block, msg_just_over_block.len() as u64); - assert_eq(hash, result_just_over_block); - - let hash = std::hash::sha256_var(msg_multiple_of_block, msg_multiple_of_block.len() as u64); - assert_eq(hash, result_multiple_of_block); - - let hash = std::hash::sha256_var(msg_just_under_block, msg_just_under_block.len() as u64); - assert_eq(hash, result_just_under_block); - - let hash = std::hash::sha256_var( - msg_big_not_block_multiple, - msg_big_not_block_multiple.len() as u64, - ); - assert_eq(hash, result_big); - - let hash_padding = std::hash::sha256_var(msg_big_with_padding, message_size); - assert_eq(hash_padding, result_big_with_padding); - - let hash_no_padding = std::hash::sha256_var(msg_big_no_padding, message_size); - assert_eq(hash_no_padding, result_big_with_padding); -} diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Nargo.toml deleted file mode 100644 index a80677c585d..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "sha256_var_padding_regression" -type = "bin" -authors = [""] -compiler_version = ">=0.34.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Prover.toml deleted file mode 100644 index 7b20e870128..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -preimage = [29, 81, 165, 84, 243, 114, 101, 37, 242, 146, 127, 99, 69, 145, 39, 72, 213, 39, 253, 179, 218, 37, 217, 201, 172, 93, 198, 50, 249, 70, 15, 30, 162, 112, 187, 40, 140, 9, 236, 53, 32, 44, 38, 163, 113, 254, 192, 197, 44, 89, 71, 130, 169, 242, 17, 211, 214, 72, 19, 178, 186, 168, 147, 127, 99, 101, 252, 227, 8, 147, 150, 85, 97, 158, 17, 107, 218, 244, 82, 113, 247, 91, 208, 214, 60, 244, 87, 137, 173, 201, 130, 18, 66, 56, 198, 149, 207, 189, 175, 120, 123, 224, 177, 167, 251, 159, 143, 110, 68, 183, 189, 70, 126, 32, 35, 164, 44, 30, 44, 12, 65, 18, 62, 239, 242, 2, 248, 104, 2, 178, 64, 28, 126, 36, 137, 24, 14, 116, 91, 98, 90, 159, 218, 102, 45, 11, 110, 223, 245, 184, 52, 99, 59, 245, 136, 175, 3, 72, 164, 146, 145, 116, 22, 66, 24, 49, 193, 121, 3, 60, 37, 41, 97, 3, 190, 66, 195, 225, 63, 46, 3, 118, 4, 208, 15, 1, 40, 254, 235, 151, 123, 70, 180, 170, 44, 172, 90, 4, 254, 53, 239, 116, 246, 67, 56, 129, 61, 22, 169, 213, 65, 27, 216, 116, 162, 239, 214, 207, 126, 177, 20, 100, 25, 48, 143, 84, 215, 70, 197, 53, 65, 70, 86, 172, 61, 62, 9, 212, 167, 169, 133, 41, 126, 213, 196, 33, 192, 238, 0, 63, 246, 215, 58, 128, 110, 101, 92, 3, 170, 214, 130, 149, 52, 81, 125, 118, 233, 3, 118, 193, 104, 207, 120, 115, 77, 253, 191, 122, 0, 107, 164, 207, 113, 81, 169, 36, 201, 228, 74, 134, 131, 218, 178, 35, 30, 216, 101, 2, 103, 174, 87, 95, 50, 50, 215, 157, 5, 210, 188, 54, 211, 78, 
45, 199, 96, 121, 241, 241, 176, 226, 194, 134, 130, 89, 217, 210, 186, 32, 140, 39, 91, 103, 212, 26, 87, 32, 72, 144, 228, 230, 117, 99, 188, 50, 15, 69, 79, 179, 50, 12, 106, 86, 218, 101, 73, 142, 243, 29, 250, 122, 228, 233, 29, 255, 22, 121, 114, 125, 103, 41, 250, 241, 179, 126, 158, 198, 116, 209, 65, 94, 98, 228, 175, 169, 96, 3, 9, 233, 133, 214, 55, 161, 164, 103, 80, 85, 24, 186, 64, 167, 92, 131, 53, 101, 202, 47, 25, 104, 118, 155, 14, 12, 12, 25, 116, 45, 221, 249, 28, 246, 212, 200, 157, 167, 169, 56, 197, 181, 4, 245, 146, 1, 140, 234, 191, 212, 228, 125, 87, 81, 86, 119, 30, 63, 129, 143, 32, 96] -result = [205, 74, 73, 134, 202, 93, 199, 152, 171, 244, 133, 193, 132, 40, 42, 9, 248, 11, 99, 200, 135, 58, 220, 227, 45, 253, 183, 241, 69, 69, 80, 219] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/src/main.nr deleted file mode 100644 index 13f87a0efc5..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_padding_regression/src/main.nr +++ /dev/null @@ -1,29 +0,0 @@ -// Test to check sha256_var produces same results irrespective of number of padding bytes after message.length -// Ref: https://github.com/noir-lang/noir/issues/6163, https://gist.github.com/jp4g/d5953faae9eadb2909357474f7901e58 -fn main(preimage: [u8; 448], result: [u8; 32]) { - // Construct arrays of different lengths - let mut preimage_511 = [0; 511]; - let mut preimage_512 = [0; 512]; // Next block - let mut preimage_575 = [0; 575]; - let mut preimage_576 = [0; 576]; // Next block - for i in 0..preimage.len() { - preimage_511[i] = preimage[i]; - preimage_512[i] = preimage[i]; - preimage_575[i] = preimage[i]; - preimage_576[i] = preimage[i]; - } - let fixed_length_hash = std::hash::sha256::digest(preimage); - let var_full_length_hash = std::hash::sha256::sha256_var(preimage, preimage.len() as u64); - let var_length_hash_511 = std::hash::sha256::sha256_var(preimage_511, preimage.len() as u64); - let var_length_hash_512 = std::hash::sha256::sha256_var(preimage_512, preimage.len() as u64); - let var_length_hash_575 = std::hash::sha256::sha256_var(preimage_575, preimage.len() as u64); - let var_length_hash_576 = std::hash::sha256::sha256_var(preimage_576, preimage.len() as u64); - - // All of the above should have produced the same hash (result) - assert(fixed_length_hash == result); - assert(var_full_length_hash == result); - assert(var_length_hash_511 == result); - assert(var_length_hash_512 == result); - assert(var_length_hash_575 == result); - assert(var_length_hash_576 == result); -} diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Nargo.toml deleted file mode 100644 index 3e141ee5d5f..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "sha256_var_size_regression" -type = "bin" -authors = [""] -compiler_version = ">=0.33.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Prover.toml deleted file mode 100644 index df632a42858..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/Prover.toml +++ 
/dev/null @@ -1,3 +0,0 @@ -enable = [true, false] -foo = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] -toggle = false diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/src/main.nr deleted file mode 100644 index 4278cdda8a3..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_size_regression/src/main.nr +++ /dev/null @@ -1,17 +0,0 @@ -global NUM_HASHES: u32 = 2; - -fn main(foo: [u8; 95], toggle: bool, enable: [bool; NUM_HASHES]) { - let mut result = [[0; 32]; NUM_HASHES]; - let mut const_result = [[0; 32]; NUM_HASHES]; - let size: Field = 93 + toggle as Field * 2; - for i in 0..NUM_HASHES { - if enable[i] { - result[i] = std::sha256::sha256_var(foo, size as u64); - const_result[i] = std::sha256::sha256_var(foo, 93); - } - } - - for i in 0..NUM_HASHES { - assert_eq(result[i], const_result[i]); - } -} diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Nargo.toml deleted file mode 100644 index e8f3e6bbe64..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "sha256_var_witness_const_regression" -type = "bin" -authors = [""] -compiler_version = ">=0.33.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Prover.toml deleted file mode 100644 index 7b91051c1a0..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -input = [0, 0] -toggle = false \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/src/main.nr deleted file mode 100644 index 97c4435d41d..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha256_var_witness_const_regression/src/main.nr +++ /dev/null @@ -1,9 +0,0 @@ -fn main(input: [u8; 2], toggle: bool) { - let size: Field = 1 + toggle as Field; - assert(!toggle); - - let variable_sha = std::sha256::sha256_var(input, size as u64); - let constant_sha = std::sha256::sha256_var(input, 1); - - assert_eq(variable_sha, constant_sha); -} diff --git a/noir/noir-repo/test_programs/execution_success/sha2_byte/Prover.toml b/noir/noir-repo/test_programs/execution_success/sha2_byte/Prover.toml deleted file mode 100644 index 2f82f14a58d..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha2_byte/Prover.toml +++ /dev/null @@ -1,5 +0,0 @@ -# Values obtainable from https://emn178.github.io/online-tools/sha256.html and https://emn178.github.io/online-tools/sha512.html -x = 0xbd -result256 = [0x68, 0x32, 0x57, 0x20, 0xaa, 0xbd, 0x7c, 0x82, 0xf3, 0x0f, 0x55, 0x4b, 0x31, 0x3d, 0x05, 0x70, 0xc9, 0x5a, 0xcc, 0xbb, 0x7d, 0xc4, 0xb5, 0xaa, 0xe1, 0x12, 0x04, 0xc0, 0x8f, 0xfe, 0x73, 0x2b] -result512 = [0x29, 0x6e, 
0x22, 0x67, 0xd7, 0x4c, 0x27, 0x8d, 0xaa, 0xaa, 0x94, 0x0d, 0x17, 0xb0, 0xcf, 0xb7, 0x4a, 0x50, 0x83, 0xf8, 0xe0, 0x69, 0x72, 0x6d, 0x8c, 0x84, 0x1c, 0xbe, 0x59, 0x6e, 0x04, 0x31, 0xcb, 0x77, 0x41, 0xa5, 0xb5, 0x0f, 0x71, 0x66, 0x6c, 0xfd, 0x54, 0xba, 0xcb, 0x7b, 0x00, 0xae, 0xa8, 0x91, 0x49, 0x9c, 0xf4, 0xef, 0x6a, 0x03, 0xc8, 0xa8, 0x3f, 0xe3, 0x7c, 0x3f, 0x7b, 0xaf] - diff --git a/noir/noir-repo/test_programs/execution_success/sha2_byte/src/main.nr b/noir/noir-repo/test_programs/execution_success/sha2_byte/src/main.nr deleted file mode 100644 index a1663642c69..00000000000 --- a/noir/noir-repo/test_programs/execution_success/sha2_byte/src/main.nr +++ /dev/null @@ -1,8 +0,0 @@ -// Test Noir implementations of SHA256 and SHA512 on a one-byte message. -fn main(x: Field, result256: [u8; 32], result512: [u8; 64]) { - let digest256 = std::hash::sha256([x as u8]); - assert(digest256 == result256); - - let digest512 = std::hash::sha512::digest([x as u8]); - assert(digest512 == result512); -} diff --git a/noir/noir-repo/test_programs/execution_success/keccak256/Nargo.toml b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/Nargo.toml similarity index 63% rename from noir/noir-repo/test_programs/execution_success/keccak256/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/shift_right_overflow/Nargo.toml index 7e48c3b342c..a883299fbc2 100644 --- a/noir/noir-repo/test_programs/execution_success/keccak256/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/Nargo.toml @@ -1,6 +1,5 @@ [package] -name = "keccak256" +name = "shift_right_overflow" type = "bin" authors = [""] - [dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/shift_right_overflow/Prover.toml b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/Prover.toml new file mode 100644 index 00000000000..57cb8b2eac8 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/Prover.toml @@ -0,0 +1 @@ +x = 9 diff --git a/noir/noir-repo/test_programs/execution_success/shift_right_overflow/src/main.nr b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/src/main.nr new file mode 100644 index 00000000000..c5d23ab5cd9 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/shift_right_overflow/src/main.nr @@ -0,0 +1,5 @@ +fn main(x: u8) { + // This would previously overflow in ACIR. Now it returns zero. 
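+    // Prover.toml supplies x = 9, so every bit of the operand is shifted out
+    // and the expected result is zero.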
+    let value = 1 >> x;
+    assert_eq(value, 0);
+}
diff --git a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr
index 35b50150986..82b53dd2cc9 100644
--- a/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr
+++ b/noir/noir-repo/test_programs/execution_success/simple_shield/src/main.nr
@@ -20,7 +20,22 @@ fn main(
    // Compute output note nullifier
    let receiver_note_commitment = std::hash::pedersen_commitment([to_pubkey_x, to_pubkey_y]);
    // Check that the input note nullifier is in the root
-    assert(note_root == std::merkle::compute_merkle_root(note_commitment.x, index, note_hash_path));
+    assert(note_root == compute_merkle_root(note_commitment.x, index, note_hash_path));

    [nullifier.x, receiver_note_commitment.x]
}
+
+fn compute_merkle_root<let N: u32>(leaf: Field, index: Field, hash_path: [Field; N]) -> Field {
+    let index_bits: [u1; N] = index.to_le_bits();
+    let mut current = leaf;
+    for i in 0..N {
+        let path_bit = index_bits[i] as bool;
+        let (hash_left, hash_right) = if path_bit {
+            (hash_path[i], current)
+        } else {
+            (current, hash_path[i])
+        };
+        current = std::hash::pedersen_hash([hash_left, hash_right]);
+    }
+    current
+}
diff --git a/noir/noir-repo/test_programs/execution_success/u128/Nargo.toml b/noir/noir-repo/test_programs/execution_success/u128/Nargo.toml
deleted file mode 100644
index c1dcd84db04..00000000000
--- a/noir/noir-repo/test_programs/execution_success/u128/Nargo.toml
+++ /dev/null
@@ -1,6 +0,0 @@
-[package]
-name = "u128"
-type = "bin"
-authors = [""]
-
-[dependencies]
diff --git a/noir/noir-repo/test_programs/execution_success/u128/Prover.toml b/noir/noir-repo/test_programs/execution_success/u128/Prover.toml
deleted file mode 100644
index 961db9825a7..00000000000
--- a/noir/noir-repo/test_programs/execution_success/u128/Prover.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-x = "3"
-y = "4"
-z = "7"
-hexa ="0x1f03a"
-[big_int]
-lo = 1
-hi = 2
\ No newline at end of file
diff --git a/noir/noir-repo/test_programs/execution_success/u128/src/main.nr b/noir/noir-repo/test_programs/execution_success/u128/src/main.nr
deleted file mode 100644
index 56ac5b995d9..00000000000
--- a/noir/noir-repo/test_programs/execution_success/u128/src/main.nr
+++ /dev/null
@@ -1,41 +0,0 @@
-fn main(mut x: u32, y: u32, z: u32, big_int: U128, hexa: str<7>) {
-    let a = U128::from_u64s_le(x as u64, x as u64);
-    let b = U128::from_u64s_le(y as u64, x as u64);
-    let c = a + b;
-    assert(c.lo == z as Field);
-    assert(c.hi == 2 * x as Field);
-    assert(U128::from_hex(hexa).lo == 0x1f03a);
-    let t1 = U128::from_hex("0x9d9c7a87771f03a23783f9d9c7a8777");
-    let t2 = U128::from_hex("0x45a26c708BFCF39041");
-    let t = t1 + t2;
-    assert(t.lo == 0xc5e4b029996e17b8);
-    assert(t.hi == 0x09d9c7a87771f07f);
-    let t3 = U128::from_le_bytes(t.to_le_bytes());
-    assert(t == t3);
-
-    let t4 = t - t2;
-    assert(t4 == t1);
-
-    let t5 = U128::from_u64s_le(0, 1);
-    let t6 = U128::from_u64s_le(1, 0);
-    assert((t5 - t6).hi == 0);
-
-    assert(
-        (U128::from_hex("0x71f03a23783f9d9c7a8777") * U128::from_hex("0x8BFCF39041")).hi
-            == U128::from_hex("0x3e4e0471b873470e247c824e61445537").hi,
-    );
-    let q = U128::from_hex("0x3e4e0471b873470e247c824e61445537") / U128::from_hex("0x8BFCF39041");
-    assert(q == U128::from_hex("0x71f03a23783f9d9c7a8777"));
-
-    assert(big_int.hi == 2);
-
-    let mut small_int = U128::from_integer(x);
-    assert(small_int.lo == x as Field);
-    assert(x == small_int.to_integer());
-    let shift = small_int << (x as u8);
-
assert(shift == U128::from_integer(x << (x as u8))); - assert(shift >> (x as u8) == small_int); - assert(shift >> 127 == U128::from_integer(0)); - assert(shift << 127 == U128::from_integer(0)); - assert(U128::from_integer(3).to_integer() == 3); -} diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh index eb83004affd..ff64449fcf5 100755 --- a/noir/noir-repo/test_programs/memory_report.sh +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -8,7 +8,7 @@ PARSE_MEMORY=$(realpath "$(dirname "$0")/parse_memory.sh") # Tests to be profiled for memory report -tests_to_profile=("keccak256" "workspace" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") +tests_to_profile=("workspace" "regression_4709" "ram_blowup_regression" "global_var_regression_entry_points") current_dir=$(pwd) base_path="$current_dir/execution_success" diff --git a/noir/noir-repo/test_programs/noir_test_success/comptime_blackbox/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/comptime_blackbox/src/main.nr index 446692c485b..3b1f2963b9f 100644 --- a/noir/noir-repo/test_programs/noir_test_success/comptime_blackbox/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/comptime_blackbox/src/main.nr @@ -117,13 +117,16 @@ fn test_ecdsa_secp256r1() { /// Test that sha256_compression is implemented. #[test] -fn test_sha256() { +fn test_sha256_compression() { + let input: [u32; 16] = [0xbd; 16]; + let state: [u32; 8] = [0; 8]; + let hash = comptime { - let input: [u8; 1] = [0xbd]; - std::hash::sha256(input) + let input: [u32; 16] = [0xbd; 16]; + let state: [u32; 8] = [0; 8]; + std::hash::sha256_compression(input, state) }; - assert_eq(hash[0], 0x68); - assert_eq(hash[31], 0x2b); + assert_eq(hash, std::hash::sha256_compression(input, state)); } /// Test that `embedded_curve_add` and `multi_scalar_mul` are implemented. diff --git a/noir/noir-repo/test_programs/noir_test_success/global_eval/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/global_eval/src/main.nr index 87a2d50a916..da962bf5203 100644 --- a/noir/noir-repo/test_programs/noir_test_success/global_eval/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/global_eval/src/main.nr @@ -1,20 +1,9 @@ -use std::uint128::U128; - // These definitions require `to_be_bits` and `to_le_bits` to be supported at comptime. global BITS_BE_13: [u1; 4] = (13 as Field).to_be_bits(); global BITS_LE_13: [u1; 4] = (13 as Field).to_le_bits(); -// Examples from #6691 which use the above behind the scenes. 
-global POW64_A: Field = 2.pow_32(64);
-global POW64_B: Field = (U128::one() << 64).to_integer();
-
#[test]
fn test_be_and_le_bits() {
    assert_eq(BITS_BE_13, [1, 1, 0, 1]);
    assert_eq(BITS_LE_13, [1, 0, 1, 1]);
}
-
-#[test]
-fn test_pow64() {
-    assert_eq(POW64_A, POW64_B);
-}
diff --git a/noir/noir-repo/test_programs/rebuild.sh b/noir/noir-repo/test_programs/rebuild.sh
index bf67a894291..526b90caa3d 100755
--- a/noir/noir-repo/test_programs/rebuild.sh
+++ b/noir/noir-repo/test_programs/rebuild.sh
@@ -47,7 +47,8 @@ process_dir() {
export -f process_dir

-excluded_dirs=("workspace" "workspace_default_member")
+# Reactivate `regression_7323` once enums are ready
+excluded_dirs=("workspace" "workspace_default_member" "regression_7323")
current_dir=$(pwd)
base_path="$current_dir/execution_success"
dirs_to_process=()
diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs
index 703d272310b..c1d4533230e 100644
--- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs
+++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs
@@ -8,7 +8,7 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver;
use clap::Args;
use nargo::PrintOutput;
-use nargo::{foreign_calls::DefaultForeignCallBuilder, ops::execute_program};
+use nargo::foreign_calls::DefaultForeignCallBuilder;
use noir_artifact_cli::errors::CliError;
use noir_artifact_cli::fs::artifact::read_bytecode_from_file;
use noir_artifact_cli::fs::witness::save_witness_to_dir;
@@ -56,7 +56,7 @@ fn run_command(args: ExecuteCommand) -> Result<String, CliError> {
    )?;
    if args.output_witness.is_some() {
        save_witness_to_dir(
-            output_witness,
+            &output_witness,
            &args.output_witness.unwrap(),
            &args.working_directory,
        )?;
@@ -80,7 +80,8 @@ pub(crate) fn execute_program_from_witness(
) -> Result<WitnessStack<FieldElement>, CliError> {
    let program: Program<FieldElement> =
        Program::deserialize_program(bytecode).map_err(CliError::CircuitDeserializationError)?;
-    execute_program(
+
+    nargo::ops::execute_program(
        &program,
        inputs_map,
        &Bn254BlackBoxSolver(pedantic_solving),
diff --git a/noir/noir-repo/tooling/acvm_cli/src/main.rs b/noir/noir-repo/tooling/acvm_cli/src/main.rs
index 8f9edc48f32..2ac8a96eaba 100644
--- a/noir/noir-repo/tooling/acvm_cli/src/main.rs
+++ b/noir/noir-repo/tooling/acvm_cli/src/main.rs
@@ -20,18 +20,19 @@ fn main() {
            .with_span_events(FmtSpan::ACTIVE)
            .with_writer(debug_file)
            .with_ansi(false)
-            .with_env_filter(EnvFilter::from_default_env())
+            .with_env_filter(EnvFilter::from_env("NOIR_LOG"))
            .init();
    } else {
        tracing_subscriber::fmt()
            .with_span_events(FmtSpan::ACTIVE)
+            .with_writer(std::io::stderr)
            .with_ansi(true)
            .with_env_filter(EnvFilter::from_env("NOIR_LOG"))
            .init();
    }

    if let Err(report) = cli::start_cli() {
-        eprintln!("{report}");
+        eprintln!("{report:#}");
        std::process::exit(1);
    }
}
diff --git a/noir/noir-repo/tooling/artifact_cli/src/bin/execute.rs b/noir/noir-repo/tooling/artifact_cli/src/bin/execute.rs
index 8bca901e485..5d67ce0a958 100644
--- a/noir/noir-repo/tooling/artifact_cli/src/bin/execute.rs
+++ b/noir/noir-repo/tooling/artifact_cli/src/bin/execute.rs
@@ -35,6 +35,7 @@ pub fn start_cli() -> eyre::Result<()> {
fn main() {
    tracing_subscriber::fmt()
        .with_span_events(FmtSpan::ACTIVE)
+        .with_writer(std::io::stderr)
        .with_ansi(true)
        .with_env_filter(EnvFilter::from_env("NOIR_LOG"))
        .init();
diff --git a/noir/noir-repo/tooling/artifact_cli/src/commands/execute_cmd.rs b/noir/noir-repo/tooling/artifact_cli/src/commands/execute_cmd.rs
index f863428149b..6ce96628269 100644
---
a/noir/noir-repo/tooling/artifact_cli/src/commands/execute_cmd.rs +++ b/noir/noir-repo/tooling/artifact_cli/src/commands/execute_cmd.rs @@ -1,18 +1,18 @@ -use std::{collections::BTreeMap, path::PathBuf}; +use std::path::PathBuf; -use acir::{FieldElement, circuit::Program, native_types::WitnessStack}; use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; -use color_eyre::eyre::{self, bail}; use crate::{ Artifact, errors::CliError, - fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}, + execution::{self, ExecutionResults}, }; -use nargo::{NargoError, PrintOutput, foreign_calls::DefaultForeignCallBuilder}; -use noirc_abi::{Abi, input_parser::InputValue}; -use noirc_artifacts::debug::DebugArtifact; +use nargo::{ + PrintOutput, + foreign_calls::{DefaultForeignCallBuilder, layers, transcript::ReplayForeignCallExecutor}, +}; +use noirc_driver::CompiledProgram; use super::parse_and_normalize_path; @@ -21,106 +21,106 @@ use super::parse_and_normalize_path; pub struct ExecuteCommand { /// Path to the JSON build artifact (either a program or a contract). #[clap(long, short, value_parser = parse_and_normalize_path)] - artifact_path: PathBuf, + pub artifact_path: PathBuf, /// Path to the Prover.toml file which contains the inputs and the /// optional return value in ABI format. #[clap(long, short, value_parser = parse_and_normalize_path)] - prover_file: PathBuf, + pub prover_file: PathBuf, /// Path to the directory where the output witness should be saved. /// If empty then the results are discarded. #[clap(long, short, value_parser = parse_and_normalize_path)] - output_dir: Option<PathBuf>, + pub output_dir: Option<PathBuf>, /// Write the execution witness to named file /// /// Defaults to the name of the circuit being executed. #[clap(long, short)] - witness_name: Option<String>, + pub witness_name: Option<String>, /// Name of the function to execute, if the artifact is a contract. #[clap(long)] - contract_fn: Option<String>, + pub contract_fn: Option<String>, + + /// Path to the oracle transcript that is to be replayed during the + /// execution in response to foreign calls. The format is expected + /// to be JSON Lines, with each request/response on a separate line. + /// + /// Note that a transcript might be invalid if the inputs change and + /// the circuit takes a different path during execution. + #[clap(long, conflicts_with = "oracle_resolver")] + pub oracle_file: Option<PathBuf>, /// JSON RPC url to solve oracle calls. + #[clap(long, conflicts_with = "oracle_file")] + pub oracle_resolver: Option<String>, + + /// Root directory for the RPC oracle resolver. + #[clap(long, value_parser = parse_and_normalize_path)] + pub oracle_root_dir: Option<PathBuf>, + + /// Package name for the RPC oracle resolver #[clap(long)] - oracle_resolver: Option<String>, + pub oracle_package_name: Option<String>, /// Use pedantic ACVM solving, i.e. double-check some black-box function assumptions when solving.
#[clap(long, default_value_t = false)] - pedantic_solving: bool, + pub pedantic_solving: bool, } -pub fn run(args: ExecuteCommand) -> eyre::Result<()> { +pub fn run(args: ExecuteCommand) -> Result<(), CliError> { let artifact = Artifact::read_from_file(&args.artifact_path)?; + let artifact_name = args.artifact_path.file_stem().and_then(|s| s.to_str()).unwrap_or_default(); - let circuit = match artifact { - Artifact::Program(program) => Circuit { - name: None, - abi: program.abi, - bytecode: program.bytecode, - debug_symbols: program.debug_symbols, - file_map: program.file_map, - }, + let (circuit, circuit_name): (CompiledProgram, String) = match artifact { + Artifact::Program(program) => (program.into(), artifact_name.to_string()), Artifact::Contract(contract) => { - let names = - contract.functions.iter().map(|f| f.name.clone()).collect::<Vec<_>>().join(","); + let names = || contract.functions.iter().map(|f| f.name.clone()).collect::<Vec<_>>(); let Some(ref name) = args.contract_fn else { - bail!("--contract-fn missing; options: [{names}]"); + return Err(CliError::MissingContractFn { names: names() }); }; - let Some(function) = contract.functions.into_iter().find(|f| f.name == *name) else { - bail!("unknown --contract-fn '{name}'; options: [{names}]"); + let Some(program) = contract.function_as_compiled_program(name) else { + return Err(CliError::UnknownContractFn { name: name.clone(), names: names() }); }; - Circuit { - name: Some(name.clone()), - abi: function.abi, - bytecode: function.bytecode, - debug_symbols: function.debug_symbols, - file_map: contract.file_map, - } + (program, format!("{artifact_name}::{name}")) } }; match execute(&circuit, &args) { - Ok(solved) => { - save_witness(circuit, args, solved)?; - } - Err(CliError::CircuitExecutionError(err)) => { - show_diagnostic(circuit, err); + Ok(results) => { + execution::save_and_check_witness( + &circuit, + results, + &circuit_name, + args.output_dir.as_deref(), + args.witness_name.as_deref(), + )?; } Err(e) => { - bail!("failed to execute the circuit: {e}"); + if let CliError::CircuitExecutionError(ref err) = e { + execution::show_diagnostic(&circuit, err); + } + // Still return the error, so that a caller forwarding this command can tell that it failed. + return Err(e); } } Ok(()) } -/// Parameters necessary to execute a circuit, display execution failures, etc. -struct Circuit { - name: Option<String>, - abi: Abi, - bytecode: Program<FieldElement>, - debug_symbols: noirc_errors::debug_info::ProgramDebugInfo, - file_map: BTreeMap<FileId, DebugFile>, -} - -struct SolvedWitnesses { - expected_return: Option<InputValue>, - actual_return: Option<InputValue>, - witness_stack: WitnessStack<FieldElement>, -} - /// Execute a circuit and return the output witnesses. -fn execute(circuit: &Circuit, args: &ExecuteCommand) -> Result<SolvedWitnesses, CliError> { - let (input_map, expected_return) = read_inputs_from_file(&args.prover_file, &circuit.abi)?; - - let initial_witness = circuit.abi.encode(&input_map, None)?; +fn execute(circuit: &CompiledProgram, args: &ExecuteCommand) -> Result<ExecutionResults, CliError> { + // Build a custom foreign call executor that replays the Oracle transcript, + // and use it as a base for the default executor. We use it as the innermost rather + // than the top layer so that any extra `print` added for debugging is handled by the + // default executor, rather than being matched against the transcript (see the sketch below).
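// An illustrative, self-contained sketch of the layering idea described in the
// comment above. This is NOT the nargo API: the trait, the string-based call
// encoding, `Replay` and `Print` are simplified stand-ins for
// `ForeignCallExecutor`, `ReplayForeignCallExecutor` and the print layer that
// `DefaultForeignCallBuilder::build_with_base` stacks on top.

trait Executor {
    // A foreign call comes in; `Some` means this layer (or one below it) handled it.
    fn execute(&mut self, call: &str) -> Option<String>;
}

// Innermost layer: replays pre-recorded responses in order.
struct Replay {
    transcript: std::collections::VecDeque<(String, String)>,
}

impl Executor for Replay {
    fn execute(&mut self, call: &str) -> Option<String> {
        match self.transcript.pop_front() {
            Some((recorded, response)) if recorded == call => Some(response),
            // A stale transcript no longer matches the execution path.
            _ => None,
        }
    }
}

// Outer layer: handles `print` itself and delegates everything else inward,
// which is why debug prints never have to appear in the transcript.
struct Print<E> {
    inner: E,
}

impl<E: Executor> Executor for Print<E> {
    fn execute(&mut self, call: &str) -> Option<String> {
        if let Some(msg) = call.strip_prefix("print:") {
            println!("{msg}");
            Some(String::new())
        } else {
            self.inner.execute(call)
        }
    }
}

fn main() {
    let transcript = std::collections::VecDeque::from([("rand".to_string(), "42".to_string())]);
    let mut stack = Print { inner: Replay { transcript } };
    assert_eq!(stack.execute("print:hello"), Some(String::new())); // handled by the top layer
    assert_eq!(stack.execute("rand"), Some("42".to_string())); // replayed by the base layer
}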
+ let transcript_executor = match args.oracle_file { + Some(ref path) => layers::Either::Left(ReplayForeignCallExecutor::from_file(path)?), + None => layers::Either::Right(layers::Empty), + }; - // TODO: Build a custom foreign call executor that reads from the Oracle transcript, - // and use it as a base for the default executor; see `DefaultForeignCallBuilder::build_with_base` let mut foreign_call_executor = DefaultForeignCallBuilder { output: PrintOutput::Stdout, enable_mocks: false, @@ -128,82 +128,9 @@ fn execute(circuit: &Circuit, args: &ExecuteCommand) -> Result) { - if let Some(diagnostic) = nargo::errors::try_to_diagnose_runtime_error( - &err, - &circuit.abi, - &circuit.debug_symbols.debug_infos, - ) { - let debug_artifact = DebugArtifact { - debug_symbols: circuit.debug_symbols.debug_infos, - file_map: circuit.file_map, - }; - diagnostic.report(&debug_artifact, false); - } -} - -/// Print information about the witness and compare to expectations, -/// returning errors if something isn't right. -fn save_witness( - circuit: Circuit, - args: ExecuteCommand, - solved: SolvedWitnesses, -) -> eyre::Result<()> { - let artifact = args.artifact_path.file_stem().and_then(|s| s.to_str()).unwrap_or_default(); - let name = circuit - .name - .as_ref() - .map(|name| format!("{artifact}.{name}")) - .unwrap_or_else(|| artifact.to_string()); - - println!("[{}] Circuit witness successfully solved", name); - - if let Some(ref witness_dir) = args.output_dir { - let witness_path = save_witness_to_dir( - solved.witness_stack, - &args.witness_name.unwrap_or_else(|| name.clone()), - witness_dir, - )?; - println!("[{}] Witness saved to {}", name, witness_path.display()); - } + .build_with_base(transcript_executor); - // Check that the circuit returned a non-empty result if the ABI expects a return value. - if let Some(ref expected) = circuit.abi.return_type { - if solved.actual_return.is_none() { - bail!("Missing return witness; expected a value of type {expected:?}"); - } - } - - // Check that if the prover file contained a `return` entry then that's what we got. - if let Some(expected) = solved.expected_return { - match solved.actual_return { - None => { - bail!("Missing return witness;\nexpected:\n{expected:?}"); - } - Some(actual) if actual != expected => { - bail!("Unexpected return witness;\nexpected:\n{expected:?}\ngot:\n{actual:?}"); - } - _ => {} - } - } + let blackbox_solver = Bn254BlackBoxSolver(args.pedantic_solving); - Ok(()) + execution::execute(circuit, &blackbox_solver, &mut foreign_call_executor, &args.prover_file) } diff --git a/noir/noir-repo/tooling/artifact_cli/src/commands/mod.rs b/noir/noir-repo/tooling/artifact_cli/src/commands/mod.rs index 78f3b19292f..9049b3695b7 100644 --- a/noir/noir-repo/tooling/artifact_cli/src/commands/mod.rs +++ b/noir/noir-repo/tooling/artifact_cli/src/commands/mod.rs @@ -1,3 +1,4 @@ +//! This module is for commands that we might want to invoke from `nargo` as-is. 
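// An illustrative aside on the `--oracle-file` / `--oracle-resolver` flags
// declared above: they are kept mutually exclusive purely through clap's
// `conflicts_with`, so `run` never has to check the combination itself. A
// minimal self-contained sketch of that mechanism (the flag names are reused
// for illustration only; assumes clap with the "derive" feature enabled):

use clap::Parser;

#[derive(Parser, Debug)]
struct Opts {
    /// Replay oracle responses from a JSON Lines transcript.
    #[clap(long, conflicts_with = "oracle_resolver")]
    oracle_file: Option<std::path::PathBuf>,

    /// Resolve oracle calls over JSON-RPC instead.
    #[clap(long, conflicts_with = "oracle_file")]
    oracle_resolver: Option<String>,
}

fn main() {
    // Passing both flags makes clap exit with a usage error before any command logic runs.
    let opts = Opts::parse();
    println!("{opts:?}");
}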
use std::path::PathBuf; use color_eyre::eyre; diff --git a/noir/noir-repo/tooling/artifact_cli/src/errors.rs b/noir/noir-repo/tooling/artifact_cli/src/errors.rs index 5f302f78695..1dfaac850b0 100644 --- a/noir/noir-repo/tooling/artifact_cli/src/errors.rs +++ b/noir/noir-repo/tooling/artifact_cli/src/errors.rs @@ -1,6 +1,10 @@ use acir::FieldElement; -use nargo::NargoError; -use noirc_abi::errors::{AbiError, InputParserError}; +use nargo::{NargoError, foreign_calls::transcript::TranscriptError}; +use noirc_abi::{ + AbiReturnType, + errors::{AbiError, InputParserError}, + input_parser::InputValue, +}; use std::path::PathBuf; use thiserror::Error; @@ -35,6 +39,10 @@ pub enum CliError { #[error("Failed to deserialize inputs")] InputDeserializationError(#[from] InputParserError), + /// Error related to oracle transcript deserialization + #[error(transparent)] + TranscriptError(#[from] TranscriptError), + /// Error related to ABI encoding #[error(transparent)] AbiError(#[from] AbiError), @@ -61,4 +69,16 @@ pub enum CliError { #[error("Failed to serialize output witness: {0}")] OutputWitnessSerializationFailed(#[from] toml::ser::Error), + + #[error("Unexpected return value: expected {expected:?}; got {actual:?}")] + UnexpectedReturn { expected: InputValue, actual: Option<InputValue> }, + + #[error("Missing return witnesses; expected {expected:?}")] + MissingReturn { expected: AbiReturnType }, + + #[error("Missing contract function name; options: {names:?}")] + MissingContractFn { names: Vec<String> }, + + #[error("Unknown contract function '{name}'; options: {names:?}")] + UnknownContractFn { name: String, names: Vec<String> }, } diff --git a/noir/noir-repo/tooling/artifact_cli/src/execution.rs b/noir/noir-repo/tooling/artifact_cli/src/execution.rs new file mode 100644 index 00000000000..2e19ab55161 --- /dev/null +++ b/noir/noir-repo/tooling/artifact_cli/src/execution.rs @@ -0,0 +1,135 @@ +use std::path::Path; + +use acir::{FieldElement, native_types::WitnessStack}; +use acvm::BlackBoxFunctionSolver; +use nargo::{NargoError, foreign_calls::ForeignCallExecutor}; +use noirc_abi::input_parser::InputValue; +use noirc_artifacts::debug::DebugArtifact; +use noirc_driver::CompiledProgram; + +use crate::{ + errors::CliError, + fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}, +}; + +/// Results of a circuit execution. +#[derive(Clone, Debug)] +pub struct ExecutionResults { + pub witness_stack: WitnessStack<FieldElement>, + pub return_values: ReturnValues, +} + +/// The decoded `return` witnesses. +#[derive(Clone, Debug)] +pub struct ReturnValues { + /// The `return` value from the `Prover.toml` file, if present. + pub expected_return: Option<InputValue>, + /// The `return` value from the circuit execution. + pub actual_return: Option<InputValue>, +} + +/// Execute a circuit and return the output witnesses.
+pub fn execute<B, E>( + circuit: &CompiledProgram, + blackbox_solver: &B, + foreign_call_executor: &mut E, + prover_file: &Path, +) -> Result<ExecutionResults, CliError> +where + B: BlackBoxFunctionSolver<FieldElement>, + E: ForeignCallExecutor<FieldElement>, +{ + let (input_map, expected_return) = read_inputs_from_file(prover_file, &circuit.abi)?; + + let initial_witness = circuit.abi.encode(&input_map, None)?; + + let witness_stack = nargo::ops::execute_program( + &circuit.program, + initial_witness, + blackbox_solver, + foreign_call_executor, + )?; + + let main_witness = + &witness_stack.peek().expect("Should have at least one witness on the stack").witness; + + let (_, actual_return) = circuit.abi.decode(main_witness)?; + + Ok(ExecutionResults { + witness_stack, + return_values: ReturnValues { actual_return, expected_return }, + }) +} + +/// Print an error stack trace, if possible. +pub fn show_diagnostic(circuit: &CompiledProgram, err: &NargoError<FieldElement>) { + if let Some(diagnostic) = + nargo::errors::try_to_diagnose_runtime_error(err, &circuit.abi, &circuit.debug) + { + let debug_artifact = DebugArtifact { + debug_symbols: circuit.debug.clone(), + file_map: circuit.file_map.clone(), + }; + + diagnostic.report(&debug_artifact, false); + } +} + +/// Print some information and save the witness if an output directory is specified, +/// then check that the values returned by the circuit match the expectations. +pub fn save_and_check_witness( + circuit: &CompiledProgram, + results: ExecutionResults, + circuit_name: &str, + witness_dir: Option<&Path>, + witness_name: Option<&str>, +) -> Result<(), CliError> { + println!("[{}] Circuit witness successfully solved", circuit_name); + // Save first, so that we can potentially look at the output if the expectations fail. + if let Some(witness_dir) = witness_dir { + save_witness(&results.witness_stack, circuit_name, witness_dir, witness_name)?; + } + check_witness(circuit, results.return_values) +} + +/// Save the witness stack to a file. +pub fn save_witness( + witness_stack: &WitnessStack<FieldElement>, + circuit_name: &str, + witness_dir: &Path, + witness_name: Option<&str>, +) -> Result<(), CliError> { + let witness_name = witness_name.unwrap_or(circuit_name); + let witness_path = save_witness_to_dir(witness_stack, witness_name, witness_dir)?; + println!("[{}] Witness saved to {}", circuit_name, witness_path.display()); + Ok(()) +} + +/// Compare return values to expectations, returning errors if something unexpected was returned. +pub fn check_witness( + circuit: &CompiledProgram, + return_values: ReturnValues, +) -> Result<(), CliError> { + // Check that the circuit returned a non-empty result if the ABI expects a return value. + if let Some(ref expected) = circuit.abi.return_type { + if return_values.actual_return.is_none() { + return Err(CliError::MissingReturn { expected: expected.clone() }); + } + } + + // Check that if the prover file contained a `return` entry then that's what we got.
+ if let Some(expected) = return_values.expected_return { + match return_values.actual_return { + None => { + return Err(CliError::UnexpectedReturn { expected, actual: None }); + } + Some(actual) => { + if actual != expected { + return Err(CliError::UnexpectedReturn { expected, actual: Some(actual) }); + } + } + } + } + + Ok(()) +} diff --git a/noir/noir-repo/tooling/artifact_cli/src/fs/witness.rs b/noir/noir-repo/tooling/artifact_cli/src/fs/witness.rs index cf1fe75aad7..fee31ec1f22 100644 --- a/noir/noir-repo/tooling/artifact_cli/src/fs/witness.rs +++ b/noir/noir-repo/tooling/artifact_cli/src/fs/witness.rs @@ -7,7 +7,7 @@ use crate::errors::{CliError, FilesystemError}; /// Write `witness.gz` to the output directory. pub fn save_witness_to_dir( - witnesses: WitnessStack<FieldElement>, + witnesses: &WitnessStack<FieldElement>, witness_name: &str, witness_dir: &Path, ) -> Result<PathBuf, FilesystemError> { diff --git a/noir/noir-repo/tooling/artifact_cli/src/lib.rs b/noir/noir-repo/tooling/artifact_cli/src/lib.rs index 2cd2341b7b7..ac4316f5801 100644 --- a/noir/noir-repo/tooling/artifact_cli/src/lib.rs +++ b/noir/noir-repo/tooling/artifact_cli/src/lib.rs @@ -2,6 +2,7 @@ use noirc_artifacts::{contract::ContractArtifact, program::ProgramArtifact}; pub mod commands; pub mod errors; +pub mod execution; pub mod fs; /// A parsed JSON build artifact. diff --git a/noir/noir-repo/tooling/debugger/ignored-tests.txt b/noir/noir-repo/tooling/debugger/ignored-tests.txt index e0548fe1e1a..1515e58a90f 100644 --- a/noir/noir-repo/tooling/debugger/ignored-tests.txt +++ b/noir/noir-repo/tooling/debugger/ignored-tests.txt @@ -5,5 +5,6 @@ macros reference_counts references regression_4709 +regression_7323 reference_only_used_as_alias brillig_rc_regression_6123 diff --git a/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs b/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs index b221042c79f..34107ebea3a 100644 --- a/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/inspector/src/cli/info_cmd.rs @@ -3,7 +3,6 @@ use std::path::PathBuf; use clap::Args; use color_eyre::eyre; use noir_artifact_cli::Artifact; -use noirc_artifacts::program::ProgramArtifact; use noirc_artifacts_info::{InfoReport, count_opcodes_and_gates_in_program, show_info_report}; #[derive(Debug, Clone, Args)] @@ -39,19 +38,12 @@ pub(crate) fn run(args: InfoCommand) -> eyre::Result<()> { .into_iter() .filter(|f| args.contract_fn.as_ref().map(|n| *n == f.name).unwrap_or(true)) .map(|f| { - // We have to cheat to be able to call `count_opcodes_and_gates_in_program`.
let package_name = format!("{}::{}", contract.name, f.name); - let program = ProgramArtifact { - noir_version: contract.noir_version.clone(), - hash: f.hash, - abi: f.abi, - bytecode: f.bytecode, - debug_symbols: f.debug_symbols, - file_map: contract.file_map.clone(), - names: f.names, - brillig_names: f.brillig_names, - }; - count_opcodes_and_gates_in_program(program, package_name, None) + let program = f.into_compiled_program( + contract.noir_version.clone(), + contract.file_map.clone(), + ); + count_opcodes_and_gates_in_program(program.into(), package_name, None) }) .collect::>(), }; diff --git a/noir/noir-repo/tooling/inspector/src/main.rs b/noir/noir-repo/tooling/inspector/src/main.rs index 8270fedbf2c..4d6513b8394 100644 --- a/noir/noir-repo/tooling/inspector/src/main.rs +++ b/noir/noir-repo/tooling/inspector/src/main.rs @@ -2,7 +2,7 @@ mod cli; fn main() { if let Err(report) = cli::start_cli() { - eprintln!("{report:?}"); + eprintln!("{report:#}"); std::process::exit(1); } } diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index dd3315705fc..b7ba8cd4761 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -11,7 +11,7 @@ use fxhash::FxHashMap as HashMap; use lsp_types::{DiagnosticRelatedInformation, DiagnosticTag, Url}; use noirc_driver::check_crate; use noirc_errors::reporter::CustomLabel; -use noirc_errors::{DiagnosticKind, FileDiagnostic, Location}; +use noirc_errors::{CustomDiagnostic, DiagnosticKind, Location}; use crate::types::{ Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, @@ -191,16 +191,16 @@ fn publish_diagnostics( package_root_dir: &PathBuf, files: &FileMap, fm: &FileManager, - file_diagnostics: Vec, + custom_diagnostics: Vec, ) { let mut diagnostics_per_url: HashMap> = HashMap::default(); - for file_diagnostic in file_diagnostics.into_iter() { - let file_id = file_diagnostic.file_id; - let path = fm.path(file_id).expect("file must exist to have emitted diagnostic"); + for custom_diagnostic in custom_diagnostics.into_iter() { + let file = custom_diagnostic.file; + let path = fm.path(file).expect("file must exist to have emitted diagnostic"); if let Ok(uri) = Url::from_file_path(path) { if let Some(diagnostic) = - file_diagnostic_to_diagnostic(file_diagnostic, files, fm, uri.clone()) + custom_diagnostic_to_diagnostic(custom_diagnostic, files, fm, uri.clone()) { diagnostics_per_url.entry(uri).or_default().push(diagnostic); } @@ -232,21 +232,18 @@ fn publish_diagnostics( state.files_with_errors.insert(package_root_dir.clone(), new_files_with_errors); } -fn file_diagnostic_to_diagnostic( - file_diagnostic: FileDiagnostic, +fn custom_diagnostic_to_diagnostic( + diagnostic: CustomDiagnostic, files: &FileMap, fm: &FileManager, uri: Url, ) -> Option { - let file_id = file_diagnostic.file_id; - let diagnostic = file_diagnostic.diagnostic; - if diagnostic.secondaries.is_empty() { return None; } let span = diagnostic.secondaries.first().unwrap().location.span; - let range = byte_span_to_range(files, file_id, span.into())?; + let range = byte_span_to_range(files, diagnostic.file, span.into())?; let severity = match diagnostic.kind { DiagnosticKind::Error => DiagnosticSeverity::ERROR, diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/import_trait.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/import_trait.rs index 379b627f878..e29558c617b 100644 --- 
a/noir/noir-repo/tooling/lsp/src/requests/code_action/import_trait.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/import_trait.rs @@ -42,7 +42,7 @@ impl CodeActionFinder<'_> { }; let trait_methods = - self.interner.lookup_trait_methods(&typ, &method_call.method_name.0.contents, true); + self.interner.lookup_trait_methods(typ, &method_call.method_name.0.contents, true); let trait_ids: HashSet<_> = trait_methods.iter().map(|(_, trait_id)| *trait_id).collect(); for trait_id in trait_ids { diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs index 23026d16d94..35d19324020 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/tests.rs @@ -1,6 +1,9 @@ #![cfg(test)] -use crate::{notifications::on_did_open_text_document, test_utils, tests::apply_text_edits}; +use crate::{ + notifications::on_did_open_text_document, test_utils, tests::apply_text_edits, + utils::get_cursor_line_and_column, +}; use lsp_types::{ CodeActionContext, CodeActionOrCommand, CodeActionParams, CodeActionResponse, @@ -10,16 +13,12 @@ use lsp_types::{ use super::on_code_action_request; -async fn get_code_action(src: &str) -> CodeActionResponse { +/// Given a string with ">|<" (cursor) in it, returns all code actions that are available +/// at that position together with the string with ">|<" removed. +async fn get_code_action(src: &str) -> (CodeActionResponse, String) { let (mut state, noir_text_document) = test_utils::init_lsp_server("document_symbol").await; - let (line, column) = src - .lines() - .enumerate() - .find_map(|(line_index, line)| line.find(">|<").map(|char_index| (line_index, char_index))) - .expect("Expected to find one >|< in the source code"); - - let src = src.replace(">|<", ""); + let (line, column, src) = get_cursor_line_and_column(src); on_did_open_text_document( &mut state, @@ -35,7 +34,7 @@ async fn get_code_action(src: &str) -> CodeActionResponse { let position = Position { line: line as u32, character: column as u32 }; - on_code_action_request( + let response = on_code_action_request( &mut state, CodeActionParams { text_document: TextDocumentIdentifier { uri: noir_text_document }, @@ -47,11 +46,12 @@ async fn get_code_action(src: &str) -> CodeActionResponse { ) .await .expect("Could not execute on_code_action_request") - .expect("Expected to get a CodeActionResponse, got None") + .expect("Expected to get a CodeActionResponse, got None"); + (response, src) } pub(crate) async fn assert_code_action(title: &str, src: &str, expected: &str) { - let actions = get_code_action(src).await; + let (actions, src) = get_code_action(src).await; let action = actions .iter() .filter_map(|action| { diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion.rs b/noir/noir-repo/tooling/lsp/src/requests/completion.rs index eab8522693f..65207b4d95d 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion.rs @@ -579,7 +579,7 @@ impl<'a> NodeFinder<'a> { Type::DataType(struct_type, generics) => { self.complete_struct_fields(&struct_type.borrow(), generics, prefix, self_prefix); } - Type::MutableReference(typ) => { + Type::Reference(typ, _) => { return self.complete_type_fields_and_methods( typ, prefix, @@ -1448,7 +1448,7 @@ impl Visitor for NodeFinder<'_> { let prefix = ""; let self_prefix = false; self.complete_type_fields_and_methods( - &typ, + typ, prefix, 
FunctionCompletionKind::Name, self_prefix, @@ -1481,7 +1481,7 @@ impl Visitor for NodeFinder<'_> { let prefix = prefix[0..offset].to_string(); let self_prefix = false; self.complete_type_fields_and_methods( - &typ, + typ, &prefix, FunctionCompletionKind::Name, self_prefix, @@ -1658,7 +1658,7 @@ impl Visitor for NodeFinder<'_> { let prefix = ""; let self_prefix = false; self.complete_type_fields_and_methods( - &typ, + typ, prefix, FunctionCompletionKind::NameAndParameters, self_prefix, @@ -1731,7 +1731,7 @@ impl Visitor for NodeFinder<'_> { let prefix = ident.to_string().to_case(Case::Snake); let self_prefix = false; self.complete_type_fields_and_methods( - &typ, + typ, &prefix, FunctionCompletionKind::NameAndParameters, self_prefix, diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs index bc266c03f76..0bd984dcbea 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/completion_items.rs @@ -188,8 +188,8 @@ impl NodeFinder<'_> { let func_self_type = if let Some((pattern, typ, _)) = func_meta.parameters.0.first() { if self.hir_pattern_is_self_type(pattern) { - if let Type::MutableReference(mut_typ) = typ { - let typ: &Type = mut_typ; + if let Type::Reference(elem_type, _) = typ { + let typ: &Type = elem_type; Some(typ) } else { Some(typ) @@ -222,9 +222,8 @@ impl NodeFinder<'_> { // Check that the pattern type is the same as self type. // We do this because some types (only Field and integer types) // have their methods in the same HashMap. - - if let Type::MutableReference(mut_typ) = self_type { - self_type = mut_typ; + if let Type::Reference(elem_type, _) = self_type { + self_type = elem_type; } if self_type != func_self_type { @@ -597,7 +596,7 @@ fn func_meta_type_to_string(func_meta: &FuncMeta, name: &str, has_self_type: boo } fn type_to_self_string(typ: &Type, string: &mut String) { - if let Type::MutableReference(..) = typ { + if let Type::Reference(..) = typ { string.push_str("&mut self"); } else { string.push_str("self"); diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs index 3958b92deb0..64c91dd673d 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs @@ -16,6 +16,7 @@ mod completion_tests { }, test_utils, tests::apply_text_edits, + utils::get_cursor_line_and_column, }; use lsp_types::{ @@ -26,19 +27,12 @@ mod completion_tests { }; use tokio::test; - async fn get_completions(src: &str) -> Vec { + /// Given a string with ">|<" (cursor) in it, returns all completions that are available + /// at that position together with the string with ">|<" removed. 
+ async fn get_completions(src: &str) -> (Vec, String) { let (mut state, noir_text_document) = test_utils::init_lsp_server("document_symbol").await; - let (line, column) = src - .lines() - .enumerate() - .filter_map(|(line_index, line)| { - line.find(">|<").map(|char_index| (line_index, char_index)) - }) - .next() - .expect("Expected to find one >|< in the source code"); - - let src = src.replace(">|<", ""); + let (line, column, src) = get_cursor_line_and_column(src); on_did_open_text_document( &mut state, @@ -67,7 +61,9 @@ mod completion_tests { .await .expect("Could not execute on_completion_request"); - if let Some(CompletionResponse::Array(items)) = response { items } else { vec![] } + let items = + if let Some(CompletionResponse::Array(items)) = response { items } else { vec![] }; + (items, src) } fn assert_items_match(mut items: Vec, mut expected: Vec) { @@ -90,12 +86,12 @@ mod completion_tests { } async fn assert_completion(src: &str, expected: Vec) { - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_items_match(items, expected); } async fn assert_completion_excluding_auto_import(src: &str, expected: Vec) { - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; let items = items.into_iter().filter(|item| item.additional_text_edits.is_none()).collect(); assert_items_match(items, expected); } @@ -760,7 +756,7 @@ mod completion_tests { fn foo(x: i>|<) {} "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; let items = items.into_iter().filter(|item| item.label.starts_with('i')).collect(); assert_items_match( @@ -782,7 +778,7 @@ mod completion_tests { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert!(items.iter().any(|item| item.label == "i8")); } @@ -794,7 +790,7 @@ mod completion_tests { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert!( items .iter() @@ -1341,7 +1337,7 @@ mod completion_tests { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; let items = items.into_iter().filter(|item| item.kind == Some(CompletionItemKind::FIELD)); let items = items.collect(); @@ -1431,7 +1427,7 @@ fn main() { } "#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -1444,8 +1440,7 @@ fn main() { }) ); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1477,7 +1472,7 @@ mod foo { } } "#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -1490,8 +1485,7 @@ mod foo { }) ); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -1522,7 +1516,7 @@ mod foo { } } }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -1535,8 +1529,7 @@ mod foo { }) ); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + 
let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -1574,13 +1567,12 @@ use foo::bar::hello_world; fn main() { hel }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -1622,13 +1614,12 @@ mod other { hel } }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -1648,7 +1639,7 @@ mod other { hel>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert!(items.is_empty()); } @@ -1667,7 +1658,7 @@ mod other { hel>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert!(items.is_empty()); } @@ -1684,7 +1675,7 @@ mod other { hel>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert!(items.is_empty()); } @@ -1703,7 +1694,7 @@ mod other { hello_w>|< } "#; - let mut items = get_completions(src).await; + let (mut items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -1737,7 +1728,7 @@ mod foo { } }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -1751,8 +1742,7 @@ mod foo { }) ); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -1784,12 +1774,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1822,12 +1811,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1862,12 +1850,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1908,12 +1895,11 @@ fn 
main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1946,12 +1932,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -1986,12 +1971,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -2022,12 +2006,11 @@ fn main() { two_hello_ }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); assert_eq!(item.sort_text, Some(auto_import_sort_text())); } @@ -2097,7 +2080,7 @@ fn main() { } } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -2115,17 +2098,17 @@ fn main() { async fn test_auto_import_from_std() { let src = r#" fn main() { - compute_merkle_roo>|< + zeroe>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; - assert_eq!(item.label, "compute_merkle_root(…)"); + assert_eq!(item.label, "zeroed()"); assert_eq!( item.label_details.as_ref().unwrap().detail, - Some("(use std::merkle::compute_merkle_root)".to_string()), + Some("(use std::mem::zeroed)".to_string()), ); } @@ -2264,7 +2247,7 @@ fn main() { x.>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; if items.iter().any(|item| item.label == "__assert_max_bit_size") { panic!("Private method __assert_max_bit_size was suggested"); } @@ -2304,7 +2287,7 @@ fn main() { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -2331,7 +2314,7 @@ fn main() { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -2356,7 +2339,7 @@ fn main() { fn main() {} "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -2684,9 +2667,9 @@ fn main() { } "#; - let completions = get_completions(src).await; - assert_eq!(completions.len(), 1); - 
assert_eq!(completions[0].label, "unquote!(…)"); + let (items, _) = get_completions(src).await; + assert_eq!(items.len(), 1); + assert_eq!(items[0].label, "unquote!(…)"); } #[test] @@ -2855,7 +2838,7 @@ fn main() { foo.b>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); assert!(items[0].label == "bar_baz()"); } @@ -2874,7 +2857,7 @@ fn main() { x.fo>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); } @@ -2925,7 +2908,7 @@ fn main() { } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 2); } @@ -2946,14 +2929,13 @@ fn main() { Field::fooba>|< } "#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); assert_eq!(item.label_details.unwrap().detail, Some("(use moo::Foo)".to_string())); - let new_code = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let new_code = apply_text_edits(&src, &item.additional_text_edits.unwrap()); let expected = r#"use moo::Foo; @@ -2992,14 +2974,13 @@ fn main() { x.fooba>|< } "#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); assert_eq!(item.label_details.unwrap().detail, Some("(use moo::Foo)".to_string())); - let new_code = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let new_code = apply_text_edits(&src, &item.additional_text_edits.unwrap()); let expected = r#"use moo::Foo; @@ -3043,14 +3024,13 @@ fn main() { x.fooba>|< } "#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); assert_eq!(item.label_details.unwrap().detail, Some("(use moo::Bar)".to_string())); - let new_code = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let new_code = apply_text_edits(&src, &item.additional_text_edits.unwrap()); let expected = r#"use moo::Bar; @@ -3088,7 +3068,7 @@ fn main() { Enum::Var>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -3120,7 +3100,7 @@ fn main() { Enum::Var>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -3149,7 +3129,7 @@ fn main() { ThisIsA>|< } "#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 1); let item = &items[0]; @@ -3172,7 +3152,7 @@ fn main() { SomeStru>|< }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -3200,8 +3180,7 @@ fn main() { SomeStru }"#; - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -3221,7 +3200,7 @@ fn main() { SomeStru>|< }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -3249,8 +3228,7 @@ fn main() { SomeStru }"#; - let changed = - 
apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -3272,7 +3250,7 @@ fn main() { SomeStru>|< }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -3302,8 +3280,7 @@ fn main() { SomeStru }"#; - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } @@ -3325,7 +3302,7 @@ fn main() { SomeStru>|< }"#; - let items = get_completions(src).await; + let (items, _) = get_completions(src).await; assert_eq!(items.len(), 0); } @@ -3349,7 +3326,7 @@ fn main() { SomeStru>|< }"#; - let mut items = get_completions(src).await; + let (mut items, src) = get_completions(src).await; assert_eq!(items.len(), 1); let item = items.remove(0); @@ -3381,8 +3358,7 @@ fn main() { SomeStru }"#; - let changed = - apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + let changed = apply_text_edits(&src, &item.additional_text_edits.unwrap()); assert_eq!(changed, expected); } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs b/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs index 1d36aabdbe7..4827d8827af 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs @@ -660,7 +660,7 @@ mod document_symbol_tests { deprecated: None, range: Range { start: Position { line: 15, character: 7 }, - end: Position { line: 15, character: 24 }, + end: Position { line: 15, character: 25 }, }, selection_range: Range { start: Position { line: 15, character: 7 }, diff --git a/noir/noir-repo/tooling/lsp/src/requests/hover/from_reference.rs b/noir/noir-repo/tooling/lsp/src/requests/hover/from_reference.rs index 3bc3b3bded7..a8ed06c0896 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/hover/from_reference.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/hover/from_reference.rs @@ -498,7 +498,7 @@ fn format_function(id: FuncId, args: &ProcessRequestCallbackArgs) -> String { let is_self = pattern_is_self(pattern, args.interner); // `&mut self` is represented as a mutable reference type, not as a mutable pattern - if is_self && matches!(typ, Type::MutableReference(..)) { + if is_self && matches!(typ, Type::Reference(..)) { string.push_str("&mut "); } @@ -814,7 +814,7 @@ impl TypeLinksGatherer<'_> { self.gather_type_links(return_type); self.gather_type_links(env); } - Type::MutableReference(typ) => self.gather_type_links(typ), + Type::Reference(typ, _) => self.gather_type_links(typ), Type::InfixExpr(lhs, _, rhs, _) => { self.gather_type_links(lhs); self.gather_type_links(rhs); diff --git a/noir/noir-repo/tooling/lsp/src/requests/hover/from_visitor.rs b/noir/noir-repo/tooling/lsp/src/requests/hover/from_visitor.rs index 97ead183cd2..2b58a31e012 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/hover/from_visitor.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/hover/from_visitor.rs @@ -70,7 +70,7 @@ impl Visitor for HoverFinder<'_> { } } -fn format_integer(typ: Type, value: SignedField) -> String { +fn format_integer(typ: &Type, value: SignedField) -> String { let value_base_10 = value.field.to_string(); // For simplicity we parse the value as a BigInt to convert it to hex @@ -98,7 +98,7 @@ mod tests { let 
typ = Type::FieldElement; let value = SignedField::positive(0_u128); let expected = " Field\n---\nvalue of literal: `0 (0x00)`"; - assert_eq!(format_integer(typ, value), expected); + assert_eq!(format_integer(&typ, value), expected); } #[test] @@ -106,7 +106,7 @@ mod tests { let typ = Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo); let value = SignedField::positive(123456_u128); let expected = " u32\n---\nvalue of literal: `123456 (0x1e240)`"; - assert_eq!(format_integer(typ, value), expected); + assert_eq!(format_integer(&typ, value), expected); } #[test] @@ -114,6 +114,6 @@ mod tests { let typ = Type::Integer(Signedness::Signed, IntegerBitSize::SixtyFour); let value = SignedField::new(987654_u128.into(), true); let expected = " i64\n---\nvalue of literal: `-987654 (-0xf1206)`"; - assert_eq!(format_integer(typ, value), expected); + assert_eq!(format_integer(&typ, value), expected); } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs index be7722e744d..37cccb789a2 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs @@ -255,7 +255,7 @@ impl<'a> InlayHintCollector<'a> { self.push_type_hint( object_lsp_location, - &typ, + typ, false, // not editable false, // don't include colon ); @@ -511,7 +511,11 @@ fn push_type_parts(typ: &Type, parts: &mut Vec, files: &File parts.push(string_part(") -> ")); push_type_parts(return_type, parts, files); } - Type::MutableReference(typ) => { + Type::Reference(typ, false) => { + parts.push(string_part("&")); + push_type_parts(typ, parts, files); + } + Type::Reference(typ, true) => { parts.push(string_part("&mut ")); push_type_parts(typ, parts, files); } diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index 9126ab38e10..a5ffd1155fa 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -19,6 +19,7 @@ use nargo_fmt::Config; use noirc_frontend::ast::Ident; use noirc_frontend::graph::CrateId; use noirc_frontend::hir::def_map::{CrateDefMap, ModuleId}; +use noirc_frontend::node_interner::ReferenceId; use noirc_frontend::parser::ParserError; use noirc_frontend::usage_tracker::UsageTracker; use noirc_frontend::{graph::Dependency, node_interner::NodeInterner}; @@ -590,68 +591,104 @@ pub(crate) fn find_all_references_in_workspace( include_self_type_name: bool, ) -> Option> { // First find the node that's referenced by the given location, if any - let referenced = interner.find_referenced(location); + let referenced = interner.find_referenced(location)?; + let name = get_reference_name(referenced, interner)?; - if let Some(referenced) = referenced { - // If we found the referenced node, find its location - let referenced_location = interner.reference_location(referenced); + // If we found the referenced node, find its location + let referenced_location = interner.reference_location(referenced); - // Now we find all references that point to this location, in all interners - // (there's one interner per package, and all interners in a workspace rely on the - // same FileManager so a Location/FileId in one package is the same as in another package) - let mut locations = find_all_references( + // Now we find all references that point to this location, in all interners + // (there's one interner per package, and all interners in a workspace rely on the + // same FileManager so a 
Location/FileId in one package is the same as in another package) + let mut locations = find_all_references( + referenced_location, + interner, + include_declaration, + include_self_type_name, + ); + for cache_data in package_cache.values() { + locations.extend(find_all_references( referenced_location, - interner, - files, + &cache_data.node_interner, include_declaration, include_self_type_name, - ); - for cache_data in package_cache.values() { - locations.extend(find_all_references( - referenced_location, - &cache_data.node_interner, - files, - include_declaration, - include_self_type_name, - )); - } - - // The LSP client usually removes duplicate locations, but we do it here just in case they don't - locations.sort_by_key(|location| { - ( - location.uri.to_string(), - location.range.start.line, - location.range.start.character, - location.range.end.line, - location.range.end.character, - ) - }); - locations.dedup(); - - if locations.is_empty() { None } else { Some(locations) } - } else { - None + )); } + + // Only keep locations whose span, when read from the file, matches "name" + // (it might not match because of macro expansions) + locations.retain(|location| { + let Some(file) = files.get_file(location.file) else { + return false; + }; + + let Some(substring) = + file.source().get(location.span.start() as usize..location.span.end() as usize) + else { + return false; + }; + + substring == name + }); + + let mut locations = locations + .iter() + .filter_map(|location| to_lsp_location(files, location.file, location.span)) + .collect::>(); + + // The LSP client usually removes duplicate locations, but we do it here just in case they don't + locations.sort_by_key(|location| { + ( + location.uri.to_string(), + location.range.start.line, + location.range.start.character, + location.range.end.line, + location.range.end.character, + ) + }); + locations.dedup(); + + if locations.is_empty() { None } else { Some(locations) } } pub(crate) fn find_all_references( referenced_location: noirc_errors::Location, interner: &NodeInterner, - files: &FileMap, include_declaration: bool, include_self_type_name: bool, -) -> Vec { +) -> Vec { interner .find_all_references(referenced_location, include_declaration, include_self_type_name) - .map(|locations| { - locations - .iter() - .filter_map(|location| to_lsp_location(files, location.file, location.span)) - .collect() - }) .unwrap_or_default() } +fn get_reference_name(reference: ReferenceId, interner: &NodeInterner) -> Option { + match reference { + ReferenceId::Module(module_id) => { + Some(interner.try_module_attributes(&module_id)?.name.clone()) + } + ReferenceId::Type(type_id) => Some(interner.get_type(type_id).borrow().name.to_string()), + ReferenceId::StructMember(type_id, index) => { + Some(interner.get_type(type_id).borrow().field_at(index).name.to_string()) + } + ReferenceId::EnumVariant(type_id, index) => { + Some(interner.get_type(type_id).borrow().variant_at(index).name.to_string()) + } + ReferenceId::Trait(trait_id) => Some(interner.get_trait(trait_id).name.to_string()), + ReferenceId::Global(global_id) => Some(interner.get_global(global_id).ident.to_string()), + ReferenceId::Function(func_id) => Some(interner.function_name(&func_id).to_string()), + ReferenceId::Alias(type_alias_id) => { + Some(interner.get_type_alias(type_alias_id).borrow().name.to_string()) + } + ReferenceId::Local(definition_id) => { + Some(interner.definition_name(definition_id).to_string()) + } + ReferenceId::Reference(location, _) => { + 
get_reference_name(interner.find_referenced(location)?, interner) + } + } +} + /// Represents a trait reexported from a given module with a name. pub(crate) struct TraitReexport { pub(super) module_id: ModuleId, diff --git a/noir/noir-repo/tooling/lsp/src/requests/references.rs b/noir/noir-repo/tooling/lsp/src/requests/references.rs index fbe69c99871..73188b18ada 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/references.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/references.rs @@ -28,10 +28,12 @@ pub(crate) fn on_references_request( #[cfg(test)] mod references_tests { use super::*; - use crate::notifications; use crate::test_utils::{self, search_in_file}; + use crate::utils::get_cursor_line_and_column; + use crate::{notifications, on_did_open_text_document}; use lsp_types::{ - PartialResultParams, Position, Range, ReferenceContext, TextDocumentPositionParams, Url, + DidOpenTextDocumentParams, PartialResultParams, Position, Range, ReferenceContext, + TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams, Url, WorkDoneProgressParams, }; use tokio::test; @@ -55,9 +57,7 @@ mod references_tests { let params = ReferenceParams { text_document_position: TextDocumentPositionParams { - text_document: lsp_types::TextDocumentIdentifier { - uri: noir_text_document.clone(), - }, + text_document: TextDocumentIdentifier { uri: noir_text_document.clone() }, position: target_position, }, work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, @@ -123,7 +123,7 @@ mod references_tests { let params = ReferenceParams { text_document_position: TextDocumentPositionParams { - text_document: lsp_types::TextDocumentIdentifier { uri: one_lib.clone() }, + text_document: TextDocumentIdentifier { uri: one_lib.clone() }, position: Position { line: 0, character: 7 }, }, work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, @@ -170,4 +170,56 @@ mod references_tests { } ); } + + #[test] + async fn ignores_macro_expansions() { + let src = " + #[foo] + struct Fo>|<o {} + + comptime fn foo(s: StructDefinition) -> Quoted { + let t = s.as_type(); + quote { + impl Trait for $t {} + } + } + "; + + let (mut state, noir_text_document) = test_utils::init_lsp_server("document_symbol").await; + + let (line, column, src) = get_cursor_line_and_column(src); + + on_did_open_text_document( + &mut state, + DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: noir_text_document.clone(), + language_id: "noir".to_string(), + version: 0, + text: src.to_string(), + }, + }, + ); + + let result = on_references_request( + &mut state, + ReferenceParams { + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { uri: noir_text_document }, + position: Position { line: line as u32, character: column as u32 }, + }, + work_done_progress_params: WorkDoneProgressParams { work_done_token: None }, + partial_result_params: PartialResultParams { partial_result_token: None }, + context: ReferenceContext { include_declaration: true }, }, ) + .await; + let locations = result.unwrap().unwrap(); + assert_eq!(locations.len(), 1); + assert_eq!(locations[0].range.start.line, 2); // Just the one for "struct Foo" + } } diff --git a/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs b/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs index 1709106e121..6706bb098bf 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/signature_help.rs @@ -98,8 +98,8 @@ impl<'a> SignatureFinder<'a> { } // Otherwise, the call must be
a reference to an fn type - if let Some(mut typ) = self.interner.type_at_location(location) { - typ = typ.follow_bindings(); + if let Some(typ) = self.interner.type_at_location(location) { + let mut typ = typ.follow_bindings(); if let Type::Forall(_, forall_typ) = typ { typ = *forall_typ; } @@ -146,11 +146,13 @@ impl<'a> SignatureFinder<'a> { } if has_self && index == 0 { - if let Type::MutableReference(..) = typ { - label.push_str("&mut self"); - } else { - label.push_str("self"); + if let Type::Reference(_, mutable) = typ { + label.push('&'); + if *mutable { + label.push_str("mut "); + } } + label.push_str("self"); } else { let parameter_start = label.chars().count(); diff --git a/noir/noir-repo/tooling/lsp/src/requests/signature_help/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/signature_help/tests.rs index a5cf7c32e1e..b102a3f03b1 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/signature_help/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/signature_help/tests.rs @@ -2,6 +2,7 @@ mod signature_help_tests { use crate::{ notifications::on_did_open_text_document, requests::on_signature_help_request, test_utils, + utils::get_cursor_line_and_column, }; use lsp_types::{ @@ -14,15 +15,7 @@ mod signature_help_tests { async fn get_signature_help(src: &str) -> SignatureHelp { let (mut state, noir_text_document) = test_utils::init_lsp_server("document_symbol").await; - let (line, column) = src - .lines() - .enumerate() - .find_map(|(line_index, line)| { - line.find(">|<").map(|char_index| (line_index, char_index)) - }) - .expect("Expected to find one >|< in the source code"); - - let src = src.replace(">|<", ""); + let (line, column, src) = get_cursor_line_and_column(src); on_did_open_text_document( &mut state, diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index f0d49c4d864..6b7a53e4596 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -119,7 +119,7 @@ fn on_test_run_request_inner( TestStatus::CompileError(diag) => NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), - message: Some(diag.diagnostic.message), + message: Some(diag.message), }, }; Ok(result) diff --git a/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs b/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs index 3a60a882aea..b24f4dd7d87 100644 --- a/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs +++ b/noir/noir-repo/tooling/lsp/src/trait_impl_method_stub_generator.rs @@ -359,7 +359,11 @@ impl<'a> TraitImplMethodStubGenerator<'a> { self.append_type(ret); } } - Type::MutableReference(typ) => { + Type::Reference(typ, false) => { + self.string.push('&'); + self.append_type(typ); + } + Type::Reference(typ, true) => { self.string.push_str("&mut "); self.append_type(typ); } diff --git a/noir/noir-repo/tooling/lsp/src/utils.rs b/noir/noir-repo/tooling/lsp/src/utils.rs index ca607128bf2..5fe5d14bc6d 100644 --- a/noir/noir-repo/tooling/lsp/src/utils.rs +++ b/noir/noir-repo/tooling/lsp/src/utils.rs @@ -53,3 +53,19 @@ pub(crate) fn character_to_line_offset(line: &str, character: u32) -> Option<u32> { +/// Given a string with ">|<" (cursor) in it, returns: +/// 1. The line where the cursor is (zero-based) +/// 2. The column where the cursor is (zero-based) +/// 3.
that string with ">|<" removed +#[cfg(test)] +pub(crate) fn get_cursor_line_and_column(src: &str) -> (usize, usize, String) { + let (line, column) = src + .lines() + .enumerate() + .find_map(|(line_index, line)| line.find(">|<").map(|char_index| (line_index, char_index))) + .expect("Expected to find one >|< in the source code"); + + let src = src.replace(">|<", ""); + (line, column, src) +} diff --git a/noir/noir-repo/tooling/lsp/src/with_file.rs b/noir/noir-repo/tooling/lsp/src/with_file.rs index 1f0d02db421..3ba8aecf74a 100644 --- a/noir/noir-repo/tooling/lsp/src/with_file.rs +++ b/noir/noir-repo/tooling/lsp/src/with_file.rs @@ -485,8 +485,8 @@ fn unresolved_type_data_with_file(typ: UnresolvedTypeData, file: FileId) -> Unre generic_type_args_with_file(generic_type_args, file), ) } - UnresolvedTypeData::MutableReference(typ) => { - UnresolvedTypeData::MutableReference(Box::new(unresolved_type_with_file(*typ, file))) + UnresolvedTypeData::Reference(typ, mutable) => { + UnresolvedTypeData::Reference(Box::new(unresolved_type_with_file(*typ, file)), mutable) } UnresolvedTypeData::Tuple(types) => { UnresolvedTypeData::Tuple(unresolved_types_with_file(types, file)) diff --git a/noir/noir-repo/tooling/nargo/src/errors.rs b/noir/noir-repo/tooling/nargo/src/errors.rs index cdf11a0e9e7..f1743af79ca 100644 --- a/noir/noir-repo/tooling/nargo/src/errors.rs +++ b/noir/noir-repo/tooling/nargo/src/errors.rs @@ -9,9 +9,7 @@ use acvm::{ pwg::{ErrorLocation, OpcodeResolutionError}, }; use noirc_abi::{Abi, AbiErrorType, display_abi_error}; -use noirc_errors::{ - CustomDiagnostic, FileDiagnostic, debug_info::DebugInfo, reporter::ReportedErrors, -}; +use noirc_errors::{CustomDiagnostic, debug_info::DebugInfo, reporter::ReportedErrors}; pub use noirc_errors::Location; @@ -230,7 +228,7 @@ pub fn try_to_diagnose_runtime_error( nargo_err: &NargoError, abi: &Abi, debug: &[DebugInfo], -) -> Option { +) -> Option { let source_locations = match nargo_err { NargoError::ExecutionError(execution_error) => { extract_locations_from_error(execution_error, debug)? @@ -242,5 +240,5 @@ pub fn try_to_diagnose_runtime_error( let location = *source_locations.last()?; let message = extract_message_from_error(&abi.error_types, nargo_err); let error = CustomDiagnostic::simple_error(message, String::new(), location); - Some(error.with_call_stack(source_locations).in_file(location.file)) + Some(error.with_call_stack(source_locations)) } diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs index f17a97cecd4..8d33b07dd08 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs @@ -4,6 +4,7 @@ use thiserror::Error; pub mod layers; pub mod mocker; pub mod print; +pub mod transcript; pub mod default; #[cfg(feature = "rpc")] @@ -83,4 +84,7 @@ pub enum ForeignCallError { #[error("Assert message resolved after an unsatisfied constrain. {0}")] ResolvedAssertMessage(String), + + #[error("Failed to replay oracle transcript: {0}")] + TranscriptError(String), } diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs index a225fe31dcb..96db9232d0d 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs @@ -16,6 +16,7 @@ pub enum PrintOutput<'a> { String(&'a mut String), } +/// Handle `println` calls. 
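The `get_cursor_line_and_column` helper added in `utils.rs` above centralizes the `>|<` cursor convention that the LSP tests use to mark a position in source text. A self-contained illustration of the convention (a standalone copy for demonstration, not part of the diff):

```rust
// Standalone mirror of `get_cursor_line_and_column`, for illustration only.
fn cursor_position(src: &str) -> (usize, usize, String) {
    let (line, column) = src
        .lines()
        .enumerate()
        .find_map(|(i, l)| l.find(">|<").map(|c| (i, c)))
        .expect("expected one >|< marker");
    (line, column, src.replace(">|<", ""))
}

fn main() {
    // The marker sits between `ma` and `in`: line 0, column 5 of the cleaned source.
    let (line, column, clean) = cursor_position("fn ma>|<in() {}");
    assert_eq!((line, column), (0, 5));
    assert_eq!(clean, "fn main() {}");
}
```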
 #[derive(Debug, Default)]
 pub struct PrintForeignCallExecutor<'a> {
     output: PrintOutput<'a>,
diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/transcript.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/transcript.rs
new file mode 100644
index 00000000000..155835c3fc1
--- /dev/null
+++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/transcript.rs
@@ -0,0 +1,156 @@
+use std::{
+    collections::VecDeque,
+    path::{Path, PathBuf},
+};
+
+use acvm::{AcirField, acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo};
+use serde::{Deserialize, Serialize};
+use serde_json::json;
+
+use crate::PrintOutput;
+
+use super::{ForeignCallError, ForeignCallExecutor};
+
+#[derive(Debug, thiserror::Error)]
+pub enum TranscriptError {
+    #[error(transparent)]
+    IoError(#[from] std::io::Error),
+
+    #[error(transparent)]
+    DeserializationError(#[from] serde_json::Error),
+}
+
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
+struct LogItem<F> {
+    call: ForeignCallWaitInfo<F>,
+    result: ForeignCallResult<F>,
+}
+
+/// Log foreign calls during the execution, for testing purposes.
+pub struct LoggingForeignCallExecutor<'a, E> {
+    pub executor: E,
+    pub output: PrintOutput<'a>,
+}
+
+impl<'a, E> LoggingForeignCallExecutor<'a, E> {
+    pub fn new(executor: E, output: PrintOutput<'a>) -> Self {
+        Self { executor, output }
+    }
+}
+
+impl<F, E> ForeignCallExecutor<F> for LoggingForeignCallExecutor<'_, E>
+where
+    F: AcirField + Serialize,
+    E: ForeignCallExecutor<F>,
+{
+    fn execute(
+        &mut self,
+        foreign_call: &ForeignCallWaitInfo<F>,
+    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
+        let result = self.executor.execute(foreign_call);
+        if let Ok(ref result) = result {
+            let log_item = || {
+                // Match the JSON structure of `LogItem` without having to clone.
+                let json = json!({"call": foreign_call, "result": result});
+                serde_json::to_string(&json).expect("failed to serialize foreign call")
+            };
+            match &mut self.output {
+                PrintOutput::None => (),
+                PrintOutput::Stdout => println!("{}", log_item()),
+                PrintOutput::String(s) => {
+                    s.push_str(&log_item());
+                    s.push('\n');
+                }
+            }
+        }
+        result
+    }
+}
+
+/// Log foreign calls to stdout as soon as they are made, or buffer them and write to a file at the end.
+pub enum ForeignCallLog {
+    None,
+    Stdout,
+    File(PathBuf, String),
+}
+
+impl ForeignCallLog {
+    /// Instantiate based on an env var.
+    pub fn from_env(key: &str) -> Self {
+        match std::env::var(key) {
+            Err(_) => Self::None,
+            Ok(s) if s == "stdout" => Self::Stdout,
+            Ok(s) => Self::File(PathBuf::from(s), String::new()),
+        }
+    }
+
+    /// Create a [PrintOutput] based on the log setting.
+    pub fn print_output(&mut self) -> PrintOutput {
+        match self {
+            ForeignCallLog::None => PrintOutput::None,
+            ForeignCallLog::Stdout => PrintOutput::Stdout,
+            ForeignCallLog::File(_, s) => PrintOutput::String(s),
+        }
+    }
+
+    /// Any final logging.
+    pub fn write_log(self) -> std::io::Result<()> {
+        if let ForeignCallLog::File(path, contents) = self {
+            std::fs::write(path, contents)?;
+        }
+        Ok(())
+    }
+}
+
+/// Replay an oracle transcript which was logged with [LoggingForeignCallExecutor].
+///
+/// This is expected to be the last executor in the stack, e.g. prints can be handled above it.
+pub struct ReplayForeignCallExecutor<F> {
+    transcript: VecDeque<LogItem<F>>,
+}
+
+impl<F: for<'a> Deserialize<'a>> ReplayForeignCallExecutor<F> {
+    pub fn from_file(path: &Path) -> Result<Self, TranscriptError> {
+        let contents = std::fs::read_to_string(path)?;
+
+        let transcript =
+            contents.lines().map(serde_json::from_str).collect::<Result<VecDeque<_>, _>>()?;
+
+        Ok(Self { transcript })
+    }
+}
+
+impl<F> ForeignCallExecutor<F> for ReplayForeignCallExecutor<F>
+where
+    F: AcirField,
+{
+    fn execute(
+        &mut self,
+        foreign_call: &ForeignCallWaitInfo<F>,
+    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
+        let error = |msg| Err(ForeignCallError::TranscriptError(msg));
+        // Verify without popping.
+        if let Some(next) = self.transcript.front() {
+            if next.call.function != foreign_call.function {
+                let msg = format!(
+                    "unexpected foreign call; expected '{}', got '{}'",
+                    next.call.function, foreign_call.function
+                );
+                return error(msg);
+            }
+            if next.call.inputs != foreign_call.inputs {
+                let msg = format!(
+                    "unexpected foreign call inputs to '{}'; expected {:?}, got {:?}",
+                    next.call.function, next.call.inputs, foreign_call.inputs
+                );
+                return error(msg);
+            }
+        }
+        // Consume the next call.
+        if let Some(next) = self.transcript.pop_front() {
+            Ok(next.result)
+        } else {
+            error("unexpected foreign call; no more calls in transcript".to_string())
+        }
+    }
+}
diff --git a/noir/noir-repo/tooling/nargo/src/ops/check.rs b/noir/noir-repo/tooling/nargo/src/ops/check.rs
index f22def8bd91..129aa1bd788 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/check.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/check.rs
@@ -1,6 +1,6 @@
 use acvm::compiler::CircuitSimulator;
 use noirc_driver::{CompiledProgram, ErrorsAndWarnings};
-use noirc_errors::{CustomDiagnostic, FileDiagnostic};
+use noirc_errors::CustomDiagnostic;
 
 /// Run each function through a circuit simulator to check that they are solvable.
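Together, the two executors in the new `transcript.rs` give nargo a record-and-replay facility for oracles: `LoggingForeignCallExecutor` wraps any inner executor and emits one JSON line per resolved call, while `ReplayForeignCallExecutor` serves the recorded results back in order and rejects any divergence in call name or inputs. A minimal sketch of how they can be stacked, assuming `layers::Unhandled` is the no-op base layer used by `test.rs` below (the execution loop itself is elided):

```rust
use std::path::Path;

use acvm::FieldElement;
use nargo::PrintOutput;
use nargo::foreign_calls::{
    layers,
    transcript::{LoggingForeignCallExecutor, ReplayForeignCallExecutor, TranscriptError},
};

// Recording: every call/result pair handled by the wrapped executor becomes
// one JSON line in `log`, which can later be written to disk.
fn recorder(log: &mut String) -> LoggingForeignCallExecutor<'_, layers::Unhandled> {
    LoggingForeignCallExecutor::new(layers::Unhandled, PrintOutput::String(log))
}

// Replaying: results come back verbatim from the file; an unexpected call
// surfaces as ForeignCallError::TranscriptError instead of touching an oracle.
fn replayer(path: &Path) -> Result<ReplayForeignCallExecutor<FieldElement>, TranscriptError> {
    ReplayForeignCallExecutor::from_file(path)
}
```

In `nargo test`, the same wiring is driven by the `NARGO_TEST_FOREIGN_CALL_LOG` variable ("stdout" or a file path), as the `test.rs` hunks below show.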
 #[tracing::instrument(level = "trace", skip_all)]
@@ -8,13 +8,10 @@ pub fn check_program(compiled_program: &CompiledProgram) -> Result<(), ErrorsAnd
     for (i, circuit) in compiled_program.program.functions.iter().enumerate() {
         let mut simulator = CircuitSimulator::default();
         if !simulator.check_circuit(circuit) {
-            let diag = FileDiagnostic {
-                file_id: fm::FileId::dummy(),
-                diagnostic: CustomDiagnostic::from_message(&format!(
-                    "Circuit \"{}\" is not solvable",
-                    compiled_program.names[i]
-                )),
-            };
+            let diag = CustomDiagnostic::from_message(
+                &format!("Circuit \"{}\" is not solvable", compiled_program.names[i]),
+                fm::FileId::dummy(),
+            );
             return Err(vec![diag]);
         }
     }
diff --git a/noir/noir-repo/tooling/nargo/src/ops/execute.rs b/noir/noir-repo/tooling/nargo/src/ops/execute.rs
index 699c54e3f52..b12871c75e4 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/execute.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/execute.rs
@@ -13,7 +13,7 @@ use crate::NargoError;
 use crate::errors::ExecutionError;
 use crate::foreign_calls::ForeignCallExecutor;
 
-struct ProgramExecutor<'a, F, B: BlackBoxFunctionSolver<F>, E: ForeignCallExecutor<F>> {
+struct ProgramExecutor<'a, F: AcirField, B: BlackBoxFunctionSolver<F>, E: ForeignCallExecutor<F>> {
     functions: &'a [Circuit<F>],
 
     unconstrained_functions: &'a [BrilligBytecode<F>],
diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs
index 8eba3000f3d..c4adaa5cfaa 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/test.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs
@@ -8,13 +8,17 @@ use acvm::{
 };
 use noirc_abi::Abi;
 use noirc_driver::{CompileError, CompileOptions, DEFAULT_EXPRESSION_WIDTH, compile_no_check};
-use noirc_errors::{FileDiagnostic, debug_info::DebugInfo};
+use noirc_errors::{CustomDiagnostic, debug_info::DebugInfo};
 use noirc_frontend::hir::{Context, def_map::TestFunction};
 
 use crate::{
     NargoError,
     errors::try_to_diagnose_runtime_error,
-    foreign_calls::{ForeignCallError, ForeignCallExecutor, layers, print::PrintOutput},
+    foreign_calls::{
+        ForeignCallError, ForeignCallExecutor, layers,
+        print::PrintOutput,
+        transcript::{ForeignCallLog, LoggingForeignCallExecutor},
+    },
 };
 
 use super::execute_program;
@@ -22,9 +26,9 @@ use super::execute_program;
 #[derive(Debug)]
 pub enum TestStatus {
     Pass,
-    Fail { message: String, error_diagnostic: Option<FileDiagnostic> },
+    Fail { message: String, error_diagnostic: Option<CustomDiagnostic> },
     Skipped,
-    CompileError(FileDiagnostic),
+    CompileError(CustomDiagnostic),
 }
 
 impl TestStatus {
@@ -60,11 +64,21 @@ where
     let compiled_program = crate::ops::transform_program(compiled_program, target_width);
 
     if test_function_has_no_arguments {
+        let ignore_foreign_call_failures =
+            std::env::var("NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS")
+                .is_ok_and(|var| &var == "true");
+
+        let mut foreign_call_log = ForeignCallLog::from_env("NARGO_TEST_FOREIGN_CALL_LOG");
+
         // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen,
         // otherwise constraints involving these expressions will not error.
+        // Use a base layer that doesn't handle anything, which we handle in the `execute` below.
-        let inner_executor = build_foreign_call_executor(output, layers::Unhandled);
-        let mut foreign_call_executor = TestForeignCallExecutor::new(inner_executor);
+        let foreign_call_executor = build_foreign_call_executor(output, layers::Unhandled);
+        let foreign_call_executor = TestForeignCallExecutor::new(foreign_call_executor);
+        let mut foreign_call_executor = LoggingForeignCallExecutor::new(
+            foreign_call_executor,
+            foreign_call_log.print_output(),
+        );
 
         let circuit_execution = execute_program(
             &compiled_program.program,
@@ -80,9 +94,8 @@ where
             &circuit_execution,
         );
 
-        let ignore_foreign_call_failures =
-            std::env::var("NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS")
-                .is_ok_and(|var| &var == "true");
+        let foreign_call_executor = foreign_call_executor.executor;
+        foreign_call_log.write_log().expect("failed to write foreign call log");
 
         if let TestStatus::Fail { .. } = status {
             if ignore_foreign_call_failures
@@ -218,7 +231,7 @@ fn test_status_program_compile_pass(
 fn check_expected_failure_message(
     test_function: &TestFunction,
     failed_assertion: Option<String>,
-    error_diagnostic: Option<FileDiagnostic>,
+    error_diagnostic: Option<CustomDiagnostic>,
 ) -> TestStatus {
     // Extract the expected failure message, if there was one
     //
@@ -235,9 +248,7 @@ fn check_expected_failure_message(
     // expected_failure_message
     let expected_failure_message_matches = failed_assertion
         .as_ref()
-        .or_else(|| {
-            error_diagnostic.as_ref().map(|file_diagnostic| &file_diagnostic.diagnostic.message)
-        })
+        .or_else(|| error_diagnostic.as_ref().map(|file_diagnostic| &file_diagnostic.message))
         .map(|message| message.contains(expected_failure_message))
         .unwrap_or(false);
     if expected_failure_message_matches {
diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
index 5aa37cffcda..a80828356ff 100644
--- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml
+++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
@@ -99,12 +99,7 @@ sha3.workspace = true
 iai = "0.1.1"
 test-case.workspace = true
 lazy_static.workspace = true
-light-poseidon = "0.2.0"
-
-ark-bn254-v04 = { package = "ark-bn254", version = "^0.4.0", default-features = false, features = [
-    "curve",
-] }
-ark-ff-v04 = { package = "ark-ff", version = "^0.4.0", default-features = false }
+light-poseidon = "0.3.0"
 
 [[bench]]
 name = "criterion"
diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs
index 904e404d7c5..b9faf018dfd 100644
--- a/noir/noir-repo/tooling/nargo_cli/build.rs
+++ b/noir/noir-repo/tooling/nargo_cli/build.rs
@@ -438,7 +438,6 @@ fn generate_compile_success_no_bug_tests(test_file: &mut File, test_data_dir: &P
         &test_dir,
         "compile",
         r#"
-        nargo.arg("--enable-brillig-constraints-check");
         nargo.assert().success().stderr(predicate::str::contains("bug:").not());
         "#,
         &MatrixConfig::default(),
@@ -468,7 +467,6 @@ fn generate_compile_success_with_bug_tests(test_file: &mut File, test_data_dir:
         &test_dir,
         "compile",
         r#"
-        nargo.arg("--enable-brillig-constraints-check");
         nargo.assert().success().stderr(predicate::str::contains("bug:"));
         "#,
         &MatrixConfig::default(),
diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs
index 2ee19f5edc0..45a0b7e15e4 100644
--- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs
+++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs
@@ -325,6 +325,7 @@ mod tests {
     use nargo::ops::compile_program;
     use nargo_toml::PackageSelection;
     use noirc_driver::{CompileOptions, CrateName};
+    use 
noirc_frontend::elaborator::UnstableFeature; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use crate::cli::test_cmd::formatters::diagnostic_to_string; @@ -403,12 +404,17 @@ mod tests { let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { + let options = CompileOptions { + unstable_features: vec![UnstableFeature::Enums], + ..Default::default() + }; + let (program_0, _warnings) = compile_program( &file_manager, &parsed_files, workspace, package, - &CompileOptions::default(), + &options, None, ) .unwrap_or_else(|err| { diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index f12e83297e6..f9303180fc0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -215,7 +215,7 @@ fn run_async( if let Some(witness_name) = witness_name { let witness_path = - save_witness_to_dir(solved_witness_stack, witness_name, target_dir)?; + save_witness_to_dir(&solved_witness_stack, witness_name, target_dir)?; println!("[{}] Witness saved to {}", package.name, witness_path.display()); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index c7c3227795d..7ba95a16eeb 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -1,24 +1,11 @@ use std::path::PathBuf; -use acvm::FieldElement; -use acvm::acir::native_types::WitnessStack; -use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; -use nargo::PrintOutput; use nargo::constants::PROVER_INPUT_FILE; -use nargo::errors::try_to_diagnose_runtime_error; -use nargo::foreign_calls::DefaultForeignCallBuilder; -use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::PackageSelection; -use noir_artifact_cli::fs::artifact::read_program_from_file; -use noir_artifact_cli::fs::inputs::read_inputs_from_file; -use noir_artifact_cli::fs::witness::save_witness_to_dir; -use noirc_abi::InputMap; -use noirc_abi::input_parser::InputValue; -use noirc_artifacts::debug::DebugArtifact; -use noirc_driver::{CompileOptions, CompiledProgram}; +use noirc_driver::CompileOptions; use super::compile_cmd::compile_workspace_full; use super::{LockType, PackageOptions, WorkspaceCommand}; @@ -44,8 +31,12 @@ pub(crate) struct ExecuteCommand { compile_options: CompileOptions, /// JSON RPC url to solve oracle calls - #[clap(long)] + #[clap(long, conflicts_with = "oracle_file")] oracle_resolver: Option, + + /// Path to the oracle transcript. + #[clap(long, conflicts_with = "oracle_resolver")] + oracle_file: Option, } impl WorkspaceCommand for ExecuteCommand { @@ -60,127 +51,30 @@ impl WorkspaceCommand for ExecuteCommand { } pub(crate) fn run(args: ExecuteCommand, workspace: Workspace) -> Result<(), CliError> { - let target_dir = &workspace.target_directory_path(); - // Compile the full workspace in order to generate any build artifacts. 
compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { let program_artifact_path = workspace.package_build_path(package); - let program: CompiledProgram = read_program_from_file(&program_artifact_path)?.into(); - let abi = program.abi.clone(); - - let results = execute_program_and_decode( - program, - package, - &args.prover_name, - args.oracle_resolver.as_deref(), - Some(workspace.root_dir.clone()), - Some(package.name.to_string()), - args.compile_options.pedantic_solving, - )?; - - println!("[{}] Circuit witness successfully solved", package.name); - if let Some(ref return_value) = results.actual_return { - println!("[{}] Circuit output: {return_value:?}", package.name); - } - - let package_name = package.name.clone().into(); - let witness_name = args.witness_name.as_ref().unwrap_or(&package_name); - let witness_path = save_witness_to_dir(results.witness_stack, witness_name, target_dir)?; - println!("[{}] Witness saved to {}", package.name, witness_path.display()); - - // Sanity checks on the return value after the witness has been saved, so it can be inspected if necessary. - if let Some(expected) = results.expected_return { - if results.actual_return.as_ref() != Some(&expected) { - return Err(CliError::UnexpectedReturn { expected, actual: results.actual_return }); - } - } - // We can expect that if the circuit returns something, it should be non-empty after execution. - if let Some(ref expected) = abi.return_type { - if results.actual_return.is_none() { - return Err(CliError::MissingReturn { expected: expected.clone() }); - } - } + let prover_file = package.root_dir.join(&args.prover_name).with_extension("toml"); + + let cmd = noir_artifact_cli::commands::execute_cmd::ExecuteCommand { + artifact_path: program_artifact_path, + prover_file, + output_dir: Some(workspace.target_directory_path()), + witness_name: Some( + args.witness_name.clone().unwrap_or_else(|| package.name.to_string()), + ), + contract_fn: None, + oracle_file: args.oracle_file.clone(), + oracle_resolver: args.oracle_resolver.clone(), + oracle_root_dir: Some(workspace.root_dir.clone()), + oracle_package_name: Some(package.name.to_string()), + pedantic_solving: args.compile_options.pedantic_solving, + }; + + noir_artifact_cli::commands::execute_cmd::run(cmd)?; } Ok(()) } - -fn execute_program_and_decode( - program: CompiledProgram, - package: &Package, - prover_name: &str, - foreign_call_resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - pedantic_solving: bool, -) -> Result { - // Parse the initial witness values from Prover.toml - let (inputs_map, expected_return) = read_inputs_from_file( - &package.root_dir.join(prover_name).with_extension("toml"), - &program.abi, - )?; - let witness_stack = execute_program( - &program, - &inputs_map, - foreign_call_resolver_url, - root_path, - package_name, - pedantic_solving, - )?; - // Get the entry point witness for the ABI - let main_witness = - &witness_stack.peek().expect("Should have at least one witness on the stack").witness; - let (_, actual_return) = program.abi.decode(main_witness)?; - - Ok(ExecutionResults { expected_return, actual_return, witness_stack }) -} - -struct ExecutionResults { - expected_return: Option, - actual_return: Option, - witness_stack: WitnessStack, -} - -pub(crate) fn execute_program( - compiled_program: &CompiledProgram, - inputs_map: &InputMap, - foreign_call_resolver_url: Option<&str>, - root_path: Option, 
- package_name: Option, - pedantic_solving: bool, -) -> Result, CliError> { - let initial_witness = compiled_program.abi.encode(inputs_map, None)?; - - let solved_witness_stack_err = nargo::ops::execute_program( - &compiled_program.program, - initial_witness, - &Bn254BlackBoxSolver(pedantic_solving), - &mut DefaultForeignCallBuilder { - output: PrintOutput::Stdout, - enable_mocks: false, - resolver_url: foreign_call_resolver_url.map(|s| s.to_string()), - root_path, - package_name, - } - .build(), - ); - match solved_witness_stack_err { - Ok(solved_witness_stack) => Ok(solved_witness_stack), - Err(err) => { - let debug_artifact = DebugArtifact { - debug_symbols: compiled_program.debug.clone(), - file_map: compiled_program.file_map.clone(), - }; - - if let Some(diagnostic) = - try_to_diagnose_runtime_error(&err, &compiled_program.abi, &compiled_program.debug) - { - diagnostic.report(&debug_artifact, false); - } - - Err(CliError::NargoError(err)) - } - } -} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 9fe682bef7f..373dfce86a9 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -1,7 +1,7 @@ use nargo::errors::CompileError; use nargo::ops::report_errors; use noir_artifact_cli::fs::artifact::save_program_to_file; -use noirc_errors::FileDiagnostic; +use noirc_errors::CustomDiagnostic; use noirc_frontend::hir::ParsedFiles; use rayon::prelude::*; @@ -82,7 +82,7 @@ fn compile_exported_functions( |(function_name, function_id)| -> Result<(String, CompiledProgram), CompileError> { // TODO: We should to refactor how to deal with compilation errors to avoid this. let program = compile_no_check(&mut context, compile_options, function_id, None, false) - .map_err(|error| vec![FileDiagnostic::from(error)]); + .map_err(|error| vec![CustomDiagnostic::from(error)]); let program = report_errors( program.map(|program| (program, Vec::new())), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs index b16ce9d1f7d..53551c13950 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -56,11 +56,8 @@ pub(crate) fn run(args: FormatCommand, workspace: Workspace) -> Result<(), CliEr let is_all_warnings = errors.iter().all(ParserError::is_warning); if !is_all_warnings { let errors = errors - .into_iter() - .map(|error| { - let error = CustomDiagnostic::from(&error); - error.in_file(file_id) - }) + .iter() + .map(CustomDiagnostic::from) .collect(); let _ = report_errors::<()>( diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs index b62e2e2ad9c..68628129245 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs @@ -2,7 +2,7 @@ use std::{io::Write, panic::RefUnwindSafe, time::Duration}; use fm::FileManager; use nargo::ops::TestStatus; -use noirc_errors::{FileDiagnostic, reporter::stack_trace}; +use noirc_errors::{CustomDiagnostic, reporter::stack_trace}; use serde_json::{Map, json}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, StandardStreamLock, WriteColor}; @@ -438,7 +438,7 @@ impl Formatter for JsonFormatter { stdout.push_str(message.trim()); if let Some(diagnostic) = error_diagnostic { - if !(diagnostic.diagnostic.is_warning() 
&& silence_warnings) { + if !(diagnostic.is_warning() && silence_warnings) { stdout.push('\n'); stdout.push_str(&diagnostic_to_string(diagnostic, file_manager)); } @@ -450,7 +450,7 @@ impl Formatter for JsonFormatter { TestStatus::CompileError(diagnostic) => { json.insert("event".to_string(), json!("failed")); - if !(diagnostic.diagnostic.is_warning() && silence_warnings) { + if !(diagnostic.is_warning() && silence_warnings) { if !stdout.is_empty() { stdout.push('\n'); } @@ -515,12 +515,11 @@ fn package_start(package_name: &str, test_count: usize) -> std::io::Result<()> { } pub(crate) fn diagnostic_to_string( - file_diagnostic: &FileDiagnostic, + custom_diagnostic: &CustomDiagnostic, file_manager: &FileManager, ) -> String { let file_map = file_manager.as_file_map(); - let custom_diagnostic = &file_diagnostic.diagnostic; let mut message = String::new(); message.push_str(custom_diagnostic.message.trim()); @@ -529,7 +528,7 @@ pub(crate) fn diagnostic_to_string( message.push_str(note.trim()); } - if let Ok(name) = file_map.get_name(file_diagnostic.file_id) { + if let Ok(name) = file_map.get_name(custom_diagnostic.file) { message.push('\n'); message.push_str(&format!("at {name}")); } diff --git a/noir/noir-repo/tooling/nargo_cli/src/errors.rs b/noir/noir-repo/tooling/nargo_cli/src/errors.rs index 74ede00f0d0..fb241bcbd29 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/errors.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/errors.rs @@ -2,7 +2,7 @@ use acvm::FieldElement; use nargo::{NargoError, errors::CompileError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; -use noirc_abi::{AbiReturnType, errors::AbiError, input_parser::InputValue}; +use noirc_abi::errors::AbiError; use std::path::PathBuf; use thiserror::Error; @@ -42,10 +42,4 @@ pub(crate) enum CliError { /// Error from the compilation pipeline #[error(transparent)] CompileError(#[from] CompileError), - - #[error("Unexpected return value: expected {expected:?}; got {actual:?}")] - UnexpectedReturn { expected: InputValue, actual: Option }, - - #[error("Missing return witnesses; expected {expected:?}")] - MissingReturn { expected: AbiReturnType }, } diff --git a/noir/noir-repo/tooling/nargo_cli/src/main.rs b/noir/noir-repo/tooling/nargo_cli/src/main.rs index f4cc74447bc..33e18ce6f94 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/main.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/main.rs @@ -17,6 +17,9 @@ use color_eyre::config::HookBuilder; use tracing_appender::rolling; use tracing_subscriber::{EnvFilter, fmt::format::FmtSpan}; +// TODO: Currently only used by benches. +use noir_artifact_cli as _; + const PANIC_MESSAGE: &str = "This is a bug. 
We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml";
 
 fn main() {
@@ -28,7 +31,7 @@ fn main() {
     panic_hook.install();
 
     if let Err(report) = cli::start_cli() {
-        eprintln!("{report}");
+        eprintln!("{report:#}");
         std::process::exit(1);
     }
 }
@@ -42,6 +45,6 @@ fn setup_tracing() {
         let debug_file = rolling::daily(log_dir, "nargo-log");
         subscriber.with_writer(debug_file).with_ansi(false).json().init();
     } else {
-        subscriber.with_ansi(true).init();
+        subscriber.with_writer(std::io::stderr).with_ansi(true).init();
     }
 }
diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs
index 2e153b85ba8..7c3794d03ab 100644
--- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs
+++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs
@@ -10,7 +10,6 @@ use noirc_driver::{
 };
 use noirc_frontend::hir::Context;
 use proptest::prelude::*;
-use sha3::Digest;
 
 /// Inputs and expected output of a snippet encoded in ABI format.
 #[derive(Debug)]
@@ -78,8 +77,8 @@ fn run_snippet_proptest(
         Err(e) => panic!("failed to compile program; brillig = {force_brillig}:\n{source}\n{e:?}"),
     };
 
-    let pedandic_solving = true;
-    let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver(pedandic_solving);
+    let pedantic_solving = true;
+    let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver(pedantic_solving);
     let foreign_call_executor = RefCell::new(DefaultForeignCallBuilder::default().build());
 
     // Generate multiple input/output
@@ -105,64 +104,6 @@ fn run_snippet_proptest(
     });
 }
 
-/// Run property tests on a code snippet which is assumed to execute a hashing function with the following signature:
-///
-/// ```ignore
-/// fn main(input: [u8; {max_len}], message_size: u32) -> pub [u8; 32]
-/// ```
-///
-/// The calls are executed with and without forcing brillig, because it seems common for hash functions to run different
-/// code paths based on `runtime::is_unconstrained()`.
-fn run_hash_proptest<const N: usize>(
-    // Different generic maximum input sizes to try.
-    max_lengths: &[usize],
-    // Some hash functions allow inputs which are less than the generic parameters, others don't.
-    variable_length: bool,
-    // Make the source code specialized for a given expected input size.
-    source: impl Fn(usize) -> String,
-    // Rust implementation of the hash function.
-    hash: fn(&[u8]) -> [u8; N],
-) {
-    for max_len in max_lengths {
-        let max_len = *max_len;
-        // The maximum length is used to pick the generic version of the method.
-        let source = source(max_len);
-        // Hash functions runs differently depending on whether the code is unconstrained or not.
-        for force_brillig in [false, true] {
-            let length_strategy =
-                if variable_length { (0..=max_len).boxed() } else { Just(max_len).boxed() };
-            // The actual input length can be up to the maximum.
-            let strategy = length_strategy
-                .prop_flat_map(|len| prop::collection::vec(any::<u8>(), len))
-                .prop_map(move |mut msg| {
-                    // The output is the hash of the data as it is.
-                    let output = hash(&msg);
-
-                    // The input has to be padded to the maximum length.
-                    let msg_size = msg.len();
-                    msg.resize(max_len, 0u8);
-
-                    let mut inputs = vec![("input", bytes_input(&msg))];
-
-                    // Omit the `message_size` if the hash function doesn't support it.
-                    if variable_length {
-                        inputs.push((
-                            "message_size",
-                            InputValue::Field(FieldElement::from(msg_size)),
-                        ));
-                    }
-
-                    SnippetInputOutput::new(inputs, bytes_input(&output)).with_description(format!(
-                        "force_brillig = {force_brillig}, max_len = {max_len}"
-                    ))
-                })
-                .boxed();
-
-            run_snippet_proptest(source.clone(), force_brillig, strategy);
-        }
-    }
-}
-
 /// This is just a simple test to check that property testing works.
 #[test]
 fn fuzz_basic() {
@@ -187,72 +128,6 @@ fn fuzz_basic() {
     run_snippet_proptest(program.to_string(), false, strategy);
 }
 
-#[test]
-fn fuzz_keccak256_equivalence() {
-    run_hash_proptest(
-        // XXX: Currently it fails with inputs >= 135 bytes
-        &[0, 1, 100, 134],
-        true,
-        |max_len| {
-            format!(
-                "fn main(input: [u8; {max_len}], message_size: u32) -> pub [u8; 32] {{
-                    std::hash::keccak256(input, message_size)
-                }}"
-            )
-        },
-        |data| sha3::Keccak256::digest(data).into(),
-    );
-}
-
-#[test]
-#[should_panic] // Remove once fixed
-fn fuzz_keccak256_equivalence_over_135() {
-    run_hash_proptest(
-        &[135, 150],
-        true,
-        |max_len| {
-            format!(
-                "fn main(input: [u8; {max_len}], message_size: u32) -> pub [u8; 32] {{
-                    std::hash::keccak256(input, message_size)
-                }}"
-            )
-        },
-        |data| sha3::Keccak256::digest(data).into(),
-    );
-}
-
-#[test]
-fn fuzz_sha256_equivalence() {
-    run_hash_proptest(
-        &[0, 1, 200, 511, 512],
-        true,
-        |max_len| {
-            format!(
-                "fn main(input: [u8; {max_len}], message_size: u64) -> pub [u8; 32] {{
-                    std::hash::sha256_var(input, message_size)
-                }}"
-            )
-        },
-        |data| sha2::Sha256::digest(data).into(),
-    );
-}
-
-#[test]
-fn fuzz_sha512_equivalence() {
-    run_hash_proptest(
-        &[0, 1, 200],
-        false,
-        |max_len| {
-            format!(
-                "fn main(input: [u8; {max_len}]) -> pub [u8; 64] {{
-                    std::hash::sha512::digest(input)
-                }}"
-            )
-        },
-        |data| sha2::Sha512::digest(data).into(),
-    );
-}
-
 #[test]
 fn fuzz_poseidon2_equivalence() {
     use bn254_blackbox_solver::poseidon_hash;
@@ -290,17 +165,13 @@ fn fuzz_poseidon2_equivalence() {
 
 #[test]
 fn fuzz_poseidon_equivalence() {
-    use ark_ff_v04::{BigInteger, PrimeField};
     use light_poseidon::{Poseidon, PoseidonHasher};
 
     let poseidon_hash = |inputs: &[FieldElement]| {
-        let mut poseidon = Poseidon::<ark_bn254_v04::Fr>::new_circom(inputs.len()).unwrap();
-        let frs: Vec<ark_bn254_v04::Fr> = inputs
-            .iter()
-            .map(|f| ark_bn254_v04::Fr::from_be_bytes_mod_order(&f.to_be_bytes()))
-            .collect::<Vec<_>>();
-        let hash = poseidon.hash(&frs).expect("failed to hash");
-        FieldElement::from_be_bytes_reduce(&hash.into_bigint().to_bytes_be())
+        let mut poseidon = Poseidon::<ark_bn254::Fr>::new_circom(inputs.len()).unwrap();
+        let frs: Vec<ark_bn254::Fr> = inputs.iter().map(|f| f.into_repr()).collect::<Vec<_>>();
+        let hash: ark_bn254::Fr = poseidon.hash(&frs).expect("failed to hash");
+        FieldElement::from_repr(hash)
     };
 
     // Noir has hashes up to length 16, but the reference library won't work with more than 12.
@@ -332,12 +203,6 @@ fn fuzz_poseidon_equivalence() {
     }
 }
 
-fn bytes_input(bytes: &[u8]) -> InputValue {
-    InputValue::Vec(
-        bytes.iter().map(|b| InputValue::Field(FieldElement::from(*b as u32))).collect(),
-    )
-}
-
 fn field_vec_strategy(len: usize) -> impl Strategy<Value = Vec<FieldElement>> {
     // Generate Field elements from random 32 byte vectors.
     let field = prop::collection::vec(any::<u8>(), 32)
diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs
index 6b46a4557a2..eb20245e6b6 100644
--- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs
+++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs
@@ -155,7 +155,7 @@ impl ChunkFormatter<'_, '_> {
         group.text(self.chunk(|formatter| {
             if is_slice {
-                formatter.write_token(Token::Ampersand);
+                formatter.write_token(Token::SliceStart);
             }
             formatter.write_left_bracket();
         }));
@@ -715,7 +715,7 @@ impl ChunkFormatter<'_, '_> {
     fn format_prefix(&mut self, prefix: PrefixExpression) -> ChunkGroup {
         let mut group = ChunkGroup::new();
         group.text(self.chunk(|formatter| {
-            if let UnaryOp::MutableReference = prefix.operator {
+            if let UnaryOp::Reference { mutable: true } = prefix.operator {
                 formatter.write_current_token();
                 formatter.bump();
                 formatter.skip_comments_and_whitespace();
diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/types.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/types.rs
index 6a0e66bc1f9..04e78c04162 100644
--- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/types.rs
+++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/types.rs
@@ -75,10 +75,12 @@ impl Formatter<'_> {
             self.format_path(path);
             self.format_generic_type_args(generic_type_args);
         }
-        UnresolvedTypeData::MutableReference(typ) => {
+        UnresolvedTypeData::Reference(typ, mutable) => {
             self.write_token(Token::Ampersand);
-            self.write_keyword(Keyword::Mut);
-            self.write_space();
+            if mutable {
+                self.write_keyword(Keyword::Mut);
+                self.write_space();
+            }
             self.format_type(*typ);
         }
         UnresolvedTypeData::Tuple(types) => {
diff --git a/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs b/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs
index e13cab06e9f..75c977cd91a 100644
--- a/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs
+++ b/noir/noir-repo/tooling/noirc_abi/src/printable_type.rs
@@ -69,7 +69,7 @@ pub fn decode_value(
             decode_value(field_iterator, env);
             func_ref
         }
-        PrintableType::MutableReference { typ } => {
+        PrintableType::Reference { typ, .. } => {
             // we decode the reference, but it's not really used for printing
             decode_value(field_iterator, typ)
         }
diff --git a/noir/noir-repo/tooling/noirc_artifacts/src/contract.rs b/noir/noir-repo/tooling/noirc_artifacts/src/contract.rs
index c2e44e54266..9f8f7019ff1 100644
--- a/noir/noir-repo/tooling/noirc_artifacts/src/contract.rs
+++ b/noir/noir-repo/tooling/noirc_artifacts/src/contract.rs
@@ -1,6 +1,6 @@
 use acvm::{FieldElement, acir::circuit::Program};
 use noirc_abi::{Abi, AbiType, AbiValue};
-use noirc_driver::{CompiledContract, CompiledContractOutputs, ContractFunction};
+use noirc_driver::{CompiledContract, CompiledContractOutputs, CompiledProgram, ContractFunction};
 use serde::{Deserialize, Serialize};
 
 use noirc_driver::DebugFile;
@@ -49,6 +49,14 @@ impl From<CompiledContract> for ContractArtifact {
     }
 }
 
+impl ContractArtifact {
+    pub fn function_as_compiled_program(&self, function_name: &str) -> Option<CompiledProgram> {
+        self.functions.iter().find(|f| f.name == function_name).map(|f| {
+            f.clone().into_compiled_program(self.noir_version.clone(), self.file_map.clone())
+        })
+    }
+}
+
 /// Each function in the contract will be compiled as a separate noir program.
 ///
 /// A contract function unlike a regular Noir program however can have additional properties.
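The `function_as_compiled_program` helper above (together with `into_compiled_program` in the following hunk) lets a single contract function stand in wherever a `CompiledProgram` is expected, e.g. for the `contract_fn` option of the artifact CLI seen earlier. A hedged sketch of the intended call site, assuming an already-deserialized artifact:

```rust
use noirc_artifacts::contract::ContractArtifact;
use noirc_driver::CompiledProgram;

// Repackage one function by name as a standalone program. The contract-wide
// file map is cloned in so diagnostics for the extracted program still resolve.
fn constructor_program(artifact: &ContractArtifact) -> Option<CompiledProgram> {
    artifact.function_as_compiled_program("constructor")
}
```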
@@ -85,6 +93,26 @@ pub struct ContractFunctionArtifact { pub brillig_names: Vec, } +impl ContractFunctionArtifact { + pub fn into_compiled_program( + self, + noir_version: String, + file_map: BTreeMap, + ) -> CompiledProgram { + CompiledProgram { + noir_version, + hash: self.hash, + program: self.bytecode, + abi: self.abi, + debug: self.debug_symbols.debug_infos, + file_map, + warnings: Vec::new(), + names: self.names, + brillig_names: self.brillig_names, + } + } +} + impl From for ContractFunctionArtifact { fn from(func: ContractFunction) -> Self { ContractFunctionArtifact { diff --git a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs index c9d7eca50f5..8aaf1c0ad85 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs @@ -30,13 +30,22 @@ pub(crate) struct ExecutionFlamegraphCommand { /// The output folder for the flamegraph svg files #[clap(long, short)] - output: PathBuf, + output: Option, /// Use pedantic ACVM solving, i.e. double-check some black-box function /// assumptions when solving. /// This is disabled by default. #[clap(long, default_value = "false")] pedantic_solving: bool, + + /// A single number representing the total opcodes executed. + /// Outputs to stdout and skips generating a flamegraph. + #[clap(long, default_value = "false")] + sample_count: bool, + + /// Enables additional logging + #[clap(long, default_value = "false")] + verbose: bool, } pub(crate) fn run(args: ExecutionFlamegraphCommand) -> eyre::Result<()> { @@ -46,6 +55,8 @@ pub(crate) fn run(args: ExecutionFlamegraphCommand) -> eyre::Result<()> { &InfernoFlamegraphGenerator { count_name: "samples".to_string() }, &args.output, args.pedantic_solving, + args.sample_count, + args.verbose, ) } @@ -53,20 +64,29 @@ fn run_with_generator( artifact_path: &Path, prover_toml_path: &Path, flamegraph_generator: &impl FlamegraphGenerator, - output_path: &Path, + output_path: &Option, pedantic_solving: bool, + print_sample_count: bool, + verbose: bool, ) -> eyre::Result<()> { let program = read_program_from_file(artifact_path).context("Error reading program from file")?; ensure_brillig_entry_point(&program)?; + if !print_sample_count && output_path.is_none() { + return report_error("Missing --output argument for when building a flamegraph") + .map_err(Into::into); + } + let (inputs_map, _) = read_inputs_from_file(&prover_toml_path.with_extension("toml"), &program.abi)?; let initial_witness = program.abi.encode(&inputs_map, None)?; - println!("Executing..."); + if verbose { + println!("Executing..."); + } let solved_witness_stack_err = nargo::ops::execute_program_with_profiling( &program.bytecode, @@ -94,9 +114,21 @@ fn run_with_generator( } }; - println!("Executed"); + if verbose { + println!("Executed"); + } + + if print_sample_count { + println!("{}", profiling_samples.len()); + return Ok(()); + } - println!("Collecting {} samples", profiling_samples.len()); + // We place this logging output before the transforming and collection of the samples. + // This is done because large traces can take some time, and can make it look + // as if the profiler has stalled. 
+ if verbose { + println!("Generating flamegraph for {} samples...", profiling_samples.len()); + } let profiling_samples: Vec = profiling_samples .iter_mut() @@ -118,24 +150,28 @@ fn run_with_generator( }) .collect(); - let debug_artifact: DebugArtifact = program.into(); - - println!("Generating flamegraph with {} samples", profiling_samples.len()); + let output_path = + output_path.as_ref().expect("Should have already checked for the output path"); + let debug_artifact: DebugArtifact = program.into(); flamegraph_generator.generate_flamegraph( profiling_samples, &debug_artifact.debug_symbols[0], &debug_artifact, artifact_path.to_str().unwrap(), "main", - &Path::new(&output_path).join(Path::new(&format!("{}_brillig_trace.svg", "main"))), + &Path::new(output_path).join(Path::new(&format!("{}_brillig_trace.svg", "main"))), )?; + if verbose { + println!("Generated flamegraph"); + } + Ok(()) } fn ensure_brillig_entry_point(artifact: &ProgramArtifact) -> Result<(), CliError> { - let err_msg = "Command only supports fully unconstrained Noir programs e.g. `unconstrained fn main() { .. }".to_owned(); + let err_msg = "Command only supports fully unconstrained Noir programs e.g. `unconstrained fn main() { .. }"; let program = &artifact.bytecode; if program.functions.len() != 1 || program.unconstrained_functions.len() != 1 { return report_error(err_msg); @@ -152,3 +188,86 @@ fn ensure_brillig_entry_point(artifact: &ProgramArtifact) -> Result<(), CliError Ok(()) } + +#[cfg(test)] +mod tests { + use acir::circuit::{Circuit, Program, brillig::BrilligBytecode}; + use color_eyre::eyre; + use fm::codespan_files::Files; + use noirc_artifacts::program::ProgramArtifact; + use noirc_driver::CrateName; + use noirc_errors::debug_info::{DebugInfo, ProgramDebugInfo}; + use std::{collections::BTreeMap, path::Path, str::FromStr}; + + use crate::flamegraph::Sample; + + #[derive(Default)] + struct TestFlamegraphGenerator {} + + impl super::FlamegraphGenerator for TestFlamegraphGenerator { + fn generate_flamegraph<'files, S: Sample>( + &self, + _samples: Vec, + _debug_symbols: &DebugInfo, + _files: &'files impl Files<'files, FileId = fm::FileId>, + _artifact_name: &str, + _function_name: &str, + output_path: &Path, + ) -> eyre::Result<()> { + let output_file = std::fs::File::create(output_path).unwrap(); + std::io::Write::write_all(&mut std::io::BufWriter::new(output_file), b"success") + .unwrap(); + + Ok(()) + } + } + + #[test] + fn error_reporter_smoke_test() { + // This test purposefully uses an artifact that does not represent a Brillig entry point. + // The goal is to see that our program fails gracefully and does not panic. 
+ let temp_dir = tempfile::tempdir().unwrap(); + + let prover_toml_path = temp_dir.path().join("Prover.toml"); + + let artifact = ProgramArtifact { + noir_version: "0.0.0".to_string(), + hash: 27, + abi: noirc_abi::Abi::default(), + bytecode: Program { + functions: vec![Circuit::default()], + unconstrained_functions: vec![ + BrilligBytecode::default(), + BrilligBytecode::default(), + ], + }, + debug_symbols: ProgramDebugInfo { debug_infos: vec![DebugInfo::default()] }, + file_map: BTreeMap::default(), + names: vec!["main".to_string()], + brillig_names: Vec::new(), + }; + + // Write the artifact to a file + let artifact_path = noir_artifact_cli::fs::artifact::save_program_to_file( + &artifact, + &CrateName::from_str("test").unwrap(), + temp_dir.path(), + ) + .unwrap(); + + let flamegraph_generator = TestFlamegraphGenerator::default(); + + assert!( + super::run_with_generator( + &artifact_path, + &prover_toml_path, + &flamegraph_generator, + &Some(temp_dir.into_path()), + false, + false, + false + ) + .is_err() + ); + } +} diff --git a/noir/noir-repo/tooling/profiler/src/cli/gates_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/gates_flamegraph_cmd.rs index a82e5ea3e2a..736cddf7cf4 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/gates_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/gates_flamegraph_cmd.rs @@ -125,7 +125,7 @@ fn run_with_provider( #[cfg(test)] mod tests { use acir::circuit::{Circuit, Program}; - use color_eyre::eyre::{self}; + use color_eyre::eyre; use fm::codespan_files::Files; use noirc_artifacts::program::ProgramArtifact; use noirc_errors::debug_info::{DebugInfo, ProgramDebugInfo}; diff --git a/noir/noir-repo/tooling/profiler/src/cli/mod.rs b/noir/noir-repo/tooling/profiler/src/cli/mod.rs index b91dd6990aa..0b4e0a92b27 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/mod.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/mod.rs @@ -1,5 +1,5 @@ use clap::{Parser, Subcommand}; -use color_eyre::eyre; +use color_eyre::eyre::{self}; use const_format::formatcp; mod execution_flamegraph_cmd; diff --git a/noir/noir-repo/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs index 649331891a2..d6d00b07503 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/opcodes_flamegraph_cmd.rs @@ -1,5 +1,6 @@ use std::path::{Path, PathBuf}; +use acir::AcirField; use acir::circuit::brillig::BrilligFunctionId; use acir::circuit::{Circuit, Opcode, OpcodeLocation}; use clap::Args; @@ -134,7 +135,7 @@ fn run_with_generator( Ok(()) } -fn locate_brillig_call( +fn locate_brillig_call( brillig_fn_index: usize, acir_functions: &[Circuit], ) -> Option<(usize, usize)> { @@ -160,7 +161,7 @@ mod tests { brillig::{BrilligBytecode, BrilligFunctionId}, }, }; - use color_eyre::eyre::{self}; + use color_eyre::eyre; use fm::codespan_files::Files; use noirc_artifacts::program::ProgramArtifact; use noirc_errors::debug_info::{DebugInfo, ProgramDebugInfo}; diff --git a/noir/noir-repo/tooling/profiler/src/errors.rs b/noir/noir-repo/tooling/profiler/src/errors.rs index 6a028931f5e..951199436aa 100644 --- a/noir/noir-repo/tooling/profiler/src/errors.rs +++ b/noir/noir-repo/tooling/profiler/src/errors.rs @@ -1,5 +1,5 @@ -use fm::FileMap; -use noirc_errors::{CustomDiagnostic, Location}; +use fm::{FileId, FileMap}; +use noirc_errors::CustomDiagnostic; use thiserror::Error; #[derive(Debug, Error)] @@ -9,8 +9,8 @@ pub(crate) enum CliError { } /// Report an 
error from the CLI that is not reliant on a stack trace. -pub(crate) fn report_error(message: String) -> Result<(), CliError> { - let error = CustomDiagnostic::simple_error(message.clone(), String::new(), Location::dummy()); +pub(crate) fn report_error(message: &str) -> Result<(), CliError> { + let error = CustomDiagnostic::from_message(message, FileId::dummy()); noirc_errors::reporter::report(&FileMap::default(), &error, false); Err(CliError::Generic) } diff --git a/noir/noir-repo/tooling/profiler/src/flamegraph.rs b/noir/noir-repo/tooling/profiler/src/flamegraph.rs index b56e8a43312..16857eb2454 100644 --- a/noir/noir-repo/tooling/profiler/src/flamegraph.rs +++ b/noir/noir-repo/tooling/profiler/src/flamegraph.rs @@ -3,7 +3,7 @@ use std::{collections::BTreeMap, io::BufWriter}; use acir::circuit::OpcodeLocation; use acir::circuit::brillig::BrilligFunctionId; -use color_eyre::eyre::{self}; +use color_eyre::eyre; use fm::codespan_files::Files; use fxhash::FxHashMap as HashMap; use inferno::flamegraph::{Options, TextTruncateDirection, from_lines}; diff --git a/noir/noir-repo/tooling/profiler/src/gates_provider.rs b/noir/noir-repo/tooling/profiler/src/gates_provider.rs index 3f07f3e4be6..044e2a3642c 100644 --- a/noir/noir-repo/tooling/profiler/src/gates_provider.rs +++ b/noir/noir-repo/tooling/profiler/src/gates_provider.rs @@ -1,7 +1,7 @@ use std::path::{Path, PathBuf}; use std::process::Command; -use color_eyre::eyre::{self}; +use color_eyre::eyre; use serde::{Deserialize, Serialize}; pub(crate) trait GatesProvider { diff --git a/noir/noir-repo/tooling/profiler/src/main.rs b/noir/noir-repo/tooling/profiler/src/main.rs index e4a5bc153d2..1ac17617825 100644 --- a/noir/noir-repo/tooling/profiler/src/main.rs +++ b/noir/noir-repo/tooling/profiler/src/main.rs @@ -22,18 +22,19 @@ fn main() { .with_span_events(FmtSpan::ACTIVE) .with_writer(debug_file) .with_ansi(false) - .with_env_filter(EnvFilter::from_default_env()) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) .init(); } else { tracing_subscriber::fmt() .with_span_events(FmtSpan::ACTIVE) + .with_writer(std::io::stderr) .with_ansi(true) .with_env_filter(EnvFilter::from_env("NOIR_LOG")) .init(); } if let Err(report) = cli::start_cli() { - eprintln!("{report}"); + eprintln!("{report:#}"); std::process::exit(1); } } diff --git a/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs b/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs index 2276bcb4403..591c825e452 100644 --- a/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs +++ b/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs @@ -2,7 +2,7 @@ use acir::AcirField; use acir::brillig::{BinaryFieldOp, BinaryIntOp, BlackBoxOp, Opcode as BrilligOpcode}; use acir::circuit::{Opcode as AcirOpcode, opcodes::BlackBoxFuncCall}; -fn format_blackbox_function(call: &BlackBoxFuncCall) -> String { +fn format_blackbox_function(call: &BlackBoxFuncCall) -> String { match call { BlackBoxFuncCall::AES128Encrypt { .. } => "aes128_encrypt".to_string(), BlackBoxFuncCall::AND { .. 
} => "and".to_string(),
@@ -49,7 +49,7 @@ fn format_blackbox_op(call: &BlackBoxOp) -> String {
     }
 }
 
-fn format_acir_opcode_kind<F>(opcode: &AcirOpcode<F>) -> String {
+fn format_acir_opcode_kind<F: AcirField>(opcode: &AcirOpcode<F>) -> String {
     match opcode {
         AcirOpcode::AssertZero(_) => "arithmetic".to_string(),
         AcirOpcode::BlackBoxFuncCall(call) => {
diff --git a/noir/noir-repo/utils/protobuf/Cargo.toml b/noir/noir-repo/utils/protobuf/Cargo.toml
new file mode 100644
index 00000000000..fdd8fb0ba2e
--- /dev/null
+++ b/noir/noir-repo/utils/protobuf/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "noir_protobuf"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+color-eyre.workspace = true
+prost.workspace = true
diff --git a/noir/noir-repo/utils/protobuf/src/lib.rs b/noir/noir-repo/utils/protobuf/src/lib.rs
new file mode 100644
index 00000000000..c3d72086c83
--- /dev/null
+++ b/noir/noir-repo/utils/protobuf/src/lib.rs
@@ -0,0 +1,162 @@
+use color_eyre::eyre::{self, Context, bail, eyre};
+
+/// A protobuf codec to convert between a domain type `T`
+/// and its protobuf representation `R`.
+///
+/// It is to be implemented on a `Self` independent of `T` and `R`,
+/// so that `T` can be in a third party crate, and `Self` can be
+/// generic in the `F` _field_ type as well, which would be cumbersome
+/// if we had to implement traits on `R` because `T` is in another
+/// crate from the schema, or to scatter the `.proto` schema around
+/// so that the traits can be co-defined with `T` which is what can
+/// actually be generic in `F`.
+pub trait ProtoCodec<T, R> {
+    /// Convert domain type `T` to protobuf representation `R`.
+    fn encode(value: &T) -> R;
+    /// Encode a field as `Some`.
+    fn encode_some(value: &T) -> Option<R> {
+        Some(Self::encode(value))
+    }
+    /// Encode an `enum` to the `i32` value that `prost` represents it with.
+    fn encode_enum(value: &T) -> i32
+    where
+        R: Into<i32>,
+    {
+        Self::encode(value).into()
+    }
+    /// Encode multiple values as a vector.
+    fn encode_vec<'a, I>(values: I) -> Vec<R>
+    where
+        I: IntoIterator<Item = &'a T>,
+        T: 'a,
+    {
+        values.into_iter().map(Self::encode).collect()
+    }
+
+    /// Try to convert protobuf representation `R` to domain type `T`.
+    fn decode(value: &R) -> eyre::Result<T>;
+    /// Decode a field and attach the name of the field if it fails.
+    fn decode_wrap(value: &R, msg: &'static str) -> eyre::Result<T> {
+        Self::decode(value).wrap_err(msg)
+    }
+    /// Decode multiple values into a vector.
+    fn decode_vec(values: &[R]) -> eyre::Result<Vec<T>> {
+        values.iter().map(Self::decode).collect()
+    }
+    /// Decode multiple values into a vector, attaching a field name to any errors.
+    fn decode_vec_wrap(values: &[R], msg: &'static str) -> eyre::Result<Vec<T>> {
+        Self::decode_vec(values).wrap_err(msg)
+    }
+    /// Decode a fixed size array.
+    fn decode_arr<const N: usize>(values: &[R]) -> eyre::Result<[T; N]> {
+        match Self::decode_vec(values)?.try_into() {
+            Ok(arr) => Ok(arr),
+            Err(vec) => {
+                bail!("expected {N} items, got {}", vec.len());
+            }
+        }
+    }
+    /// Decode a fixed size array, attaching a field name to any errors
+    fn decode_arr_wrap<const N: usize>(values: &[R], msg: &'static str) -> eyre::Result<[T; N]> {
+        Self::decode_arr(values).wrap_err(msg)
+    }
+    /// Decode a boxed fixed size array.
+    fn decode_box_arr<const N: usize>(values: &[R]) -> eyre::Result<Box<[T; N]>> {
+        Self::decode_arr(values).map(Box::new)
+    }
+    /// Decode a boxed fixed size array, attaching a field name to any errors
+    fn decode_box_arr_wrap<const N: usize>(
+        values: &[R],
+        msg: &'static str,
+    ) -> eyre::Result<Box<[T; N]>> {
+        Self::decode_box_arr(values).wrap_err(msg)
+    }
+    /// Decode an optional field as a required one; fails if it's `None`.
+    fn decode_some(value: &Option<R>) -> eyre::Result<T> {
+        match value {
+            Some(value) => Self::decode(value),
+            None => Err(eyre!("missing field")),
+        }
+    }
+    /// Decode an optional field as a required one, attaching a field name to any errors.
+    /// Returns error if the field is missing.
+    fn decode_some_wrap(value: &Option<R>, msg: &'static str) -> eyre::Result<T> {
+        Self::decode_some(value).wrap_err(msg)
+    }
+    /// Decode an optional field, attaching a field name to any errors.
+    /// Return `None` if the field is missing.
+    fn decode_opt_wrap(value: &Option<R>, msg: &'static str) -> eyre::Result<Option<T>> {
+        value.as_ref().map(|value| Self::decode_wrap(value, msg)).transpose()
+    }
+    /// Decode the numeric representation of an enum into the domain type.
+    /// Return an error if the value cannot be recognized.
+    fn decode_enum(value: i32) -> eyre::Result<T>
+    where
+        R: TryFrom<i32, Error = prost::UnknownEnumValue>,
+    {
+        let r = R::try_from(value)?;
+        Self::decode(&r)
+    }
+    /// Decode the numeric representation of an enum, attaching the field name to any errors.
+    fn decode_enum_wrap(value: i32, msg: &'static str) -> eyre::Result<T>
+    where
+        R: TryFrom<i32, Error = prost::UnknownEnumValue>,
+    {
+        Self::decode_enum(value).wrap_err(msg)
+    }
+
+    /// Encode a domain type to protobuf and serialize it to bytes.
+    fn serialize_to_vec(value: &T) -> Vec<u8>
+    where
+        R: prost::Message,
+    {
+        Self::encode(value).encode_to_vec()
+    }
+    /// Deserialize a buffer into protobuf and then decode into the domain type.
+    fn deserialize_from_vec(buf: &[u8]) -> eyre::Result<T>
+    where
+        R: prost::Message + Default,
+    {
+        let repr = R::decode(buf).wrap_err("failed to decode into protobuf")?;
+        Self::decode(&repr).wrap_err("failed to decode protobuf into domain")
+    }
+}
+
+/// Decode repeated items by mapping a function over them, attaching an error message if it fails.
+/// Useful when a lambda needs to be applied before we can use one of the type class methods.
+pub fn decode_vec_map_wrap<R, T, F>(rs: &[R], msg: &'static str, f: F) -> eyre::Result<Vec<T>>
+where
+    F: Fn(&R) -> eyre::Result<T>,
+{
+    rs.iter().map(f).collect::<eyre::Result<Vec<T>>>().wrap_err(msg)
+}
+
+/// Decode an optional item, returning an error if it's `None`.
+/// Useful when a lambda needs to be applied before we can use one of the type class methods.
+pub fn decode_some_map<R, T, F>(r: &Option<R>, f: F) -> eyre::Result<T>
+where
+    F: Fn(&R) -> eyre::Result<T>,
+{
+    match r {
+        Some(r) => f(r),
+        None => Err(eyre!("missing field")),
+    }
+}
+
+/// Decode an optional item, attaching a field name to any errors.
+/// Useful when a lambda needs to be applied before we can use one of the type class methods.
+pub fn decode_some_map_wrap<R, T, F>(r: &Option<R>, msg: &'static str, f: F) -> eyre::Result<T>
+where
+    F: Fn(&R) -> eyre::Result<T>,
+{
+    decode_some_map(r, f).wrap_err(msg)
+}
+
+/// Decode a `oneof` field, returning an error if it's missing.
+/// Useful when a lambda needs to be applied before we can use one of the type class methods.
+pub fn decode_oneof_map<R, T, F>(r: &Option<R>, f: F) -> eyre::Result<T>
+where
+    F: Fn(&R) -> eyre::Result<T>,
+{
+    decode_some_map_wrap(r, "oneof value", f)
+}
diff --git a/playground/bootstrap.sh b/playground/bootstrap.sh
index 0c97ed63ed3..69d8efdd266 100755
--- a/playground/bootstrap.sh
+++ b/playground/bootstrap.sh
@@ -34,9 +34,9 @@ function release {
   echo_header "playground release"
   if [ $(dist_tag) != "latest" ]; then
     # TODO attach to github release
-    do_or_dryrun yarn netlify deploy --site aztec-docs-dev --dir=dist
+    do_or_dryrun yarn netlify deploy --site aztec-playground --dir=dist
   else
-    do_or_dryrun yarn netlify deploy --site aztec-docs-dev --prod --dir=dist
+    do_or_dryrun yarn netlify deploy --site aztec-playground --prod --dir=dist
   fi
 }
 
diff --git a/playground/package.json b/playground/package.json
index e0167538089..3c04319c370 100644
--- a/playground/package.json
+++ b/playground/package.json
@@ -18,13 +18,9 @@
   "dependencies": {
     "@aztec/accounts": "link:../yarn-project/accounts",
     "@aztec/aztec.js": "link:../yarn-project/aztec.js",
-    "@aztec/bb-prover": "link:../yarn-project/bb-prover",
     "@aztec/foundation": "link:../yarn-project/foundation",
-    "@aztec/key-store": "link:../yarn-project/key-store",
     "@aztec/kv-store": "link:../yarn-project/kv-store",
-    "@aztec/protocol-contracts": "link:../yarn-project/protocol-contracts",
     "@aztec/pxe": "link:../yarn-project/pxe",
-    "@aztec/simulator": "link:../yarn-project/simulator",
     "@aztec/stdlib": "link:../yarn-project/stdlib",
     "@emotion/react": "^11.14.0",
     "@emotion/styled": "^11.14.0",
diff --git a/playground/src/aztecEnv.ts b/playground/src/aztecEnv.ts
index ee113de98d8..5aceaff7e5a 100644
--- a/playground/src/aztecEnv.ts
+++ b/playground/src/aztecEnv.ts
@@ -4,19 +4,12 @@ import { AztecAddress } from '@aztec/aztec.js/addresses';
 import { AccountWalletWithSecretKey } from '@aztec/aztec.js/wallet';
 import { Contract } from '@aztec/aztec.js/contracts';
 import { type PXE } from '@aztec/aztec.js/interfaces/pxe';
-import { PXEService } from '@aztec/pxe/service';
-import { type PXEServiceConfig, getPXEServiceConfig } from '@aztec/pxe/config';
-import { KVPxeDatabase } from '@aztec/pxe/database';
-import { KeyStore } from '@aztec/key-store';
-import { L2TipsStore } from '@aztec/kv-store/stores';
+import { createPXEService, type PXEServiceConfig, getPXEServiceConfig } from '@aztec/pxe/client/lazy';
 import { createStore } from '@aztec/kv-store/indexeddb';
-import { BBWASMLazyPrivateKernelProver } from '@aztec/bb-prover/wasm/lazy';
-import { WASMSimulator } from '@aztec/simulator/client';
 import { createContext } from 'react';
 import { NetworkDB, WalletDB } from './utils/storage';
 import { type ContractFunctionInteractionTx } from './utils/txs';
 import { type Logger, createLogger } from '@aztec/aztec.js/log';
-import { LazyProtocolContractsProvider } from '@aztec/protocol-contracts/providers/lazy';
 
 const logLevel = ['silent', 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace'] as const;
 
@@ -151,44 +144,19 @@ export class AztecEnv {
     const config = getPXEServiceConfig();
     config.dataDirectory = 'pxe';
     config.proverEnabled = true;
-
-    const simulationProvider = new WASMSimulator();
-    const proofCreator = new BBWASMLazyPrivateKernelProver(
-      simulationProvider,
-      16,
-      WebLogger.getInstance().createLogger('bb:wasm:lazy'),
-    );
 
     const l1Contracts = await aztecNode.getL1ContractAddresses();
     const configWithContracts = {
       ...config,
       l1Contracts,
     } as PXEServiceConfig;
 
-    const store = await createStore(
-      'pxe_data',
-      configWithContracts,
WebLogger.getInstance().createLogger('pxe:data:indexeddb'), - ); - - const keyStore = new KeyStore(store); - - const db = await KVPxeDatabase.create(store); - const tips = new L2TipsStore(store, 'pxe'); - - const protocolContractsProvider = new LazyProtocolContractsProvider(); - - const pxe = new PXEService( - keyStore, - aztecNode, - db, - tips, - proofCreator, - simulationProvider, - protocolContractsProvider, - config, - WebLogger.getInstance().createLogger('pxe:service'), - ); - await pxe.init(); + const pxe = await createPXEService(aztecNode, configWithContracts, { + loggers: { + store: WebLogger.getInstance().createLogger('pxe:data:indexeddb'), + pxe: WebLogger.getInstance().createLogger('pxe:service'), + prover: WebLogger.getInstance().createLogger('bb:wasm:lazy'), + } + }); return pxe; } } diff --git a/playground/yarn.lock b/playground/yarn.lock index a57e177b6bb..af2050e1559 100644 --- a/playground/yarn.lock +++ b/playground/yarn.lock @@ -17,24 +17,12 @@ __metadata: languageName: node linkType: soft -"@aztec/bb-prover@link:../yarn-project/bb-prover::locator=%40aztec%2Fplayground%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/bb-prover@link:../yarn-project/bb-prover::locator=%40aztec%2Fplayground%40workspace%3A." - languageName: node - linkType: soft - "@aztec/foundation@link:../yarn-project/foundation::locator=%40aztec%2Fplayground%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/foundation@link:../yarn-project/foundation::locator=%40aztec%2Fplayground%40workspace%3A." languageName: node linkType: soft -"@aztec/key-store@link:../yarn-project/key-store::locator=%40aztec%2Fplayground%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/key-store@link:../yarn-project/key-store::locator=%40aztec%2Fplayground%40workspace%3A." - languageName: node - linkType: soft - "@aztec/kv-store@link:../yarn-project/kv-store::locator=%40aztec%2Fplayground%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/kv-store@link:../yarn-project/kv-store::locator=%40aztec%2Fplayground%40workspace%3A." @@ -47,13 +35,9 @@ __metadata: dependencies: "@aztec/accounts": "link:../yarn-project/accounts" "@aztec/aztec.js": "link:../yarn-project/aztec.js" - "@aztec/bb-prover": "link:../yarn-project/bb-prover" "@aztec/foundation": "link:../yarn-project/foundation" - "@aztec/key-store": "link:../yarn-project/key-store" "@aztec/kv-store": "link:../yarn-project/kv-store" - "@aztec/protocol-contracts": "link:../yarn-project/protocol-contracts" "@aztec/pxe": "link:../yarn-project/pxe" - "@aztec/simulator": "link:../yarn-project/simulator" "@aztec/stdlib": "link:../yarn-project/stdlib" "@emotion/react": "npm:^11.14.0" "@emotion/styled": "npm:^11.14.0" @@ -88,24 +72,12 @@ __metadata: languageName: unknown linkType: soft -"@aztec/protocol-contracts@link:../yarn-project/protocol-contracts::locator=%40aztec%2Fplayground%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/protocol-contracts@link:../yarn-project/protocol-contracts::locator=%40aztec%2Fplayground%40workspace%3A." - languageName: node - linkType: soft - "@aztec/pxe@link:../yarn-project/pxe::locator=%40aztec%2Fplayground%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/pxe@link:../yarn-project/pxe::locator=%40aztec%2Fplayground%40workspace%3A." 
languageName: node linkType: soft -"@aztec/simulator@link:../yarn-project/simulator::locator=%40aztec%2Fplayground%40workspace%3A.": - version: 0.0.0-use.local - resolution: "@aztec/simulator@link:../yarn-project/simulator::locator=%40aztec%2Fplayground%40workspace%3A." - languageName: node - linkType: soft - "@aztec/stdlib@link:../yarn-project/stdlib::locator=%40aztec%2Fplayground%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/stdlib@link:../yarn-project/stdlib::locator=%40aztec%2Fplayground%40workspace%3A." diff --git a/release-image/bootstrap.sh b/release-image/bootstrap.sh index c57338bbae9..3f1311ab628 100755 --- a/release-image/bootstrap.sh +++ b/release-image/bootstrap.sh @@ -31,17 +31,17 @@ case "$cmd" in sleep 10 done - # We release with our ref name, e.g. v1.0.0 + # We release with our tag, e.g. 1.0.0 docker manifest create aztecprotocol/aztec:$tag \ --amend aztecprotocol/aztec:$tag-amd64 \ --amend aztecprotocol/aztec:$tag-arm64 - docker manifest push $tag + docker manifest push aztecprotocol/aztec:$tag # We also release with our dist_tag, e.g. 'latest' or 'nightly'. docker manifest create aztecprotocol/aztec:$(dist_tag) \ --amend aztecprotocol/aztec:$tag-amd64 \ --amend aztecprotocol/aztec:$tag-arm64 - docker manifest push $(dist_tag) + docker manifest push aztecprotocol/aztec:$(dist_tag) fi ;; "release_commit") diff --git a/release-please-config.json b/release-please-config.json index d45f90f72f6..6d05c8ab5dd 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -14,5 +14,10 @@ { "type": "test", "section": "Miscellaneous", "hidden": false }, { "type": "refactor", "section": "Miscellaneous", "hidden": false }, { "type": "docs", "section": "Documentation", "hidden": false } - ] + ], + "packages": { + ".": { + "release-type": "simple" + } + } } diff --git a/spartan/aztec-network/files/config/deploy-l1-contracts.sh b/spartan/aztec-network/files/config/deploy-l1-contracts.sh index 3ff6801d009..bf99ebff494 100755 --- a/spartan/aztec-network/files/config/deploy-l1-contracts.sh +++ b/spartan/aztec-network/files/config/deploy-l1-contracts.sh @@ -16,6 +16,17 @@ SALT=${1:-$RANDOM} CHAIN_ID=$2 VALIDATOR_ADDRESSES=$3 +# If the chain ID is 11155111 or 1, we are deploying to a public network, make sure that we do not use accelerated test deployments +PUBLIC_CHAIN_ID=false +if [ "$CHAIN_ID" = "11155111" -o "$CHAIN_ID" = "1" ]; then + PUBLIC_CHAIN_ID=true +fi + +# Overwrite the value of ACCELERATED_TEST_DEPLOYMENTS env variable if we are deploying to a public network +if [ "$PUBLIC_CHAIN_ID" = "true" ]; then + ACCELERATED_TEST_DEPLOYMENTS=false +fi + # Run the deploy-l1-contracts command and capture the output output="" MAX_RETRIES=5 @@ -26,9 +37,14 @@ if [ "$TEST_ACCOUNTS" = "true" ]; then TEST_ACCOUNTS_ARG="--test-accounts" fi +ACCELERATED_TEST_DEPLOYMENTS_ARG="" +if [ "$ACCELERATED_TEST_DEPLOYMENTS" = "true" ]; then + ACCELERATED_TEST_DEPLOYMENTS_ARG="--accelerated-test-deployments" +fi + for attempt in $(seq 1 $MAX_RETRIES); do # Construct base command - base_cmd="LOG_LEVEL=debug node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts $TEST_ACCOUNTS_ARG" + base_cmd="LOG_LEVEL=debug node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts $TEST_ACCOUNTS_ARG $ACCELERATED_TEST_DEPLOYMENTS_ARG" # Add account - use private key if set, otherwise use mnemonic if [ -n "${L1_DEPLOYMENT_PRIVATE_KEY:-}" ]; then @@ -39,11 +55,11 @@ for attempt in $(seq 1 $MAX_RETRIES); do # Add validators if INIT_VALIDATORS 
is true if [ "${INIT_VALIDATORS:-false}" = "true" ]; then - output=$(eval $base_cmd --validators $VALIDATOR_ADDRESSES --l1-chain-id $CHAIN_ID --salt $SALT) && break - else - output=$(eval $base_cmd --l1-chain-id $CHAIN_ID --salt $SALT) && break + base_cmd="$base_cmd --validators $VALIDATOR_ADDRESSES" fi + output=$(eval $base_cmd --l1-chain-id $CHAIN_ID --salt $SALT) && break + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." sleep "$RETRY_DELAY" done || { diff --git a/spartan/aztec-network/files/config/setup-p2p-addresses.sh b/spartan/aztec-network/files/config/setup-p2p-addresses.sh index f4b2afce6f2..9a50a091304 100644 --- a/spartan/aztec-network/files/config/setup-p2p-addresses.sh +++ b/spartan/aztec-network/files/config/setup-p2p-addresses.sh @@ -30,10 +30,10 @@ else fi # Write addresses to file for sourcing -echo "export P2P_TCP_ANNOUNCE_ADDR=${TCP_ADDR}" > /shared/p2p/p2p-addresses -echo "export P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_TCP_PORT}" >> /shared/p2p/p2p-addresses -echo "export P2P_UDP_ANNOUNCE_ADDR=${UDP_ADDR}" >> /shared/p2p/p2p-addresses -echo "export P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_UDP_PORT}" >> /shared/p2p/p2p-addresses +echo "export P2P_TCP_ANNOUNCE_ADDR=${TCP_ADDR}" > /shared/config/p2p-addresses +echo "export P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_TCP_PORT}" >> /shared/config/p2p-addresses +echo "export P2P_UDP_ANNOUNCE_ADDR=${UDP_ADDR}" >> /shared/config/p2p-addresses +echo "export P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_UDP_PORT}" >> /shared/config/p2p-addresses echo "P2P addresses configured:" -cat /shared/p2p/p2p-addresses \ No newline at end of file +cat /shared/config/p2p-addresses diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 70d9da62638..3a0fb55da3c 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -65,7 +65,7 @@ http://{{ include "aztec-network.fullname" . }}-pxe.{{ .Release.Namespace }}:{{ {{- end -}} {{- define "aztec-network.bootNodeUrl" -}} -http://{{ include "aztec-network.fullname" . }}-boot-node-0.{{ include "aztec-network.fullname" . }}-boot-node.{{ .Release.Namespace }}.svc.cluster.local:{{ .Values.bootNode.service.nodePort }} +http://{{ include "aztec-network.fullname" . }}-boot-node.{{ .Release.Namespace }}.svc.cluster.local:{{ .Values.bootNode.service.nodePort }} {{- end -}} {{- define "aztec-network.validatorUrl" -}} @@ -94,34 +94,6 @@ http://{{ include "aztec-network.fullname" . 
}}-metrics.{{ .Release.Namespace }} {{- end -}} {{- end -}} -{{/* -P2P Setup Container -*/}} -{{- define "aztec-network.p2pSetupContainer" -}} -- name: setup-p2p-addresses - image: bitnami/kubectl - command: - - /bin/sh - - -c - - | - cp /scripts/setup-p2p-addresses.sh /tmp/setup-p2p-addresses.sh && \ - chmod +x /tmp/setup-p2p-addresses.sh && \ - /tmp/setup-p2p-addresses.sh - env: - - name: NETWORK_PUBLIC - value: "{{ .Values.network.public }}" - - name: NAMESPACE - value: {{ .Release.Namespace }} - - name: P2P_TCP_PORT - value: "{{ .Values.validator.service.p2pTcpPort }}" - - name: P2P_UDP_PORT - value: "{{ .Values.validator.service.p2pUdpPort }}" - volumeMounts: - - name: scripts - mountPath: /scripts - - name: p2p-addresses - mountPath: /shared/p2p -{{- end -}} {{/* Service Address Setup Container @@ -133,9 +105,7 @@ Service Address Setup Container - /bin/bash - -c - | - cp /scripts/setup-service-addresses.sh /tmp/setup-service-addresses.sh && \ - chmod +x /tmp/setup-service-addresses.sh && \ - /tmp/setup-service-addresses.sh + /scripts/setup-service-addresses.sh env: - name: NETWORK_PUBLIC value: "{{ .Values.network.public }}" @@ -189,9 +159,7 @@ Sets up the OpenTelemetry resource attributes for a service - /bin/bash - -c - | - cp /scripts/setup-otel-resource.sh /tmp/setup-otel-resource.sh && \ - chmod +x /tmp/setup-otel-resource.sh && \ - /tmp/setup-otel-resource.sh + /scripts/setup-otel-resource.sh env: - name: POD_IP valueFrom: @@ -263,3 +231,142 @@ while true; do sleep 5 done {{- end -}} + +{{/* +Combined wait-for-services and configure-env container for full nodes +*/}} +{{- define "aztec-network.combinedWaitAndConfigureContainer" -}} +- name: wait-and-configure + {{- include "aztec-network.image" . | nindent 2 }} + command: + - /bin/bash + - -c + - | + # If we already have a registry address, and the bootstrap nodes are set, then we don't need to wait for the services + if [ -n "{{ .Values.aztec.contracts.registryAddress }}" ] && [ -n "{{ .Values.aztec.bootstrapENRs }}" ]; then + echo "Registry address and bootstrap nodes already set, skipping wait for services" + echo "{{ include "aztec-network.pxeUrl" . }}" > /shared/pxe/pxe_url + else + source /shared/config/service-addresses + cat /shared/config/service-addresses + {{- include "aztec-network.waitForEthereum" . | nindent 8 }} + + if [ "{{ .Values.validator.dynamicBootNode }}" = "true" ]; then + echo "{{ include "aztec-network.pxeUrl" . }}" > /shared/pxe/pxe_url + else + until curl --silent --head --fail "${BOOT_NODE_HOST}/status" > /dev/null; do + echo "Waiting for boot node..." + sleep 5 + done + echo "Boot node is ready!" 
+ echo "${BOOT_NODE_HOST}" > /shared/pxe/pxe_url + fi + fi + + # Configure environment + source /shared/config/service-addresses + /scripts/configure-full-node-env.sh "$(cat /shared/pxe/pxe_url)" + volumeMounts: + - name: pxe-url + mountPath: /shared/pxe + - name: scripts + mountPath: /scripts + - name: config + mountPath: /shared/config + - name: contracts-env + mountPath: /shared/contracts + env: + - name: P2P_ENABLED + value: "{{ .Values.fullNode.p2p.enabled }}" + - name: BOOTSTRAP_NODES + value: "{{ .Values.aztec.bootstrapENRs }}" + - name: REGISTRY_CONTRACT_ADDRESS + value: "{{ .Values.aztec.contracts.registryAddress }}" + - name: SLASH_FACTORY_CONTRACT_ADDRESS + value: "{{ .Values.aztec.contracts.slashFactoryAddress }}" +{{- end -}} + +{{/* +Combined P2P, Service Address, and OpenTelemetry Setup Container +*/}} +{{- define "aztec-network.combinedAllSetupContainer" -}} +{{- $serviceName := base $.Template.Name | trimSuffix ".yaml" -}} +- name: setup-all + {{- include "aztec-network.image" . | nindent 2 }} + command: + - /bin/bash + - -c + - | + # Setup P2P addresses + /scripts/setup-p2p-addresses.sh + + # Setup service addresses + /scripts/setup-service-addresses.sh + + # Setup OpenTelemetry resource + /scripts/setup-otel-resource.sh + env: + - name: NETWORK_PUBLIC + value: "{{ .Values.network.public }}" + - name: NAMESPACE + value: {{ .Release.Namespace }} + - name: P2P_TCP_PORT + value: "{{ .Values.validator.service.p2pTcpPort }}" + - name: P2P_UDP_PORT + value: "{{ .Values.validator.service.p2pUdpPort }}" + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" + - name: OTEL_COLLECTOR_ENDPOINT + value: "{{ .Values.telemetry.otelCollectorEndpoint }}" + - name: EXTERNAL_ETHEREUM_HOSTS + value: "{{ .Values.ethereum.execution.externalHosts }}" + - name: ETHEREUM_PORT + value: "{{ .Values.ethereum.execution.service.port }}" + - name: EXTERNAL_ETHEREUM_CONSENSUS_HOST + value: "{{ .Values.ethereum.beacon.externalHost }}" + - name: EXTERNAL_ETHEREUM_CONSENSUS_HOST_API_KEY + value: "{{ .Values.ethereum.beacon.apiKey }}" + - name: EXTERNAL_ETHEREUM_CONSENSUS_HOST_API_KEY_HEADER + value: "{{ .Values.ethereum.beacon.apiKeyHeader }}" + - name: ETHEREUM_CONSENSUS_PORT + value: "{{ .Values.ethereum.beacon.service.port }}" + - name: EXTERNAL_BOOT_NODE_HOST + value: "{{ .Values.bootNode.externalHost }}" + - name: BOOT_NODE_PORT + value: "{{ .Values.bootNode.service.nodePort }}" + - name: EXTERNAL_PROVER_NODE_HOST + value: "{{ .Values.proverNode.externalHost }}" + - name: PROVER_NODE_PORT + value: "{{ .Values.proverNode.service.nodePort }}" + - name: PROVER_BROKER_PORT + value: "{{ .Values.proverBroker.service.nodePort }}" + - name: USE_GCLOUD_LOGGING + value: "{{ .Values.telemetry.useGcloudLogging }}" + - name: SERVICE_NAME + value: {{ include "aztec-network.fullname" . 
}} + - name: POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: K8S_POD_UID + valueFrom: + fieldRef: + fieldPath: metadata.uid + - name: K8S_POD_NAME + valueFrom: + fieldRef: + fieldPath: metadata.name + - name: K8S_NAMESPACE_NAME + valueFrom: + fieldRef: + fieldPath: metadata.namespace + - name: OTEL_SERVICE_NAME + value: "{{ $serviceName }}" + - name: OTEL_RESOURCE_ATTRIBUTES + value: 'service.namespace={{ .Release.Namespace }},environment={{ .Values.environment | default "production" }}' + volumeMounts: + - name: scripts + mountPath: /scripts + - name: config + mountPath: /shared/config +{{- end -}} diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 2823afaf72b..b0897148e2c 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -39,10 +39,9 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: - {{- include "aztec-network.p2pSetupContainer" . | nindent 8 }} - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} # Generate the validator addresses; used in the deploy-l1-contracts step - name: generate-validator-addresses @@ -66,32 +65,18 @@ spec: - name: NUMBER_OF_VALIDATORS value: {{ .Values.validator.replicas | quote }} - - name: wait-for-ethereum - {{- include "aztec-network.image" . | nindent 10 }} - command: - - /bin/bash - - -c - - | - source /shared/config/service-addresses - cat /shared/config/service-addresses - {{- include "aztec-network.waitForEthereum" . | nindent 14 }} - volumeMounts: - - name: config - mountPath: /shared/config - name: deploy-l1-contracts {{- include "aztec-network.image" . | nindent 10 }} command: - /bin/bash - -c - | - cp /scripts/deploy-l1-contracts.sh /tmp/deploy-l1-contracts.sh - chmod +x /tmp/deploy-l1-contracts.sh source /shared/config/service-addresses source /shared/config/validator-addresses {{- include "aztec-network.waitForEthereum" . 
| nindent 14 }} - /tmp/deploy-l1-contracts.sh "{{ .Values.aztec.l1Salt }}" "{{ .Values.ethereum.chainId }}" "$VALIDATOR_ADDRESSES" + /scripts/deploy-l1-contracts.sh "{{ .Values.aztec.l1Salt }}" "{{ .Values.ethereum.chainId }}" "$VALIDATOR_ADDRESSES" volumeMounts: - name: scripts-output mountPath: /shared/contracts @@ -100,6 +85,8 @@ spec: - name: scripts mountPath: /scripts env: + - name: ACCELERATED_TEST_DEPLOYMENTS + value: "{{ .Values.ethereum.acceleratedTestDeployments }}" - name: TEST_ACCOUNTS value: "{{ .Values.aztec.testAccounts }}" - name: REGISTRY_CONTRACT_ADDRESS @@ -145,9 +132,8 @@ spec: - /bin/bash - -c - | - sleep 30 && \ source /shared/contracts/contracts.env && \ - source /shared/p2p/p2p-addresses && \ + source /shared/config/p2p-addresses && \ source /shared/config/service-addresses && \ source /shared/config/otel-resource && \ env && \ @@ -167,8 +153,6 @@ spec: timeoutSeconds: 30 failureThreshold: 3 volumeMounts: - - name: p2p-addresses - mountPath: /shared/p2p - name: config mountPath: /shared/config - name: boot-node-data @@ -256,8 +240,6 @@ spec: resources: {{- toYaml .Values.bootNode.resources | nindent 12 }} volumes: - - name: p2p-addresses - emptyDir: {} - name: config emptyDir: {} {{- if .Values.storage.localSsd }} @@ -271,6 +253,7 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: scripts-output emptyDir: {} --- @@ -300,7 +283,7 @@ spec: # External load balancers cannot handle mixed TCP/UDP ports, so we only expose the node port {{- if not .Values.network.public }} - port: {{ .Values.bootNode.service.p2pTcpPort }} - name: p2p-tpc + name: p2p-tcp - port: {{ .Values.bootNode.service.p2pUdpPort }} name: p2p-udp protocol: UDP diff --git a/spartan/aztec-network/templates/consolidate-balances.yaml b/spartan/aztec-network/templates/consolidate-balances.yaml index 8a925ab3421..68a34886e31 100644 --- a/spartan/aztec-network/templates/consolidate-balances.yaml +++ b/spartan/aztec-network/templates/consolidate-balances.yaml @@ -26,6 +26,7 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 containers: - name: consolidate-balances {{- include "aztec-network.image" . | nindent 10 }} @@ -38,9 +39,7 @@ spec: - /bin/bash - -c - | - cp /scripts/consolidate-sepolia-balances.sh /tmp/consolidate-sepolia-balances.sh - chmod +x /tmp/consolidate-sepolia-balances.sh - /tmp/consolidate-sepolia-balances.sh "{{ .Values.aztec.l1DeploymentMnemonic }}" {{ add .Values.validator.replicas .Values.proverNode.replicas }} + /scripts/consolidate-sepolia-balances.sh "{{ .Values.aztec.l1DeploymentMnemonic }}" {{ add .Values.validator.replicas .Values.proverNode.replicas }} env: - name: ETHEREUM_HOSTS value: "{{ .Values.ethereum.execution.externalHosts }}" diff --git a/spartan/aztec-network/templates/deploy-l1-verifier.yaml b/spartan/aztec-network/templates/deploy-l1-verifier.yaml index 7cbe04bf828..d83633e9705 100644 --- a/spartan/aztec-network/templates/deploy-l1-verifier.yaml +++ b/spartan/aztec-network/templates/deploy-l1-verifier.yaml @@ -27,6 +27,7 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 containers: - name: deploy-l1-verifier {{- include "aztec-network.image" . 
| nindent 10 }} @@ -47,9 +48,7 @@ spec: kubectl config set-context default --cluster=default --user=default kubectl config use-context default - cp /scripts/setup-service-addresses.sh /tmp/setup-service-addresses.sh - chmod +x /tmp/setup-service-addresses.sh - /tmp/setup-service-addresses.sh + /scripts/setup-service-addresses.sh source /shared/config/service-addresses cat /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/faucet.yaml b/spartan/aztec-network/templates/faucet.yaml index c455ec213c7..6215b8ef02e 100644 --- a/spartan/aztec-network/templates/faucet.yaml +++ b/spartan/aztec-network/templates/faucet.yaml @@ -32,8 +32,10 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: scripts-output emptyDir: {} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - name: wait-for-dependencies diff --git a/spartan/aztec-network/templates/full-node.yaml b/spartan/aztec-network/templates/full-node.yaml index 7e1683c0b86..d983766777b 100644 --- a/spartan/aztec-network/templates/full-node.yaml +++ b/spartan/aztec-network/templates/full-node.yaml @@ -40,86 +40,9 @@ spec: {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} initContainers: - {{- include "aztec-network.p2pSetupContainer" . | nindent 8 }} - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedWaitAndConfigureContainer" . | nindent 8 }} - - - name: wait-for-services - {{- include "aztec-network.image" . | nindent 10 }} - command: - - /bin/bash - - -c - - | - # If we already have a registry address, and the bootstrap nodes are set, then we don't need to wait for the services - if [ -n "{{ .Values.aztec.contracts.registryAddress }}" ] && [ -n "{{ .Values.aztec.bootstrapENRs }}" ]; then - echo "Registry address and bootstrap nodes already set, skipping wait for services" - exit 0 - fi - - source /shared/config/service-addresses - cat /shared/config/service-addresses - {{- include "aztec-network.waitForEthereum" . | nindent 14 }} - - - if [ "{{ .Values.validator.dynamicBootNode }}" = "true" ]; then - echo "{{ include "aztec-network.pxeUrl" . }}" > /shared/pxe/pxe_url - else - until curl --silent --head --fail "${BOOT_NODE_HOST}/status" > /dev/null; do - echo "Waiting for boot node..." - sleep 5 - done - echo "Boot node is ready!" - echo "${BOOT_NODE_HOST}" > /shared/pxe/pxe_url - fi - volumeMounts: - - name: pxe-url - mountPath: /shared/pxe - - name: scripts - mountPath: /scripts - - name: config - mountPath: /shared/config - - - name: configure-full-node-env - {{- include "aztec-network.image" . 
| nindent 10 }} - command: - - "/bin/bash" - - "-c" - - | - source /shared/config/service-addresses && \ - cp /scripts/configure-full-node-env.sh /tmp/configure-full-node-env.sh && \ - chmod +x /tmp/configure-full-node-env.sh && \ - /tmp/configure-full-node-env.sh "$(cat /shared/pxe/pxe_url)" - volumeMounts: - - name: contracts-env - mountPath: /shared/contracts - - name: pxe-url - mountPath: /shared/pxe - - name: scripts - mountPath: /scripts - - name: config - mountPath: /shared/config - env: - - name: P2P_ENABLED - value: "{{ .Values.fullNode.p2p.enabled }}" - - name: BOOTSTRAP_NODES - value: "{{ .Values.aztec.bootstrapENRs }}" - - name: REGISTRY_CONTRACT_ADDRESS - value: "{{ .Values.aztec.contracts.registryAddress }}" - - name: SLASH_FACTORY_CONTRACT_ADDRESS - value: "{{ .Values.aztec.contracts.slashFactoryAddress }}" - - - name: wait-for-ethereum - {{- include "aztec-network.image" . | nindent 10 }} - command: - - /bin/bash - - -c - - | - source /shared/config/service-addresses - cat /shared/config/service-addresses - {{- include "aztec-network.waitForEthereum" . | nindent 14 }} - volumeMounts: - - name: config - mountPath: /shared/config containers: - name: full-node {{- include "aztec-network.image" . | nindent 10 }} @@ -127,11 +50,11 @@ spec: - /bin/bash - -c - | - sleep 30 && \ - source /shared/contracts/contracts.env && \ - source /shared/p2p/p2p-addresses && \ - source /shared/config/service-addresses && \ - env && \ + source /shared/contracts/contracts.env + source /shared/config/p2p-addresses + source /shared/config/service-addresses + env + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver --pxe startupProbe: httpGet: @@ -148,8 +71,6 @@ spec: timeoutSeconds: 30 failureThreshold: 3 volumeMounts: - - name: p2p-addresses - mountPath: /shared/p2p - name: config mountPath: /shared/config - name: full-node-data @@ -244,8 +165,6 @@ spec: resources: {{- toYaml .Values.fullNode.resources | nindent 12 }} volumes: - - name: p2p-addresses - emptyDir: {} - name: config emptyDir: {} {{- if .Values.storage.localSsd }} @@ -259,6 +178,7 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: scripts-output emptyDir: {} - name: contracts-env diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 751689482ba..51b76161119 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -43,9 +43,9 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 initContainers: - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} - name: wait-for-prover-node {{- include "aztec-network.image" . | nindent 10 }} command: diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml index 8736fd5b827..5eeff959ddd 100644 --- a/spartan/aztec-network/templates/prover-broker.yaml +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -46,9 +46,10 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 initContainers: - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . 
| nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} + - name: wait-for-dependencies {{- include "aztec-network.image" . | nindent 10 }} command: @@ -81,10 +82,9 @@ spec: command: - "/bin/bash" - "-c" - - "cp /scripts/configure-prover-env.sh /tmp/configure-prover-env.sh && \ - chmod +x /tmp/configure-prover-env.sh && \ - source /shared/config/service-addresses && \ - /tmp/configure-prover-env.sh ${BOOT_NODE_HOST}" + - | + source /shared/config/service-addresses + /scripts/configure-prover-env.sh ${BOOT_NODE_HOST} volumeMounts: - name: contracts-env mountPath: /shared/contracts @@ -162,6 +162,7 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: config emptyDir: {} - name: contracts-env diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index f906e415c6c..efeeb50ccad 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -38,10 +38,9 @@ spec: dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.p2pSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} - name: get-private-key image: {{ .Values.images.foundry.image }} @@ -111,10 +110,8 @@ spec: command: - "/bin/bash" - "-c" - - "cp /scripts/configure-prover-env.sh /tmp/configure-prover-env.sh && \ - chmod +x /tmp/configure-prover-env.sh && \ - source /shared/config/service-addresses && \ - /tmp/configure-prover-env.sh ${BOOT_NODE_HOST}" + - "source /shared/config/service-addresses && \ + /scripts/configure-prover-env.sh ${BOOT_NODE_HOST}" volumeMounts: - name: contracts-env mountPath: /shared/contracts @@ -134,12 +131,12 @@ spec: - name: prover-node {{- include "aztec-network.image" . | nindent 10 }} command: - - "/bin/bash" - - "-c" + - /bin/bash + - -c - | source /shared/config/keys.env && \ source /shared/contracts/contracts.env && \ - source /shared/p2p/p2p-addresses && \ + source /shared/config/p2p-addresses && \ source /shared/config/service-addresses && \ source /shared/config/otel-resource && \ env && \ @@ -147,8 +144,6 @@ spec: volumeMounts: - name: contracts-env mountPath: /shared/contracts - - name: p2p-addresses - mountPath: /shared/p2p - name: config mountPath: /shared/config - name: prover-node-data @@ -257,10 +252,9 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: contracts-env emptyDir: {} - - name: p2p-addresses - emptyDir: {} - name: config emptyDir: {} {{- if .Values.storage.localSsd }} diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index 170ea13980f..d611ffc86a7 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -35,11 +35,14 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: scripts-output emptyDir: {} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: {{- include "aztec-network.serviceAddressSetupContainer" . 
| nindent 8 }} - - name: wait-for-boot-node + + - name: wait-for-host image: {{ .Values.images.curl.image }} command: - /bin/sh @@ -47,26 +50,23 @@ spec: - | source /shared/config/service-addresses cat /shared/config/service-addresses - until curl --head --silent ${BOOT_NODE_HOST}/status; do + + {{- if .Values.network.public }} + # If the network is public, we need to use the boot node URL + export AZTEC_NODE_URL=${BOOT_NODE_HOST} + {{- else }} + # If the network is not public, we can use the validator URL + export AZTEC_NODE_URL={{ include "aztec-network.validatorUrl" . }} + {{- end }} + + until curl --head --silent ${AZTEC_NODE_URL}/status; do echo "Waiting for boot node..." sleep 5 done volumeMounts: - name: config mountPath: /shared/config - {{- if not .Values.network.public }} - # We only need to wait for the validator service if the network is not public - - name: wait-for-validator-service - image: {{ .Values.images.curl.image }} - command: - - /bin/sh - - -c - - | - until curl --head --silent {{ include "aztec-network.validatorUrl" . }}/status; do - echo "Waiting for validator service..." - sleep 5 - done - {{- end }} + containers: - name: pxe {{- include "aztec-network.image" . | nindent 10 }} diff --git a/spartan/aztec-network/templates/transaction-bot.yaml b/spartan/aztec-network/templates/transaction-bot.yaml index f162040ae58..0f0217ca7ec 100644 --- a/spartan/aztec-network/templates/transaction-bot.yaml +++ b/spartan/aztec-network/templates/transaction-bot.yaml @@ -44,11 +44,13 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . }}-scripts + defaultMode: 0755 - name: scripts-output emptyDir: {} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} + - name: get-private-key image: {{ .Values.images.foundry.image }} imagePullPolicy: {{ .Values.images.foundry.pullPolicy }} diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 0c7f82d5d23..e6f7edc209f 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -40,10 +40,9 @@ spec: dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} + terminationGracePeriodSeconds: 5 # default is 30 - speed up initcontainer termination initContainers: - {{- include "aztec-network.p2pSetupContainer" . | nindent 8 }} - {{- include "aztec-network.serviceAddressSetupContainer" . | nindent 8 }} - {{- include "aztec-network.otelResourceSetupContainer" . | nindent 8 }} + {{- include "aztec-network.combinedAllSetupContainer" . | nindent 8 }} # Get the private key from the mnemonic - based on the pod replica index - name: get-private-key @@ -69,71 +68,8 @@ spec: fieldRef: fieldPath: metadata.name + {{- include "aztec-network.combinedWaitAndConfigureContainer" . | nindent 8 }} - - name: wait-for-services - {{- include "aztec-network.image" . 
| nindent 10 }} - command: - - /bin/bash - - -c - - | - # If we already have a registry address, and the bootstrap nodes are set, then we don't need to wait for the services - if [ -n "{{ .Values.aztec.contracts.registryAddress }}" ] && [ -n "{{ .Values.aztec.bootstrapENRs }}" ]; then - echo "Registry address and bootstrap nodes already set, skipping wait for services" - echo "{{ include "aztec-network.pxeUrl" . }}" > /shared/pxe/pxe_url - exit 0 - fi - - source /shared/config/service-addresses - cat /shared/config/service-addresses - {{- include "aztec-network.waitForEthereum" . | nindent 14 }} - - if [ "{{ .Values.validator.dynamicBootNode }}" = "true" ]; then - echo "{{ include "aztec-network.pxeUrl" . }}" > /shared/pxe/pxe_url - else - until curl --silent --head --fail "${BOOT_NODE_HOST}/status" > /dev/null; do - echo "Waiting for boot node..." - sleep 5 - done - echo "Boot node is ready!" - echo "${BOOT_NODE_HOST}" > /shared/pxe/pxe_url - fi - volumeMounts: - - name: pxe-url - mountPath: /shared/pxe - - name: scripts - mountPath: /scripts - - name: config - mountPath: /shared/config - - - - name: configure-validator-env - {{- include "aztec-network.image" . | nindent 10 }} - command: - - "/bin/bash" - - "-c" - - | - source /shared/config/service-addresses && \ - cp /scripts/configure-full-node-env.sh /tmp/configure-full-node-env.sh && \ - chmod +x /tmp/configure-full-node-env.sh && \ - /tmp/configure-full-node-env.sh "$(cat /shared/pxe/pxe_url)" - volumeMounts: - - name: contracts-env - mountPath: /shared/contracts - - name: pxe-url - mountPath: /shared/pxe - - name: scripts - mountPath: /scripts - - name: config - mountPath: /shared/config - env: - - name: P2P_ENABLED - value: "{{ .Values.validator.p2p.enabled }}" - - name: BOOTSTRAP_NODES - value: "{{ .Values.aztec.bootstrapENRs }}" - - name: REGISTRY_CONTRACT_ADDRESS - value: "{{ .Values.aztec.contracts.registryAddress }}" - - name: SLASH_FACTORY_CONTRACT_ADDRESS - value: "{{ .Values.aztec.contracts.slashFactoryAddress }}" containers: - name: validator {{- include "aztec-network.image" . | nindent 10 }} @@ -141,9 +77,8 @@ spec: - "/bin/bash" - "-c" - | - sleep 10 && \ source /shared/contracts/contracts.env && \ - source /shared/p2p/p2p-addresses && \ + source /shared/config/p2p-addresses && \ source /shared/config/service-addresses && \ source /shared/config/keys.env && \ source /shared/config/otel-resource && \ @@ -168,8 +103,6 @@ spec: volumeMounts: - name: contracts-env mountPath: /shared/contracts - - name: p2p-addresses - mountPath: /shared/p2p - name: config mountPath: /shared/config - name: validator-data @@ -270,10 +203,9 @@ spec: - name: scripts configMap: name: {{ include "aztec-network.fullname" . 
}}-scripts + defaultMode: 0755 - name: contracts-env emptyDir: {} - - name: p2p-addresses - emptyDir: {} - name: pxe-url emptyDir: {} - name: config diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index ed10c2a503e..fba758d4c75 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -272,6 +272,7 @@ bot: maxOldSpaceSize: "4608" ethereum: + acceleratedTestDeployments: false replicas: 1 chainId: 1337 blockTime: 12 diff --git a/spartan/aztec-network/values/ci-smoke.yaml b/spartan/aztec-network/values/ci-smoke.yaml index 7d682a91150..eb2d749639d 100644 --- a/spartan/aztec-network/values/ci-smoke.yaml +++ b/spartan/aztec-network/values/ci-smoke.yaml @@ -37,6 +37,7 @@ bot: enabled: false ethereum: + acceleratedTestDeployments: true execution: resources: requests: diff --git a/spartan/aztec-network/values/ci.yaml b/spartan/aztec-network/values/ci.yaml index ae1e157c126..4c4860a5c2b 100644 --- a/spartan/aztec-network/values/ci.yaml +++ b/spartan/aztec-network/values/ci.yaml @@ -4,6 +4,7 @@ aztec: proofSubmissionWindow: 8 ethereum: + acceleratedTestDeployments: true blockTime: 8 execution: resources: diff --git a/spartan/aztec-network/values/ignition-testnet.yaml b/spartan/aztec-network/values/ignition-testnet.yaml index 25d8de36108..b6b411dfa05 100644 --- a/spartan/aztec-network/values/ignition-testnet.yaml +++ b/spartan/aztec-network/values/ignition-testnet.yaml @@ -4,7 +4,7 @@ telemetry: aztec: realProofs: true validatorKeyIndexStart: 0 - proverKeyIndexStart: 1 + proverKeyIndexStart: 3 testAccounts: true bootstrapENRs: "enr:-LO4QDwlKJN0BqMc4hYPsI-MQoR1O7qLVr4TK6DhqGsZT_pPTmg3gS-JD072rKI4vlaR0N4SdeH2gCD09oh-zMVT3JkEhWF6dGVjqDAwLTExMTU1MTExLTAwMDAwMDAwLTAtMmM4ZmM0NjMtMjM3YWFkY2WCaWSCdjSCaXCEI-XzqolzZWNwMjU2azGhA0da3IZGbY1tLdqXgdQKG-SW-Z4D6dvXJBeoXn8EZsCVg3VkcIKd0A,enr:-LO4QPJR493G_BQG1UU0_h-g0TEBnZEJ-zgWYH3YctVAn3GzfM9dWVIO7_TSETXYLy-h34bF6sSoSfpP5O44qsZnp00EhWF6dGVjqDAwLTExMTU1MTExLTAwMDAwMDAwLTAtMmM4ZmM0NjMtMjM3YWFkY2WCaWSCdjSCaXCEIlle64lzZWNwMjU2azGhAwuSF_VE1cRfSc3MvtDZvvaTl2Qo_dJK-Qp7TcnhYWBtg3VkcIKd0A,enr:-LO4QKq488wXvw6vAHToGWJYkxMmKsjQCsFjPs5Pt_MrawlnZ7G-xIfwhkXR1afddf8lFj_RNVZdBfGzHHR262pXNhMEhWF6dGVjqDAwLTExMTU1MTExLTAwMDAwMDAwLTAtMmM4ZmM0NjMtMjM3YWFkY2WCaWSCdjSCaXCEI8VFSYlzZWNwMjU2azGhA2xqOyFaHAARgLAi3dORuPmFHbxgoMDWBZJnnbiatW8jg3VkcIKd0A" contracts: @@ -53,7 +53,7 @@ validator: cpu: "1" proverAgent: - replicas: 8 + replicas: 2 bb: hardwareConcurrency: 31 gke: diff --git a/spartan/testnet-runbook.md b/spartan/testnet-runbook.md index ab9314c570a..55760e513c8 100644 --- a/spartan/testnet-runbook.md +++ b/spartan/testnet-runbook.md @@ -16,25 +16,20 @@ The engineering team's public testnet responsibilities begin after code-freeze. ### Release Notes and Artifact Builds -Verify the `release-please` CI workflow completed successfully and that release notes have been published. If there were no hotfixes, then this simply moves the tags forward to `v0.64.0`, otherwise, it releases `v0.64.X` (and moves the tags). 
-A successful CI run publishes the following Barretenberg artifacts with the release notes: +A successful tagged CI run publishes the following Barretenberg artifacts with the release notes: - Barretenberg for Mac (x86 64-bit) - Barretenberg for Mac (Arm 64-bit) - Barretenberg for Linux (x86 64-bit) - Barretenberg for WASM -Additionally, the following NPM packages are published: - - BB.js - l1-contracts -- yarn-project (see [publish_npm.sh](https://github.com/AztecProtocol/aztec-packages/blob/aztec-packages-v0.63.0/yarn-project/publish_npm.sh)) +- yarn-project The following Docker containers are also published: - aztecprotocol/aztec:latest -- aztecprotocol/aztec-nargo:latest -- aztecprotocol/cli-wallet:latest Lastly, any changes made to developer documentation are published to diff --git a/yarn-project/.yarnrc.yml b/yarn-project/.yarnrc.yml index 8aaa7608581..a6b42f0c4ee 100644 --- a/yarn-project/.yarnrc.yml +++ b/yarn-project/.yarnrc.yml @@ -10,4 +10,4 @@ nodeLinker: node-modules # Do not allow 'yarn install' on CI to format package.json files, # otherwise it will change their hash and bust the cache -immutablePatterns: ['**/package.json'] +immutablePatterns: ['package.json', '*/package.json'] diff --git a/yarn-project/aztec-node/src/aztec-node/config.ts b/yarn-project/aztec-node/src/aztec-node/config.ts index 5949bcd59dd..658b1db5014 100644 --- a/yarn-project/aztec-node/src/aztec-node/config.ts +++ b/yarn-project/aztec-node/src/aztec-node/config.ts @@ -59,10 +59,13 @@ export function getConfigEnvVars(): AztecNodeConfig { } /** - * Returns package name and version. + * Returns package version. */ -export function getPackageInfo() { - const packageJsonPath = resolve(dirname(fileURLToPath(import.meta.url)), '../../package.json'); - const { version, name } = JSON.parse(readFileSync(packageJsonPath).toString()); - return { version, name }; +export function getPackageVersion() { + const releasePleaseManifestPath = resolve( + dirname(fileURLToPath(import.meta.url)), + '../../../../.release-please-manifest.json', + ); + const version = JSON.parse(readFileSync(releasePleaseManifestPath).toString()); + return version['.']; } diff --git a/yarn-project/aztec-node/src/aztec-node/server.test.ts b/yarn-project/aztec-node/src/aztec-node/server.test.ts index 763565ce1ae..c664d2e64ce 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.test.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.test.ts @@ -16,7 +16,10 @@ import { mockTx } from '@aztec/stdlib/testing'; import { MerkleTreeId, PublicDataTreeLeafPreimage } from '@aztec/stdlib/trees'; import { BlockHeader, GlobalVariables, MaxBlockNumber } from '@aztec/stdlib/tx'; +import { readFileSync } from 'fs'; import { type MockProxy, mock } from 'jest-mock-extended'; +import { dirname, resolve } from 'path'; +import { fileURLToPath } from 'url'; import { type AztecNodeConfig, getConfigEnvVars } from './config.js'; import { AztecNodeService } from './server.js'; @@ -204,6 +207,18 @@ describe('aztec node', () => { }); }); + describe('Node Info', () => { + it('returns the correct node version', async () => { + const releasePleaseVersionFile = readFileSync( + resolve(dirname(fileURLToPath(import.meta.url)), '../../../../.release-please-manifest.json'), + ).toString(); + const releasePleaseVersion = JSON.parse(releasePleaseVersionFile)['.']; + + const nodeInfo = await node.getNodeInfo(); + expect(nodeInfo.nodeVersion).toBe(releasePleaseVersion); + }); + }); + describe('getters', () => { describe('getBlockHeader', () => { let initialHeader: BlockHeader; 
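The change above pins the version a node reports: `getPackageVersion()` now reads the `"."` entry of `.release-please-manifest.json`, and the new test asserts that `getNodeInfo().nodeVersion` matches it. As a minimal sketch (not part of this diff), a client can observe the same value over RPC using the `createAztecNodeClient` helper that appears elsewhere in this change set; the node URL below is a placeholder assumption:

```typescript
// Sketch only: query the version advertised by a running Aztec node.
// Assumes a node serving RPC at the placeholder URL below.
import { createAztecNodeClient } from '@aztec/stdlib/interfaces/client';

async function printNodeVersion(url = 'http://localhost:8080') {
  const node = createAztecNodeClient(url);
  const { nodeVersion } = await node.getNodeInfo();
  // With this change, nodeVersion mirrors the manifest entry, e.g. "0.77.1".
  console.log(`node version: ${nodeVersion}`);
}

printNodeVersion().catch(console.error);
```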
diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 4b5bc2519b6..64fe41acc49 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -80,7 +80,7 @@ import { import { createValidatorClient } from '@aztec/validator-client'; import { createWorldStateSynchronizer } from '@aztec/world-state'; -import { type AztecNodeConfig, getPackageInfo } from './config.js'; +import { type AztecNodeConfig, getPackageVersion } from './config.js'; import { NodeMetrics } from './node_metrics.js'; /** @@ -109,10 +109,11 @@ export class AztecNodeService implements AztecNode, Traceable { private telemetry: TelemetryClient = getTelemetryClient(), private log = createLogger('node'), ) { - this.packageVersion = getPackageInfo().version; + this.packageVersion = getPackageVersion(); this.metrics = new NodeMetrics(telemetry, 'AztecNodeService'); this.tracer = telemetry.getTracer('AztecNodeService'); + this.log.info(`Aztec Node version: ${this.packageVersion}`); this.log.info(`Aztec Node started on chain 0x${l1ChainId.toString(16)}`, config.l1Contracts); } diff --git a/yarn-project/aztec/src/cli/chain_l2_config.ts b/yarn-project/aztec/src/cli/chain_l2_config.ts index 61c45ccd65c..a09594cd80d 100644 --- a/yarn-project/aztec/src/cli/chain_l2_config.ts +++ b/yarn-project/aztec/src/cli/chain_l2_config.ts @@ -10,6 +10,8 @@ export type L2ChainConfig = { p2pEnabled: boolean; p2pBootstrapNodes: string[]; registryAddress: string; + seqMinTxsPerBlock: number; + seqMaxTxsPerBlock: number; }; export const testnetIgnitionL2ChainConfig: L2ChainConfig = { @@ -22,6 +24,8 @@ export const testnetIgnitionL2ChainConfig: L2ChainConfig = { p2pEnabled: true, p2pBootstrapNodes: [], registryAddress: '0x12b3ebc176a1646b911391eab3760764f2e05fe3', + seqMinTxsPerBlock: 0, + seqMaxTxsPerBlock: 0, }; export async function getBootnodes(networkName: NetworkNames) { @@ -65,4 +69,6 @@ export async function enrichEnvironmentWithChainConfig(networkName: NetworkNames enrichVar('P2P_ENABLED', config.p2pEnabled.toString()); enrichVar('L1_CHAIN_ID', config.l1ChainId.toString()); enrichVar('REGISTRY_CONTRACT_ADDRESS', config.registryAddress); + enrichVar('SEQ_MIN_TX_PER_BLOCK', config.seqMinTxsPerBlock.toString()); + enrichVar('SEQ_MAX_TX_PER_BLOCK', config.seqMaxTxsPerBlock.toString()); } diff --git a/yarn-project/aztec/src/cli/cmds/start_pxe.ts b/yarn-project/aztec/src/cli/cmds/start_pxe.ts index 19a778cb63c..2f582318a22 100644 --- a/yarn-project/aztec/src/cli/cmds/start_pxe.ts +++ b/yarn-project/aztec/src/cli/cmds/start_pxe.ts @@ -15,7 +15,7 @@ import { type PXEServiceConfig, allPxeConfigMappings, createPXEService, -} from '@aztec/pxe'; +} from '@aztec/pxe/server'; import { type AztecNode, PXESchema, createAztecNodeClient } from '@aztec/stdlib/interfaces/client'; import { L2BasicContractsMap, Network } from '@aztec/stdlib/network'; import { makeTracedFetch } from '@aztec/telemetry-client'; diff --git a/yarn-project/aztec/src/cli/util.ts b/yarn-project/aztec/src/cli/util.ts index 015a6cc2534..bc9532a79b4 100644 --- a/yarn-project/aztec/src/cli/util.ts +++ b/yarn-project/aztec/src/cli/util.ts @@ -1,7 +1,7 @@ import type { AccountManager, Fr } from '@aztec/aztec.js'; import type { ConfigMappingsType } from '@aztec/foundation/config'; import type { LogFn } from '@aztec/foundation/log'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import chalk from 'chalk'; import type { Command } 
from 'commander';
diff --git a/yarn-project/aztec/src/sandbox/sandbox.ts b/yarn-project/aztec/src/sandbox/sandbox.ts
index f9c800f51ee..b5b4990b2d0 100644
--- a/yarn-project/aztec/src/sandbox/sandbox.ts
+++ b/yarn-project/aztec/src/sandbox/sandbox.ts
@@ -17,7 +17,7 @@ import { Fr } from '@aztec/foundation/fields';
 import { type LogFn, createLogger } from '@aztec/foundation/log';
 import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree';
 import { ProtocolContractAddress, protocolContractTreeRoot } from '@aztec/protocol-contracts';
-import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe';
+import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe/server';
 import type { AztecNode } from '@aztec/stdlib/interfaces/client';
 import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees';
 import {
diff --git a/yarn-project/bootstrap.sh b/yarn-project/bootstrap.sh
index 3065370e082..944c3b2a7da 100755
--- a/yarn-project/bootstrap.sh
+++ b/yarn-project/bootstrap.sh
@@ -92,7 +92,7 @@ function build {
     # files to yarn immutablePatterns, so if they are also changed, this step will fail.
     denoise "retry yarn install --immutable"
   else
-    denoise "yarn install"
+    denoise "yarn install --no-immutable"
   fi
   denoise "compile_all"
   echo -e "${green}Yarn project successfully built!${reset}"
diff --git a/yarn-project/cli-wallet/src/bin/index.ts b/yarn-project/cli-wallet/src/bin/index.ts
index 6b00de35169..9579904727e 100644
--- a/yarn-project/cli-wallet/src/bin/index.ts
+++ b/yarn-project/cli-wallet/src/bin/index.ts
@@ -2,6 +2,7 @@ import { Fr, computeSecretHash, fileURLToPath } from '@aztec/aztec.js';
 import { LOCALHOST } from '@aztec/cli/cli-utils';
 import { type LogFn, createConsoleLogger, createLogger } from '@aztec/foundation/log';
 import { openStoreAt } from '@aztec/kv-store/lmdb-v2';
+import type { PXEServiceConfig } from '@aztec/pxe/config';
 
 import { Argument, Command, Option } from 'commander';
 import { mkdirSync, readFileSync } from 'fs';
@@ -94,18 +95,22 @@ async function main() {
     if (!remotePxe) {
       debugLogger.info('Using local PXE service');
+      const proverEnabled = prover !== 'none';
+      const bbBinaryPath = prover === 'native' ? resolve(dirname(fileURLToPath(import.meta.url)), '../../../../barretenberg/cpp/build/bin/bb') : undefined;
       const bbWorkingDirectory = dataDir + '/bb';
-      const proverEnabled = prover !== 'none';
-
       mkdirSync(bbWorkingDirectory, { recursive: true });
-      await pxeWrapper.init(nodeUrl, join(dataDir, 'pxe'), {
-        ...(proverEnabled && { proverEnabled, bbBinaryPath, bbWorkingDirectory }), // only override if we're profiling
-      });
+      const overridePXEConfig: Partial<PXEServiceConfig> = {
+        proverEnabled,
+        bbBinaryPath: prover === 'native' ? bbBinaryPath : undefined,
+        bbWorkingDirectory: prover === 'native' ?
bbWorkingDirectory : undefined, + }; + + await pxeWrapper.init(nodeUrl, join(dataDir, 'pxe'), overridePXEConfig); } await db.init(await openStoreAt(dataDir)); }); diff --git a/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts b/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts index 8a83c05d1ad..52883f36b4a 100644 --- a/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts +++ b/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts @@ -1,4 +1,4 @@ -import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe/server'; import { type AztecNode, type PXE, createAztecNodeClient } from '@aztec/stdlib/interfaces/client'; /* diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts index 6ffd0fa1a0d..23d006155fd 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts @@ -14,6 +14,7 @@ export async function deployL1Contracts( mnemonicIndex: number, salt: number | undefined, testAccounts: boolean, + acceleratedTestDeployments: boolean, json: boolean, initialValidators: EthAddress[], log: LogFn, @@ -34,6 +35,7 @@ export async function deployL1Contracts( initialValidators, genesisArchiveRoot, genesisBlockHash, + acceleratedTestDeployments, config, debugLogger, ); diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index 1b8cd88c41c..0e291874633 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -42,6 +42,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: Logger .option('--salt ', 'The optional salt to use in deployment', arg => parseInt(arg)) .option('--json', 'Output the contract addresses in JSON format') .option('--test-accounts', 'Populate genesis state with initial fee juice for test accounts') + .option('--accelerated-test-deployments', 'Fire and forget deployment transactions, use in testing only', false) .action(async options => { const { deployL1Contracts } = await import('./deploy_l1_contracts.js'); @@ -55,6 +56,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: Logger options.mnemonicIndex, options.salt, options.testAccounts, + options.acceleratedTestDeployments, options.json, initialValidators, log, diff --git a/yarn-project/cli/src/utils/aztec.ts b/yarn-project/cli/src/utils/aztec.ts index 620e7db7814..bae192a3a7c 100644 --- a/yarn-project/cli/src/utils/aztec.ts +++ b/yarn-project/cli/src/utils/aztec.ts @@ -44,6 +44,7 @@ export async function deployAztecContracts( initialValidators: EthAddress[], genesisArchiveRoot: Fr, genesisBlockHash: Fr, + acceleratedTestDeployments: boolean, config: L1ContractsConfig, debugLogger: Logger, ): Promise { @@ -70,6 +71,7 @@ export async function deployAztecContracts( genesisBlockHash, salt, initialValidators, + acceleratedTestDeployments, ...config, }, config, diff --git a/yarn-project/end-to-end/src/bench/utils.ts b/yarn-project/end-to-end/src/bench/utils.ts index c3ae647bd6a..76023777427 100644 --- a/yarn-project/end-to-end/src/bench/utils.ts +++ b/yarn-project/end-to-end/src/bench/utils.ts @@ -3,7 +3,7 @@ import { type AztecNode, BatchCall, INITIAL_L2_BLOCK_NUM, type SentTx, type Wait import { mean, stdDev, timesParallel } from '@aztec/foundation/collection'; import { randomInt } from '@aztec/foundation/crypto'; import { BenchmarkingContract } from 
'@aztec/noir-contracts.js/Benchmarking'; -import { type PXEService, type PXEServiceConfig, createPXEService } from '@aztec/pxe'; +import { type PXEService, type PXEServiceConfig, createPXEService } from '@aztec/pxe/server'; import type { MetricsType } from '@aztec/telemetry-client'; import type { BenchmarkDataPoint, BenchmarkMetricsType, BenchmarkTelemetryClient } from '@aztec/telemetry-client/bench'; diff --git a/yarn-project/end-to-end/src/composed/e2e_pxe.test.ts b/yarn-project/end-to-end/src/composed/e2e_pxe.test.ts index 89bcae3be1a..98db76e755a 100644 --- a/yarn-project/end-to-end/src/composed/e2e_pxe.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_pxe.test.ts @@ -1,5 +1,5 @@ import { waitForPXE } from '@aztec/aztec.js'; -import { pxeTestSuite } from '@aztec/pxe'; +import { pxeTestSuite } from '@aztec/pxe/testing'; import { setup } from '../fixtures/utils.js'; diff --git a/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts index d72c9cb9e71..e55dfe12075 100644 --- a/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts +++ b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts @@ -1,7 +1,7 @@ import type { Logger, PXE, Wallet } from '@aztec/aztec.js'; import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { EthAddress } from '@aztec/foundation/eth-address'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import { jest } from '@jest/globals'; import { privateKeyToAccount } from 'viem/accounts'; diff --git a/yarn-project/end-to-end/src/e2e_p2p/shared.ts b/yarn-project/end-to-end/src/e2e_p2p/shared.ts index dd9b581f47e..6c625ac1ed1 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/shared.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/shared.ts @@ -3,7 +3,7 @@ import type { InitialAccountData } from '@aztec/accounts/testing'; import type { AztecNodeService } from '@aztec/aztec-node'; import { type Logger, type SentTx, TxStatus } from '@aztec/aztec.js'; import type { SpamContract } from '@aztec/noir-contracts.js/Spam'; -import { createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; +import { createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe/server'; import type { NodeContext } from '../fixtures/setup_p2p_test.js'; import { submitTxsTo } from '../shared/submit-transactions.js'; diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index a7811c7a9e2..2dd3dcaf6e5 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -24,7 +24,7 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { HonkVerifierAbi, HonkVerifierBytecode, RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { type ProverNode, type ProverNodeConfig, createProverNode } from '@aztec/prover-node'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import { getGenesisValues } from '@aztec/world-state/testing'; import { type Hex, getContract } from 'viem'; diff --git a/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts b/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts index dd29b0d5fc7..5979422dfb4 100644 --- a/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_sequencer/gov_proposal.test.ts @@ -9,7 +9,7 @@ import { import { EthAddress } from '@aztec/foundation/eth-address'; import { NewGovernanceProposerPayloadAbi } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadAbi'; import { NewGovernanceProposerPayloadBytecode } from '@aztec/l1-artifacts/NewGovernanceProposerPayloadBytecode'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import { privateKeyToAccount } from 'viem/accounts'; diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 3a6d962e1a3..89c4a858e58 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -61,7 +61,7 @@ import { RollupAbi } from '@aztec/l1-artifacts'; import { SchnorrHardcodedAccountContract } from '@aztec/noir-contracts.js/SchnorrHardcodedAccount'; import { SpamContract } from '@aztec/noir-contracts.js/Spam'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import { SequencerPublisher } from '@aztec/sequencer-client'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import { L2Block } from '@aztec/stdlib/block'; diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 84396c497a1..abe39cc0100 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -5,7 +5,7 @@ import { type AztecNodeConfig, AztecNodeService } from '@aztec/aztec-node'; import type { SentTx } from '@aztec/aztec.js'; import { addLogNameHandler, removeLogNameHandler } from '@aztec/foundation/log'; import type { DateProvider } from '@aztec/foundation/timer'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; import type { PublicDataTreeLeaf } from '@aztec/stdlib/trees'; import getPort from 'get-port'; diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 5e56093da06..d9ea3e2b42a 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -26,7 +26,7 @@ import { createLogger } from '@aztec/foundation/log'; import { resolver, reviver } from '@aztec/foundation/serialize'; import { TestDateProvider } from '@aztec/foundation/timer'; import type { ProverNode } from '@aztec/prover-node'; -import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe/server'; import { getConfigEnvVars as getTelemetryConfig, initTelemetryClient } from '@aztec/telemetry-client'; import { getGenesisValues } from '@aztec/world-state/testing'; diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index d52264bc8c0..18edbcce1aa 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -53,7 +53,7 @@ import { FeeJuiceContract } from '@aztec/noir-contracts.js/FeeJuice'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types/vk-tree'; import { ProtocolContractAddress, protocolContractTreeRoot } from '@aztec/protocol-contracts'; import { type ProverNode, type 
ProverNodeConfig, createProverNode } from '@aztec/prover-node'; -import { type PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; +import { type PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe/server'; import type { SequencerClient } from '@aztec/sequencer-client'; import type { TestSequencerClient } from '@aztec/sequencer-client/test'; import { getContractClassFromArtifact } from '@aztec/stdlib/contract'; diff --git a/yarn-project/end-to-end/src/shared/submit-transactions.ts b/yarn-project/end-to-end/src/shared/submit-transactions.ts index 02cacfe6f83..f00698a2fd5 100644 --- a/yarn-project/end-to-end/src/shared/submit-transactions.ts +++ b/yarn-project/end-to-end/src/shared/submit-transactions.ts @@ -1,7 +1,7 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { Fr, GrumpkinScalar, type Logger, type SentTx, TxStatus, type Wallet } from '@aztec/aztec.js'; import { times } from '@aztec/foundation/collection'; -import type { PXEService } from '@aztec/pxe'; +import type { PXEService } from '@aztec/pxe/server'; // submits a set of transactions to the provided Private eXecution Environment (PXE) export const submitTxsTo = async ( diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 9c36418cfaa..dc37f112757 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -1,6 +1,6 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import type { Fr } from '@aztec/foundation/fields'; -import type { Logger } from '@aztec/foundation/log'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import { CoinIssuerAbi, CoinIssuerBytecode, @@ -43,6 +43,7 @@ import { createPublicClient, createWalletClient, encodeDeployData, + encodeFunctionData, fallback, getAddress, getContract, @@ -60,7 +61,13 @@ import type { L1ContractsConfig } from './config.js'; import { RegistryContract } from './contracts/registry.js'; import { RollupContract } from './contracts/rollup.js'; import type { L1ContractAddresses } from './l1_contract_addresses.js'; -import { L1TxUtils, type L1TxUtilsConfig, defaultL1TxUtilsConfig } from './l1_tx_utils.js'; +import { + type GasPrice, + type L1TxRequest, + L1TxUtils, + type L1TxUtilsConfig, + defaultL1TxUtilsConfig, +} from './l1_tx_utils.js'; import type { L1Clients, ViemPublicClient, ViemWalletClient } from './types.js'; export const DEPLOYER_ADDRESS: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; @@ -204,6 +211,8 @@ export interface DeployL1ContractsArgs extends L1ContractsConfig { initialValidators?: EthAddress[]; /** Configuration for the L1 tx utils module. 
*/ l1TxConfig?: Partial; + /** Enable fast mode for deployments (fire and forget transactions) */ + acceleratedTestDeployments?: boolean; } /** @@ -255,7 +264,14 @@ export const deployRollupAndPeriphery = async ( logger: Logger, txUtilsConfig: L1TxUtilsConfig, ) => { - const deployer = new L1Deployer(clients.walletClient, clients.publicClient, args.salt, logger, txUtilsConfig); + const deployer = new L1Deployer( + clients.walletClient, + clients.publicClient, + args.salt, + args.acceleratedTestDeployments, + logger, + txUtilsConfig, + ); const addresses = await RegistryContract.collectAddresses(clients.publicClient, registryAddress, 'canonical'); @@ -337,31 +353,48 @@ export const deployRollup = async ( if (args.initialValidators && args.initialValidators.length > 0) { // Check if some of the initial validators are already registered, so we support idempotent deployments - const validatorsInfo = await Promise.all( - args.initialValidators.map(async address => ({ address, ...(await rollup.read.getInfo([address.toString()])) })), - ); - const existingValidators = validatorsInfo.filter(v => v.status !== 0); - if (existingValidators.length > 0) { - logger.warn( - `Validators ${existingValidators.map(v => v.address).join(', ')} already exist. Skipping from initialization.`, + let newValidatorsAddresses = args.initialValidators.map(v => v.toString()); + if (!args.acceleratedTestDeployments) { + const validatorsInfo = await Promise.all( + args.initialValidators.map(async address => ({ + address, + ...(await rollup.read.getInfo([address.toString()])), + })), ); - } + const existingValidators = validatorsInfo.filter(v => v.status !== 0); + if (existingValidators.length > 0) { + logger.warn( + `Validators ${existingValidators + .map(v => v.address) + .join(', ')} already exist. Skipping from initialization.`, + ); + } - const newValidatorsAddresses = validatorsInfo.filter(v => v.status === 0).map(v => v.address.toString()); + newValidatorsAddresses = validatorsInfo.filter(v => v.status === 0).map(v => v.address.toString()); + } if (newValidatorsAddresses.length > 0) { - const stakingAsset = getContract({ - address: addresses.stakingAssetAddress.toString(), - abi: l1Artifacts.stakingAsset.contractAbi, - client: clients.walletClient, - }); // Mint tokens, approve them, use cheat code to initialise validator set without setting up the epoch. 
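
For orientation, the new flag travels from the CLI option into `DeployL1ContractsArgs` and from there into `L1Deployer` and `L1TxUtils`. A minimal sketch of enabling it programmatically, assuming the parameter order shown in this diff; the RPC URL, `account`, `chain`, and `baseArgs` are placeholders, and the import location of `deployL1Contracts`/`defaultL1TxUtilsConfig` is assumed to be the package root:

```ts
// Sketch only: turning on fire-and-forget deployments in a test harness.
import { createLogger } from '@aztec/foundation/log';

const logger = createLogger('deploy-l1');
const deployed = await deployL1Contracts(
  ['http://localhost:8545'], // placeholder anvil RPC URL
  account,                   // placeholder viem account
  chain,                     // placeholder viem chain
  logger,
  {
    ...baseArgs,                      // remaining DeployL1ContractsArgs fields (placeholder)
    acceleratedTestDeployments: true, // skip receipt waits and idempotency reads; testing only
  },
  defaultL1TxUtilsConfig,
);
```
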
const stakeNeeded = args.minimumStake * BigInt(newValidatorsAddresses.length); await Promise.all( [ - await stakingAsset.write.mint([clients.walletClient.account.address, stakeNeeded], {} as any), - await stakingAsset.write.approve([rollupAddress.toString(), stakeNeeded], {} as any), - ].map(txHash => clients.publicClient.waitForTransactionReceipt({ hash: txHash })), + await deployer.sendTransaction({ + to: addresses.stakingAssetAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.stakingAsset.contractAbi, + functionName: 'mint', + args: [clients.walletClient.account.address, stakeNeeded], + }), + }), + await deployer.sendTransaction({ + to: addresses.stakingAssetAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.stakingAsset.contractAbi, + functionName: 'approve', + args: [rollupAddress.toString(), stakeNeeded], + }), + }), + ].map(tx => clients.publicClient.waitForTransactionReceipt({ hash: tx.txHash })), ); const validators = newValidatorsAddresses.map(v => ({ @@ -370,7 +403,13 @@ withdrawer: v, amount: args.minimumStake, })); - const initiateValidatorSetTxHash = await rollup.write.cheat__InitialiseValidatorSet([validators]); + const initiateValidatorSetTxHash = await deployer.walletClient.writeContract({ + address: rollupAddress.toString(), + abi: l1Artifacts.rollup.contractAbi, + functionName: 'cheat__InitialiseValidatorSet', + args: [validators], + }); txHashes.push(initiateValidatorSetTxHash); logger.info(`Initialized validator set`, { validators, @@ -401,9 +440,11 @@ args: DeployL1ContractsArgs, txUtilsConfig: L1TxUtilsConfig = defaultL1TxUtilsConfig, ): Promise => { + const clients = createL1Clients(rpcUrls, account, chain); + const { walletClient, publicClient } = clients; + // We are assuming that you are running this on a local anvil node which has 1s block times // To align better with actual deployment, we update the block interval to 12s - const { walletClient, publicClient } = createL1Clients(rpcUrls, account, chain); const rpcCall = async (method: string, params: any[]) => { logger.info(`Calling ${method} with params: ${JSON.stringify(params)}`); @@ -425,7 +466,14 @@ logger.verbose(`Deploying contracts from ${account.address.toString()}`); // Governance stuff - const deployer = new L1Deployer(walletClient, publicClient, args.salt, logger, txUtilsConfig); + const deployer = new L1Deployer( + walletClient, + publicClient, + args.salt, + args.acceleratedTestDeployments, + logger, + txUtilsConfig, + ); const registryAddress = await deployer.deploy(l1Artifacts.registry, [account.address.toString()]); logger.verbose(`Deployed Registry at ${registryAddress}`); @@ -494,14 +542,28 @@ // Transaction hashes to await const txHashes: Hex[] = []; - if (!(await feeAsset.read.freeForAll())) { - const txHash = await feeAsset.write.setFreeForAll([true], {} as any); + if (args.acceleratedTestDeployments || !(await feeAsset.read.freeForAll())) { + const { txHash } = await deployer.sendTransaction({ + to: feeAssetAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.feeAsset.contractAbi, + functionName: 'setFreeForAll', + args: [true], + }), + }); logger.verbose(`Fee asset set to free for all in ${txHash}`); txHashes.push(txHash); } - if ((await feeAsset.read.owner()) !==
getAddress(coinIssuerAddress.toString())) { - const txHash = await feeAsset.write.transferOwnership([coinIssuerAddress.toString()], { account }); + if (args.acceleratedTestDeployments || (await feeAsset.read.owner()) !== getAddress(coinIssuerAddress.toString())) { + const { txHash } = await deployer.sendTransaction({ + to: feeAssetAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.feeAsset.contractAbi, + functionName: 'transferOwnership', + args: [coinIssuerAddress.toString()], + }), + }); logger.verbose(`Fee asset transferred ownership to coin issuer in ${txHash}`); txHashes.push(txHash); } @@ -511,21 +573,47 @@ // @todo #8084 // fund the portal contract with Fee Juice const FEE_JUICE_INITIAL_MINT = 200000000000000000000000n; - const mintTxHash = await feeAsset.write.mint([feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], {} as any); + + // Send the mint through the deployer so L1TxUtils handles nonce management; this path also serves accelerated test deployments + const { txHash: mintTxHash } = await deployer.sendTransaction({ + to: feeAssetAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.feeAsset.contractAbi, + functionName: 'mint', + args: [feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], + }), + }); + logger.verbose(`Funding fee juice portal contract with fee juice in ${mintTxHash}`); + txHashes.push(mintTxHash); // @note This is used to ensure we fully wait for the transaction when running against a real chain // otherwise we execute subsequent transactions too soon - await publicClient.waitForTransactionReceipt({ hash: mintTxHash }); - logger.verbose(`Funding fee juice portal contract with fee juice in ${mintTxHash}`); + if (!args.acceleratedTestDeployments) { + await publicClient.waitForTransactionReceipt({ hash: mintTxHash }); + } + + // Check if the portal needs initialization + let needsInitialization = args.acceleratedTestDeployments; + if (!args.acceleratedTestDeployments) { + // Outside accelerated mode, skip the call when the portal is already initialized + needsInitialization = !(await feeJuicePortal.read.initialized()); + } + if (needsInitialization) { + const { txHash: initPortalTxHash } = await deployer.sendTransaction({ + to: feeJuicePortalAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.feeJuicePortal.contractAbi, + functionName: 'initialize', + args: [], + }), + }); - if (!(await feeJuicePortal.read.initialized())) { - const initPortalTxHash = await feeJuicePortal.write.initialize(); txHashes.push(initPortalTxHash); logger.verbose(`Fee juice portal initializing in tx ${initPortalTxHash}`); } else { logger.verbose(`Fee juice portal is already initialized`); } - logger.verbose( `Initialized Fee Juice Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeAssetAddress} to L2 ${args.l2FeeJuiceAddress}`, ); @@ -553,8 +641,18 @@ client: walletClient, }); - if (!(await registryContract.read.isRollupRegistered([getAddress(rollup.address.toString())]))) { - const upgradeTxHash = await registryContract.write.upgrade([getAddress(rollup.address.toString())], { account }); + if ( + args.acceleratedTestDeployments || + !(await registryContract.read.isRollupRegistered([getAddress(rollup.address.toString())])) + ) { + const { txHash: upgradeTxHash } = await deployer.sendTransaction({ + to: registryAddress.toString(), + data: encodeFunctionData({ + abi: 
l1Artifacts.registry.contractAbi, + functionName: 'upgrade', + args: [getAddress(rollup.address.toString())], + }), + }); logger.verbose( `Upgrading registry contract at ${registryAddress} to rollup ${rollup.address} in tx ${upgradeTxHash}`, ); @@ -564,13 +662,19 @@ export const deployL1Contracts = async ( } // If the owner is not the Governance contract, transfer ownership to the Governance contract - if ((await registryContract.read.owner()) !== getAddress(governanceAddress.toString())) { - const transferOwnershipTxHash = await registryContract.write.transferOwnership( - [getAddress(governanceAddress.toString())], - { - account, - }, - ); + if ( + args.acceleratedTestDeployments || + (await registryContract.read.owner()) !== getAddress(governanceAddress.toString()) + ) { + // TODO(md): add send transaction to the deployer such that we do not need to manage tx hashes here + const { txHash: transferOwnershipTxHash } = await deployer.sendTransaction({ + to: registryAddress.toString(), + data: encodeFunctionData({ + abi: l1Artifacts.registry.contractAbi, + functionName: 'transferOwnership', + args: [getAddress(governanceAddress.toString())], + }), + }); logger.verbose( `Transferring the ownership of the registry contract at ${registryAddress} to the Governance ${governanceAddress} in tx ${transferOwnershipTxHash}`, ); @@ -620,16 +724,24 @@ export const deployL1Contracts = async ( class L1Deployer { private salt: Hex | undefined; private txHashes: Hex[] = []; - private l1TxUtils: L1TxUtils; + public readonly l1TxUtils: L1TxUtils; + constructor( - private walletClient: ViemWalletClient, + public readonly walletClient: ViemWalletClient, private publicClient: ViemPublicClient, maybeSalt: number | undefined, - private logger: Logger, + private acceleratedTestDeployments: boolean = false, + private logger: Logger = createLogger('L1Deployer'), private txUtilsConfig?: L1TxUtilsConfig, ) { this.salt = maybeSalt ? 
padHex(numberToHex(maybeSalt), { size: 32 }) : undefined; - this.l1TxUtils = new L1TxUtils(this.publicClient, this.walletClient, this.logger, this.txUtilsConfig); + this.l1TxUtils = new L1TxUtils( + this.publicClient, + this.walletClient, + this.logger, + this.txUtilsConfig, + this.acceleratedTestDeployments, + ); } async deploy(params: ContractArtifacts, args: readonly unknown[] = []): Promise { @@ -643,6 +755,7 @@ class L1Deployer { params.libraries, this.logger, this.l1TxUtils, + this.acceleratedTestDeployments, ); if (txHash) { this.txHashes.push(txHash); @@ -651,7 +764,21 @@ class L1Deployer { } async waitForDeployments(): Promise { + if (this.acceleratedTestDeployments) { + this.logger.info('Accelerated test deployments - skipping waiting for deployments'); + return; + } + if (this.txHashes.length === 0) { + return; + } + + this.logger.info(`Waiting for ${this.txHashes.length} transactions to be mined...`); await Promise.all(this.txHashes.map(txHash => this.publicClient.waitForTransactionReceipt({ hash: txHash }))); + this.logger.info('All transactions mined successfully'); + } + + sendTransaction(tx: L1TxRequest): Promise<{ txHash: Hex; gasLimit: bigint; gasPrice: GasPrice }> { + return this.l1TxUtils.sendTransaction(tx); } } @@ -675,14 +802,14 @@ export async function deployL1Contract( maybeSalt?: Hex, libraries?: Libraries, logger?: Logger, - _l1TxUtils?: L1TxUtils, + l1TxUtils?: L1TxUtils, + acceleratedTestDeployments: boolean = false, ): Promise<{ address: EthAddress; txHash: Hex | undefined }> { let txHash: Hex | undefined = undefined; let resultingAddress: Hex | null | undefined = undefined; - let l1TxUtils: L1TxUtils | undefined = _l1TxUtils; if (!l1TxUtils) { - l1TxUtils = new L1TxUtils(publicClient, walletClient, logger); + l1TxUtils = new L1TxUtils(publicClient, walletClient, logger, undefined, acceleratedTestDeployments); } if (libraries) { @@ -712,6 +839,7 @@ export async function deployL1Contract( undefined, logger, l1TxUtils, + acceleratedTestDeployments, ); if (txHash) { @@ -746,9 +874,16 @@ export async function deployL1Contract( // Reth fails gas estimation if the deployed contract attempts to call a library that is not yet deployed, // so we wait for all library deployments to be mined before deploying the contract. - if (libraryTxs.length > 0) { + // However, if we are in fast mode or using debugMaxGasLimit, we will skip simulation, so we can skip waiting. + if (libraryTxs.length > 0 && !acceleratedTestDeployments) { logger?.verbose(`Awaiting for linked libraries to be deployed`); await Promise.all(libraryTxs.map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash }))); + } else { + logger?.verbose( + `Skipping waiting for linked libraries to be deployed ${ + acceleratedTestDeployments ? 
'(accelerated test deployments)' : '(none pending)' + }`, + ); + } } diff --git a/yarn-project/ethereum/src/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils.ts index a050ef34ade..1b75f3f28a5 100644 --- a/yarn-project/ethereum/src/l1_tx_utils.ts +++ b/yarn-project/ethereum/src/l1_tx_utils.ts @@ -5,7 +5,7 @@ import { getDefaultConfig, numberConfigHelper, } from '@aztec/foundation/config'; -import type { Logger } from '@aztec/foundation/log'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import { makeBackoff, retry } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; @@ -190,14 +190,15 @@ export type TransactionStats = { }; export class L1TxUtils { - protected readonly config: L1TxUtilsConfig; + public readonly config: L1TxUtilsConfig; private interrupted = false; constructor( public publicClient: ViemPublicClient, public walletClient: ViemWalletClient, - protected readonly logger?: Logger, + protected logger: Logger = createLogger('L1TxUtils'), config?: Partial, + private debugMaxGasLimit: boolean = false, ) { this.config = { ...defaultL1TxUtilsConfig, @@ -248,7 +249,9 @@ const account = this.walletClient.account; let gasLimit: bigint; - if (gasConfig.gasLimit) { + if (this.debugMaxGasLimit) { + gasLimit = LARGE_GAS_LIMIT; + } else if (gasConfig.gasLimit) { gasLimit = gasConfig.gasLimit; } else { gasLimit = await this.estimateGas(account, request); @@ -288,7 +291,9 @@ return { txHash, gasLimit, gasPrice }; } catch (err: any) { const viemError = formatViemError(err); - this.logger?.error(`Failed to send L1 transaction`, viemError.message, { metaMessages: viemError.metaMessages }); + this.logger?.error(`Failed to send L1 transaction`, viemError.message, { + metaMessages: viemError.metaMessages, + }); throw viemError; } } diff --git a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts index 1af6e6670f0..fedd1a40b1d 100644 --- a/yarn-project/p2p/src/services/libp2p/libp2p_service.ts +++ b/yarn-project/p2p/src/services/libp2p/libp2p_service.ts @@ -137,7 +137,7 @@ export class LibP2PService extends WithTracer implements this.blockProposalValidator = new BlockProposalValidator(epochCache); this.blockReceivedCallback = async (block: BlockProposal): Promise => { - this.logger.warn( + this.logger.debug( `Handler not yet registered: Block received callback not set.
Received block for slot ${block.slotNumber.toNumber()} from peer.`, { p2pMessageIdentifier: await block.p2pMessageIdentifier() }, ); diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index f1c4afef806..04a5083b2b9 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -3,20 +3,13 @@ "version": "0.1.0", "type": "module", "exports": { - ".": "./dest/index.js", - "./service": "./dest/pxe_service/index.js", + "./server": "./dest/entrypoints/server/index.js", + "./client/lazy": "./dest/entrypoints/client/lazy/index.js", + "./client/bundle": "./dest/entrypoints/client/bundle/index.js", "./config": "./dest/config/index.js", - "./database": "./dest/database/index.js", - "./kernel_prover": "./dest/kernel_prover/index.js" + "./testing": "./dest/test/pxe_test_suite.js" }, "bin": "./dest/bin/index.js", - "typedocOptions": { - "entryPoints": [ - "./src/index.ts" - ], - "name": "Wallet", - "tsconfig": "./tsconfig.json" - }, "scripts": { "build": "yarn clean && yarn generate && tsc -b", "build:dev": "tsc -b --watch", diff --git a/yarn-project/pxe/src/bin/index.ts b/yarn-project/pxe/src/bin/index.ts index 0a60ce14cea..9b6224b8e25 100644 --- a/yarn-project/pxe/src/bin/index.ts +++ b/yarn-project/pxe/src/bin/index.ts @@ -3,8 +3,8 @@ import { createLogger } from '@aztec/foundation/log'; import { createAztecNodeClient } from '@aztec/stdlib/interfaces/client'; import { getPXEServiceConfig } from '../config/index.js'; +import { createPXEService } from '../entrypoints/server/utils.js'; import { startPXEHttpServer } from '../pxe_http/index.js'; -import { createPXEService } from '../utils/create_pxe_service.js'; const { PXE_PORT = 8080, AZTEC_NODE_URL = 'http://localhost:8079' } = process.env; diff --git a/yarn-project/pxe/src/config/index.ts b/yarn-project/pxe/src/config/index.ts index 62b788c831f..abef74bee99 100644 --- a/yarn-project/pxe/src/config/index.ts +++ b/yarn-project/pxe/src/config/index.ts @@ -10,6 +10,8 @@ import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config import { type ChainConfig, chainConfigMappings } from '@aztec/stdlib/config'; import type { Network } from '@aztec/stdlib/network'; +export { getPackageInfo } from './package_info.js'; + /** * Temporary configuration until WASM can be used instead of native */ diff --git a/yarn-project/pxe/src/database/index.ts b/yarn-project/pxe/src/database/index.ts deleted file mode 100644 index 933d0356eed..00000000000 --- a/yarn-project/pxe/src/database/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './kv_pxe_database.js'; -export * from './interfaces/index.js'; diff --git a/yarn-project/pxe/src/database/interfaces/contract_artifact_db.ts b/yarn-project/pxe/src/database/interfaces/contract_artifact_db.ts deleted file mode 100644 index 9cf9c217dec..00000000000 --- a/yarn-project/pxe/src/database/interfaces/contract_artifact_db.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Fr } from '@aztec/foundation/fields'; -import type { ContractArtifact } from '@aztec/stdlib/abi'; - -/** - * PXE database for managing contract artifacts. - */ -export interface ContractArtifactDatabase { - /** - * Adds a new contract artifact to the database or updates an existing one. - * @param id - Id of the corresponding contract class. - * @param contract - Contract artifact to add. - * @throws - If there are duplicate private function selectors. - */ - addContractArtifact(id: Fr, contract: ContractArtifact): Promise; - /** - * Gets a contract artifact given its resulting contract class id. 
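
The reworked export map above is what drives the `@aztec/pxe` import churn throughout this diff: consumers now pick an entrypoint explicitly. The `/server` and `/testing` paths below are taken from the updated import sites in this diff; the notes on the two client builds are an assumption based on their names:

```ts
// Node-side services and tests:
import { createPXEService, getPXEServiceConfig, type PXEService } from '@aztec/pxe/server';
import { pxeTestSuite } from '@aztec/pxe/testing';

// Browser consumers would choose one of the client builds instead (assumed split:
// 'lazy' defers loading heavy artifacts, 'bundle' ships everything up front):
// import { createPXEService } from '@aztec/pxe/client/lazy';
// import { createPXEService } from '@aztec/pxe/client/bundle';
```
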
- * @param id - Contract class id for the given artifact. - */ - getContractArtifact(id: Fr): Promise; -} diff --git a/yarn-project/pxe/src/database/interfaces/contract_instance_db.ts b/yarn-project/pxe/src/database/interfaces/contract_instance_db.ts deleted file mode 100644 index 6bf084a375b..00000000000 --- a/yarn-project/pxe/src/database/interfaces/contract_instance_db.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { ContractInstanceWithAddress } from '@aztec/stdlib/contract'; - -/** - * PXE database for managing contract instances. - */ -export interface ContractInstanceDatabase { - /** - * Adds a new contract to the db or updates an existing one. - * @param contract - Contract to insert. - */ - addContractInstance(contract: ContractInstanceWithAddress): Promise; - /** - * Gets a contract given its address. - * @param address - Address of the contract. - */ - getContractInstance(address: AztecAddress): Promise; - - /** Returns the addresses all contract instances registered in the DB. */ - getContractsAddresses(): Promise; -} diff --git a/yarn-project/pxe/src/database/interfaces/index.ts b/yarn-project/pxe/src/database/interfaces/index.ts deleted file mode 100644 index 1a772fe8750..00000000000 --- a/yarn-project/pxe/src/database/interfaces/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -export type { ContractArtifactDatabase } from './contract_artifact_db.js'; -export type { ContractInstanceDatabase } from './contract_instance_db.js'; -export type { PxeDatabase } from './pxe_database.js'; diff --git a/yarn-project/pxe/src/database/interfaces/pxe_database.ts b/yarn-project/pxe/src/database/interfaces/pxe_database.ts deleted file mode 100644 index ccb000397b7..00000000000 --- a/yarn-project/pxe/src/database/interfaces/pxe_database.ts +++ /dev/null @@ -1,240 +0,0 @@ -import type { Fr } from '@aztec/foundation/fields'; -import type { ContractArtifact } from '@aztec/stdlib/abi'; -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { InBlock } from '@aztec/stdlib/block'; -import type { CompleteAddress, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; -import type { PublicKey } from '@aztec/stdlib/keys'; -import type { IndexedTaggingSecret } from '@aztec/stdlib/logs'; -import type { NotesFilter } from '@aztec/stdlib/note'; -import type { BlockHeader } from '@aztec/stdlib/tx'; - -import type { NoteDao } from '../note_dao.js'; -import type { ContractArtifactDatabase } from './contract_artifact_db.js'; -import type { ContractInstanceDatabase } from './contract_instance_db.js'; - -/** - * A database interface that provides methods for retrieving, adding, and removing transactional data related to Aztec - * addresses, storage slots, and nullifiers. - */ -export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceDatabase { - getContract(address: AztecAddress): Promise<(ContractInstanceWithAddress & ContractArtifact) | undefined>; - - /** - * Add a auth witness to the database. - * @param messageHash - The message hash. - * @param witness - An array of field elements representing the auth witness. - */ - addAuthWitness(messageHash: Fr, witness: Fr[]): Promise; - - /** - * Fetching the auth witness for a given message hash. - * @param messageHash - The message hash. - * @returns A Promise that resolves to an array of field elements representing the auth witness. - */ - getAuthWitness(messageHash: Fr): Promise; - - /** - * Gets notes based on the provided filter. 
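
As a usage sketch of the filter shape this interface documents; `db`, the addresses, and the slot are illustrative placeholders:

```ts
import { NoteStatus } from '@aztec/stdlib/note';

// Fetch the active notes for one contract/slot/owner combination.
const notes = await db.getNotes({
  contractAddress,           // AztecAddress of the contract (placeholder)
  storageSlot,               // Fr slot (placeholder)
  owner: ownerAddress,       // matches notes by the owner's address point
  status: NoteStatus.ACTIVE, // the default; ACTIVE_OR_NULLIFIED includes nullified notes
});
```
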
- * @param filter - The filter to apply to the notes. - * @returns The requested notes. - */ - getNotes(filter: NotesFilter): Promise; - - /** - * Adds a note to DB. - * @param note - The note to add. - * @param scope - The scope to add the note under. Currently optional. - * @remark - Will create a database for the scope if it does not already exist. - */ - addNote(note: NoteDao, scope?: AztecAddress): Promise; - - /** - * Adds a nullified note to DB. - * @param note - The note to add. - */ - addNullifiedNote(note: NoteDao): Promise; - - /** - * Adds an array of notes to DB. - * This function is used to insert multiple notes to the database at once, - * which can improve performance when dealing with large numbers of transactions. - * - * @param notes - An array of notes. - * @param scope - The scope to add the notes under. Currently optional. - * @remark - Will create a database for the scope if it does not already exist. - */ - addNotes(notes: NoteDao[], scope?: AztecAddress): Promise; - - /** - * Remove nullified notes associated with the given account and nullifiers. - * - * @param nullifiers - An array of Fr instances representing nullifiers to be matched. - * @param account - A PublicKey instance representing the account for which the records are being removed. - * @returns Removed notes. - */ - removeNullifiedNotes(nullifiers: InBlock[], account: PublicKey): Promise; - - /** - * Gets the most recently processed block number. - * @returns The most recently processed block number or undefined if never synched. - */ - getBlockNumber(): Promise; - - /** - * Retrieve the stored Block Header from the database. - * The function returns a Promise that resolves to the Block Header. - * This data is required to reproduce block attestations. - * Throws an error if the block header is not available within the database. - * - * note: this data is a combination of the tree roots and the global variables hash. - * - * @returns The Block Header. - * @throws If no block have been processed yet. - */ - getBlockHeader(): Promise; - - /** - * Set the latest Block Header. - * Note that this will overwrite any existing hash or roots in the database. - * - * @param header - An object containing the most recent block header. - * @returns A Promise that resolves when the hash has been successfully updated in the database. - */ - setHeader(header: BlockHeader): Promise; - - /** - * Adds sender address to the database. - * @param address - The address to add to the address book. - * @returns A promise resolving to true if the address was added, false if it already exists. - */ - addSenderAddress(address: AztecAddress): Promise; - - /** - * Retrieves the list of sender addresses in the address book. - * @returns An array of Aztec addresses. - */ - getSenderAddresses(): Promise; - - /** - * Removes a sender address from the database. - * @param address - The address to remove from the address book. - * @returns A promise resolving to true if the address was removed, false if it does not exist. - */ - removeSenderAddress(address: AztecAddress): Promise; - - /** - * Adds complete address to the database. - * @param address - The complete address to add. - * @returns A promise resolving to true if the address was added, false if it already exists. - * @throws If we try to add a CompleteAddress with the same AztecAddress but different public key or partial - * address. - */ - addCompleteAddress(address: CompleteAddress): Promise; - - /** - * Retrieve the complete address associated to a given address. 
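
A short round trip over the address-book methods documented above; `db` and `sender` are illustrative:

```ts
const added = await db.addSenderAddress(sender);      // true on first insert, false if present
const senders = await db.getSenderAddresses();        // now includes `sender`
const removed = await db.removeSenderAddress(sender); // true because it existed
```
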
- * @param account - The account address. - * @returns A promise that resolves to a CompleteAddress instance if found, or undefined if not found. - */ - getCompleteAddress(account: AztecAddress): Promise; - - /** - * Retrieves the list of complete addresses added to this database - * @returns A promise that resolves to an array of AztecAddress instances. - */ - getCompleteAddresses(): Promise; - - /** - * Returns the estimated size in bytes of this db. - * @returns The estimated size in bytes of this db. - */ - estimateSize(): Promise; - - /** - * Returns the last seen indexes for the provided app siloed tagging secrets or 0 if they've never been seen. - * @param appTaggingSecrets - The app siloed tagging secrets. - * @returns The indexes for the provided secrets, 0 if they've never been seen. - */ - getTaggingSecretsIndexesAsRecipient(appTaggingSecrets: Fr[]): Promise; - - /** - * Returns the last seen indexes for the provided app siloed tagging secrets or 0 if they've never been used - * @param appTaggingSecrets - The app siloed tagging secrets. - * @returns The indexes for the provided secrets, 0 if they've never been seen. - */ - getTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]): Promise; - - /** - * Sets the index for the provided app siloed tagging secrets - * To be used when the generated tags have been "seen" as a sender - * @param appTaggingSecrets - The app siloed tagging secrets. - */ - setTaggingSecretsIndexesAsSender(indexedTaggingSecrets: IndexedTaggingSecret[]): Promise; - - /** - * Sets the index for the provided app siloed tagging secrets - * To be used when the generated tags have been "seen" as a recipient - * @param appTaggingSecrets - The app siloed tagging secrets. - */ - setTaggingSecretsIndexesAsRecipient(indexedTaggingSecrets: IndexedTaggingSecret[]): Promise; - - /** - * Deletes all notes synched after this block number. - * @param blockNumber - All notes strictly after this block number are removed. - */ - removeNotesAfter(blockNumber: number): Promise; - - /** - * Restores notes nullified after the given block. - * @param blockNumber - All nullifiers strictly after this block are removed. - */ - unnullifyNotesAfter(blockNumber: number): Promise; - - /** - * Resets the indexes used to sync notes to 0 for every sender and recipient, causing the next sync process to - * start from scratch, taking longer than usual. - * This can help fix desynchronization issues, including finding logs that had previously been overlooked, and - * is also required to deal with chain reorgs. - */ - resetNoteSyncData(): Promise; - - /** - * Stores arbitrary information in a per-contract non-volatile database (called capsules), which can later - * be retrieved with `loadCapsule`. If data was already stored at this slot, it is overwritten. - * @param contractAddress - The contract address to scope the data under. - * @param slot - The slot in the database in which to store the value. Slots need not be contiguous. - * @param capsule - An array of field elements representing the capsule. - * @remarks A capsule is a "blob" of data that is passed to the contract through an oracle. It works similarly - * to public contract storage in that it's indexed by the contract address and storage slot but instead of the global - * network state it's backed by local PXE db. - */ - storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise; - - /** - * Returns data previously stored via `storeCapsule` in the per-contract non-volatile database (called capsules). 
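
Taken together, the capsule methods behave like per-contract keyed storage. A sketch of the documented semantics; `db` (a PxeDatabase) and `contract` (an AztecAddress) are assumed to exist:

```ts
import { Fr } from '@aztec/foundation/fields';

// Slots are scoped to the contract address and need not be contiguous.
await db.storeCapsule(contract, new Fr(1), [new Fr(42), new Fr(43)]); // overwrites on re-store
const capsule = await db.loadCapsule(contract, new Fr(1));            // [42, 43]; null if unset
await db.copyCapsule(contract, new Fr(1), new Fr(5), 1);              // copy slot 1 -> slot 5
await db.deleteCapsule(contract, new Fr(1));                          // no-op if already empty
```
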
- * @param contractAddress - The contract address under which the data is scoped. - * @param slot - The slot in the database to read. - * @returns The stored data or `null` if no data is stored under the slot. - */ - loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise; - - /** - * Deletes data in the per-contract non-volatile database (called capsules). Does nothing if no data was present. - * @param contractAddress - The contract address under which the data is scoped. - * @param slot - The slot in the database to delete. - */ - deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise; - - /** - * Copies a number of contiguous entries in the per-contract non-volatile database (called capsules). This allows for - * efficient data structures by avoiding repeated calls to `loadCapsule` and `storeCapsule`. - * Supports overlapping source and destination regions (which will result in the overlapped source values being - * overwritten). All copied slots must exist in the database (i.e. have been stored and not deleted) - * - * @param contractAddress - The contract address under which the data is scoped. - * @param srcSlot - The first slot to copy from. - * @param dstSlot - The first slot to copy to. - * @param numEntries - The number of entries to copy. - */ - copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise; -} diff --git a/yarn-project/pxe/src/database/interfaces/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/interfaces/pxe_database_test_suite.ts deleted file mode 100644 index f6ae1154761..00000000000 --- a/yarn-project/pxe/src/database/interfaces/pxe_database_test_suite.ts +++ /dev/null @@ -1,558 +0,0 @@ -import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; -import { timesParallel } from '@aztec/foundation/collection'; -import { randomInt } from '@aztec/foundation/crypto'; -import { Fr, Point } from '@aztec/foundation/fields'; -import { BenchmarkingContractArtifact } from '@aztec/noir-contracts.js/Benchmarking'; -import { TestContractArtifact } from '@aztec/noir-contracts.js/Test'; -import { FunctionType } from '@aztec/stdlib/abi'; -import { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { CompleteAddress, SerializableContractInstance } from '@aztec/stdlib/contract'; -import { PublicKeys } from '@aztec/stdlib/keys'; -import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note'; -import { makeHeader, randomTxHash } from '@aztec/stdlib/testing'; - -import times from 'lodash.times'; - -import { NoteDao } from '../note_dao.js'; -import type { PxeDatabase } from './pxe_database.js'; - -/** - * A common test suite for a PXE database. - * @param getDatabase - A function that returns a database instance. 
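
The suite is parameterized over a factory so every implementation runs the same cases; the LMDB-backed test removed later in this diff wired it up like this:

```ts
import { openTmpStore } from '@aztec/kv-store/lmdb-v2';

describe('KVPxeDatabase', () => {
  let database: KVPxeDatabase;

  beforeEach(async () => {
    database = await KVPxeDatabase.create(await openTmpStore('test'));
  });

  describePxeDatabase(() => database);
});
```
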
- */ -export function describePxeDatabase(getDatabase: () => PxeDatabase) { - let database: PxeDatabase; - - beforeEach(() => { - database = getDatabase(); - }); - - describe('Database', () => { - describe('auth witnesses', () => { - it('stores and retrieves auth witnesses', async () => { - const messageHash = Fr.random(); - const witness = [Fr.random(), Fr.random()]; - - await database.addAuthWitness(messageHash, witness); - await expect(database.getAuthWitness(messageHash)).resolves.toEqual(witness); - }); - - it("returns undefined if it doesn't have auth witnesses for the message", async () => { - const messageHash = Fr.random(); - await expect(database.getAuthWitness(messageHash)).resolves.toBeUndefined(); - }); - - it.skip('refuses to overwrite auth witnesses for the same message', async () => { - const messageHash = Fr.random(); - const witness = [Fr.random(), Fr.random()]; - - await database.addAuthWitness(messageHash, witness); - await expect(database.addAuthWitness(messageHash, witness)).rejects.toThrow(); - }); - }); - - describe('incoming notes', () => { - let owners: CompleteAddress[]; - let contractAddresses: AztecAddress[]; - let storageSlots: Fr[]; - let notes: NoteDao[]; - - const filteringTests: [() => Promise, () => Promise][] = [ - [() => Promise.resolve({}), () => Promise.resolve(notes)], - - [ - () => Promise.resolve({ contractAddress: contractAddresses[0] }), - () => Promise.resolve(notes.filter(note => note.contractAddress.equals(contractAddresses[0]))), - ], - [async () => ({ contractAddress: await AztecAddress.random() }), () => Promise.resolve([])], - - [ - () => Promise.resolve({ storageSlot: storageSlots[0] }), - () => Promise.resolve(notes.filter(note => note.storageSlot.equals(storageSlots[0]))), - ], - [() => Promise.resolve({ storageSlot: Fr.random() }), () => Promise.resolve([])], - - [() => Promise.resolve({ txHash: notes[0].txHash }), () => Promise.resolve([notes[0]])], - [() => Promise.resolve({ txHash: randomTxHash() }), () => Promise.resolve([])], - - [ - () => Promise.resolve({ owner: owners[0].address }), - async () => { - const ownerAddressPoint = await owners[0].address.toAddressPoint(); - return notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); - }, - ], - - [ - () => Promise.resolve({ contractAddress: contractAddresses[0], storageSlot: storageSlots[0] }), - () => - Promise.resolve( - notes.filter( - note => note.contractAddress.equals(contractAddresses[0]) && note.storageSlot.equals(storageSlots[0]), - ), - ), - ], - [ - () => Promise.resolve({ contractAddress: contractAddresses[0], storageSlot: storageSlots[1] }), - () => Promise.resolve([]), - ], - ]; - - beforeEach(async () => { - owners = await timesParallel(2, () => CompleteAddress.random()); - contractAddresses = await timesParallel(2, () => AztecAddress.random()); - storageSlots = times(2, () => Fr.random()); - - notes = await timesParallel(10, async i => { - const addressPoint = await owners[i % owners.length].address.toAddressPoint(); - return NoteDao.random({ - contractAddress: contractAddresses[i % contractAddresses.length], - storageSlot: storageSlots[i % storageSlots.length], - addressPoint, - index: BigInt(i), - l2BlockNumber: i, - }); - }); - - for (const owner of owners) { - await database.addCompleteAddress(owner); - } - }); - - it.each(filteringTests)('stores notes in bulk and retrieves notes', async (getFilter, getExpected) => { - await database.addNotes(notes); - const returnedNotes = await database.getNotes(await getFilter()); - const expected = await 
getExpected(); - expect(returnedNotes.sort()).toEqual(expected.sort()); - }); - - it.each(filteringTests)('stores notes one by one and retrieves notes', async (getFilter, getExpected) => { - for (const note of notes) { - await database.addNote(note); - } - - const returnedNotes = await database.getNotes(await getFilter()); - - const expected = await getExpected(); - expect(returnedNotes.sort()).toEqual(expected.sort()); - }); - - it.each(filteringTests)('retrieves nullified notes', async (getFilter, getExpected) => { - await database.addNotes(notes); - - // Nullify all notes and use the same filter as other test cases - for (const owner of owners) { - const ownerAddressPoint = await owner.address.toAddressPoint(); - const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); - const nullifiers = notesToNullify.map(note => ({ - data: note.siloedNullifier, - l2BlockNumber: note.l2BlockNumber, - l2BlockHash: note.l2BlockHash, - })); - await expect(database.removeNullifiedNotes(nullifiers, ownerAddressPoint)).resolves.toEqual(notesToNullify); - } - const filter = await getFilter(); - const returnedNotes = await database.getNotes({ ...filter, status: NoteStatus.ACTIVE_OR_NULLIFIED }); - const expected = await getExpected(); - expect(returnedNotes.sort()).toEqual(expected.sort()); - }); - - it('skips nullified notes by default or when requesting active', async () => { - await database.addNotes(notes); - const ownerAddressPoint = await owners[0].address.toAddressPoint(); - const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); - const nullifiers = notesToNullify.map(note => ({ - data: note.siloedNullifier, - l2BlockNumber: note.l2BlockNumber, - l2BlockHash: note.l2BlockHash, - })); - await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( - notesToNullify, - ); - - const actualNotesWithDefault = await database.getNotes({}); - const actualNotesWithActive = await database.getNotes({ status: NoteStatus.ACTIVE }); - - expect(actualNotesWithDefault).toEqual(actualNotesWithActive); - expect(actualNotesWithActive).toEqual(notes.filter(note => !notesToNullify.includes(note))); - }); - - it('handles note unnullification', async () => { - await database.setHeader(makeHeader(randomInt(1000), 100, 0 /** slot number */)); - await database.addNotes(notes); - const ownerAddressPoint = await owners[0].address.toAddressPoint(); - - const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); - const nullifiers = notesToNullify.map(note => ({ - data: note.siloedNullifier, - l2BlockNumber: 99, - l2BlockHash: Fr.random().toString(), - })); - await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( - notesToNullify, - ); - await expect(database.unnullifyNotesAfter(98)).resolves.toEqual(undefined); - - const result = await database.getNotes({ status: NoteStatus.ACTIVE, owner: owners[0].address }); - - expect(result.sort()).toEqual([...notesToNullify].sort()); - }); - - it('returns active and nullified notes when requesting either', async () => { - await database.addNotes(notes); - const ownerAddressPoint = await owners[0].address.toAddressPoint(); - - const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); - const nullifiers = notesToNullify.map(note => ({ - data: note.siloedNullifier, - l2BlockNumber: note.l2BlockNumber, - l2BlockHash: note.l2BlockHash, - })); - await 
expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( - notesToNullify, - ); - - const result = await database.getNotes({ - status: NoteStatus.ACTIVE_OR_NULLIFIED, - }); - - // We have to compare the sorted arrays since the database does not return the same order as when originally - // inserted combining active and nullified results. - expect(result.sort()).toEqual([...notes].sort()); - }); - - it('stores notes one by one and retrieves notes with siloed account', async () => { - for (const note of notes.slice(0, 5)) { - await database.addNote(note, owners[0].address); - } - - for (const note of notes.slice(5)) { - await database.addNote(note, owners[1].address); - } - - const owner0Notes = await database.getNotes({ - scopes: [owners[0].address], - }); - - expect(owner0Notes.sort()).toEqual(notes.slice(0, 5).sort()); - - const owner1Notes = await database.getNotes({ - scopes: [owners[1].address], - }); - - expect(owner1Notes.sort()).toEqual(notes.slice(5).sort()); - - const bothOwnerNotes = await database.getNotes({ - scopes: [owners[0].address, owners[1].address], - }); - - expect(bothOwnerNotes.sort()).toEqual(notes.sort()); - }); - - it('a nullified note removes notes from all accounts in the pxe', async () => { - await database.addNote(notes[0], owners[0].address); - await database.addNote(notes[0], owners[1].address); - - await expect( - database.getNotes({ - scopes: [owners[0].address], - }), - ).resolves.toEqual([notes[0]]); - await expect( - database.getNotes({ - scopes: [owners[1].address], - }), - ).resolves.toEqual([notes[0]]); - const ownerAddressPoint = await owners[0].address.toAddressPoint(); - await expect( - database.removeNullifiedNotes( - [ - { - data: notes[0].siloedNullifier, - l2BlockHash: notes[0].l2BlockHash, - l2BlockNumber: notes[0].l2BlockNumber, - }, - ], - ownerAddressPoint, - ), - ).resolves.toEqual([notes[0]]); - - await expect( - database.getNotes({ - scopes: [owners[0].address], - }), - ).resolves.toEqual([]); - await expect( - database.getNotes({ - scopes: [owners[1].address], - }), - ).resolves.toEqual([]); - }); - - it('removes notes after a given block', async () => { - await database.addNotes(notes, owners[0].address); - - await database.removeNotesAfter(5); - const result = await database.getNotes({ scopes: [owners[0].address] }); - expect(new Set(result)).toEqual(new Set(notes.slice(0, 6))); - }); - }); - - describe('block header', () => { - it('stores and retrieves the block header', async () => { - const header = makeHeader(randomInt(1000), INITIAL_L2_BLOCK_NUM, 0 /** slot number */); - - await database.setHeader(header); - await expect(database.getBlockHeader()).resolves.toEqual(header); - }); - - it('rejects getting header if no block set', async () => { - await expect(() => database.getBlockHeader()).rejects.toThrow(); - }); - }); - - describe('addresses', () => { - it('stores and retrieves addresses', async () => { - const address = await CompleteAddress.random(); - await expect(database.addCompleteAddress(address)).resolves.toBe(true); - await expect(database.getCompleteAddress(address.address)).resolves.toEqual(address); - }); - - it('silently ignores an address it already knows about', async () => { - const address = await CompleteAddress.random(); - await expect(database.addCompleteAddress(address)).resolves.toBe(true); - await expect(database.addCompleteAddress(address)).resolves.toBe(false); - }); - - it.skip('refuses to overwrite an address with a different public key', async () => { - const 
address = await CompleteAddress.random(); - const otherAddress = await CompleteAddress.create( - address.address, - new PublicKeys(await Point.random(), await Point.random(), await Point.random(), await Point.random()), - address.partialAddress, - ); - - await database.addCompleteAddress(address); - await expect(database.addCompleteAddress(otherAddress)).rejects.toThrow(); - }); - - it('returns all addresses', async () => { - const addresses = await timesParallel(10, () => CompleteAddress.random()); - for (const address of addresses) { - await database.addCompleteAddress(address); - } - - const result = await database.getCompleteAddresses(); - expect(result).toEqual(expect.arrayContaining(addresses)); - }); - - it('returns a single address', async () => { - const addresses = await timesParallel(10, () => CompleteAddress.random()); - for (const address of addresses) { - await database.addCompleteAddress(address); - } - - const result = await database.getCompleteAddress(addresses[3].address); - expect(result).toEqual(addresses[3]); - }); - - it("returns an empty array if it doesn't have addresses", async () => { - expect(await database.getCompleteAddresses()).toEqual([]); - }); - - it("returns undefined if it doesn't have an address", async () => { - const completeAddress = await CompleteAddress.random(); - expect(await database.getCompleteAddress(completeAddress.address)).toBeUndefined(); - }); - }); - - describe('contracts', () => { - it('stores a contract artifact', async () => { - const artifact = BenchmarkingContractArtifact; - const id = Fr.random(); - await database.addContractArtifact(id, artifact); - await expect(database.getContractArtifact(id)).resolves.toEqual(artifact); - }); - - it('does not store a contract artifact with a duplicate private function selector', async () => { - const artifact = TestContractArtifact; - const index = artifact.functions.findIndex(fn => fn.functionType === FunctionType.PRIVATE); - - const copiedFn = structuredClone(artifact.functions[index]); - artifact.functions.push(copiedFn); - - const id = Fr.random(); - await expect(database.addContractArtifact(id, artifact)).rejects.toThrow( - 'Repeated function selectors of private functions', - ); - }); - - it('stores a contract instance', async () => { - const address = await AztecAddress.random(); - const instance = (await SerializableContractInstance.random()).withAddress(address); - await database.addContractInstance(instance); - await expect(database.getContractInstance(address)).resolves.toEqual(instance); - }); - }); - - describe('contract non-volatile database', () => { - let contract: AztecAddress; - - beforeEach(async () => { - // Setup mock contract address - contract = await AztecAddress.random(); - }); - - it('stores and loads a single value', async () => { - const slot = new Fr(1); - const values = [new Fr(42)]; - - await database.storeCapsule(contract, slot, values); - const result = await database.loadCapsule(contract, slot); - expect(result).toEqual(values); - }); - - it('stores and loads multiple values', async () => { - const slot = new Fr(1); - const values = [new Fr(42), new Fr(43), new Fr(44)]; - - await database.storeCapsule(contract, slot, values); - const result = await database.loadCapsule(contract, slot); - expect(result).toEqual(values); - }); - - it('overwrites existing values', async () => { - const slot = new Fr(1); - const initialValues = [new Fr(42)]; - const newValues = [new Fr(100)]; - - await database.storeCapsule(contract, slot, initialValues); - await 
database.storeCapsule(contract, slot, newValues); - - const result = await database.loadCapsule(contract, slot); - expect(result).toEqual(newValues); - }); - - it('stores values for different contracts independently', async () => { - const anotherContract = await AztecAddress.random(); - const slot = new Fr(1); - const values1 = [new Fr(42)]; - const values2 = [new Fr(100)]; - - await database.storeCapsule(contract, slot, values1); - await database.storeCapsule(anotherContract, slot, values2); - - const result1 = await database.loadCapsule(contract, slot); - const result2 = await database.loadCapsule(anotherContract, slot); - - expect(result1).toEqual(values1); - expect(result2).toEqual(values2); - }); - - it('returns null for non-existent slots', async () => { - const slot = Fr.random(); - const result = await database.loadCapsule(contract, slot); - expect(result).toBeNull(); - }); - - it('deletes a slot', async () => { - const slot = new Fr(1); - const values = [new Fr(42)]; - - await database.storeCapsule(contract, slot, values); - await database.deleteCapsule(contract, slot); - - expect(await database.loadCapsule(contract, slot)).toBeNull(); - }); - - it('deletes an empty slot', async () => { - const slot = new Fr(1); - await database.deleteCapsule(contract, slot); - - expect(await database.loadCapsule(contract, slot)).toBeNull(); - }); - - it('copies a single value', async () => { - const slot = new Fr(1); - const values = [new Fr(42)]; - - await database.storeCapsule(contract, slot, values); - - const dstSlot = new Fr(5); - await database.copyCapsule(contract, slot, dstSlot, 1); - - expect(await database.loadCapsule(contract, dstSlot)).toEqual(values); - }); - - it('copies multiple non-overlapping values', async () => { - const src = new Fr(1); - const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - - await database.storeCapsule(contract, src, valuesArray[0]); - await database.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); - await database.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); - - const dst = new Fr(5); - await database.copyCapsule(contract, src, dst, 3); - - expect(await database.loadCapsule(contract, dst)).toEqual(valuesArray[0]); - expect(await database.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); - expect(await database.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); - }); - - it('copies overlapping values with src ahead', async () => { - const src = new Fr(1); - const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; - - await database.storeCapsule(contract, src, valuesArray[0]); - await database.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); - await database.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); - - const dst = new Fr(2); - await database.copyCapsule(contract, src, dst, 3); - - expect(await database.loadCapsule(contract, dst)).toEqual(valuesArray[0]); - expect(await database.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); - expect(await database.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); - - // Slots 2 and 3 (src[1] and src[2]) should have been overwritten since they are also dst[0] and dst[1] - expect(await database.loadCapsule(contract, src)).toEqual(valuesArray[0]); // src[0] (unchanged) - expect(await database.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[0]); // dst[0] - expect(await database.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[1]); // dst[1] - }); - 
-    it('copies overlapping values with dst ahead', async () => {
-      const src = new Fr(5);
-      const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]];
-
-      await database.storeCapsule(contract, src, valuesArray[0]);
-      await database.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]);
-      await database.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]);
-
-      const dst = new Fr(4);
-      await database.copyCapsule(contract, src, dst, 3);
-
-      expect(await database.loadCapsule(contract, dst)).toEqual(valuesArray[0]);
-      expect(await database.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]);
-      expect(await database.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]);
-
-      // Slots 5 and 6 (src[0] and src[1]) should have been overwritten since they are also dst[1] and dst[2]
-      expect(await database.loadCapsule(contract, src)).toEqual(valuesArray[1]); // dst[1]
-      expect(await database.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[2]); // dst[2]
-      expect(await database.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[2]); // src[2] (unchanged)
-    });
-
-    it('copying fails if any value is empty', async () => {
-      const src = new Fr(1);
-      const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]];
-
-      await database.storeCapsule(contract, src, valuesArray[0]);
-      // We skip src[1]
-      await database.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]);
-
-      const dst = new Fr(5);
-      await expect(database.copyCapsule(contract, src, dst, 3)).rejects.toThrow('Attempted to copy empty slot');
-    });
-  });
-});
-}
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.test.ts b/yarn-project/pxe/src/database/kv_pxe_database.test.ts
deleted file mode 100644
index 294f219fa62..00000000000
--- a/yarn-project/pxe/src/database/kv_pxe_database.test.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import { openTmpStore } from '@aztec/kv-store/lmdb-v2';
-
-import { describePxeDatabase } from './interfaces/pxe_database_test_suite.js';
-import { KVPxeDatabase } from './kv_pxe_database.js';
-
-describe('KVPxeDatabase', () => {
-  let database: KVPxeDatabase;
-
-  beforeEach(async () => {
-    database = await KVPxeDatabase.create(await openTmpStore('test'));
-  });
-
-  describePxeDatabase(() => database);
-});
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts
deleted file mode 100644
index dd76d73b5da..00000000000
--- a/yarn-project/pxe/src/database/kv_pxe_database.ts
+++ /dev/null
@@ -1,670 +0,0 @@
-import { toBufferBE } from '@aztec/foundation/bigint-buffer';
-import { Fr, type Point } from '@aztec/foundation/fields';
-import { toArray } from '@aztec/foundation/iterable';
-import { type LogFn, createDebugOnlyLogger } from '@aztec/foundation/log';
-import type {
-  AztecAsyncArray,
-  AztecAsyncKVStore,
-  AztecAsyncMap,
-  AztecAsyncMultiMap,
-  AztecAsyncSingleton,
-} from '@aztec/kv-store';
-import { type ContractArtifact, FunctionSelector, FunctionType } from '@aztec/stdlib/abi';
-import { contractArtifactFromBuffer, contractArtifactToBuffer } from '@aztec/stdlib/abi';
-import { AztecAddress } from '@aztec/stdlib/aztec-address';
-import type { InBlock } from '@aztec/stdlib/block';
-import {
-  CompleteAddress,
-  type ContractInstanceWithAddress,
-  SerializableContractInstance,
-} from '@aztec/stdlib/contract';
-import type { PublicKey } from '@aztec/stdlib/keys';
-import type { IndexedTaggingSecret } from '@aztec/stdlib/logs';
-import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note';
-import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { BlockHeader } from '@aztec/stdlib/tx';
-
-import type { PxeDatabase } from './interfaces/pxe_database.js';
-import { NoteDao } from './note_dao.js';
-
-/**
- * A PXE database backed by LMDB.
- */
-export class KVPxeDatabase implements PxeDatabase {
-  public static readonly SCHEMA_VERSION = 1;
-
-  #synchronizedBlock: AztecAsyncSingleton;
-  #completeAddresses: AztecAsyncArray;
-  #completeAddressIndex: AztecAsyncMap;
-  #addressBook: AztecAsyncMap;
-  #authWitnesses: AztecAsyncMap;
-  #notes: AztecAsyncMap;
-  #nullifiedNotes: AztecAsyncMap;
-  #nullifierToNoteId: AztecAsyncMap;
-  #nullifiersByBlockNumber: AztecAsyncMultiMap;
-
-  #nullifiedNotesToScope: AztecAsyncMultiMap;
-  #nullifiedNotesByContract: AztecAsyncMultiMap;
-  #nullifiedNotesByStorageSlot: AztecAsyncMultiMap;
-  #nullifiedNotesByTxHash: AztecAsyncMultiMap;
-  #nullifiedNotesByAddressPoint: AztecAsyncMultiMap;
-  #nullifiedNotesByNullifier: AztecAsyncMap;
-  #contractArtifacts: AztecAsyncMap;
-  #contractInstances: AztecAsyncMap;
-  #db: AztecAsyncKVStore;
-
-  #scopes: AztecAsyncMap;
-  #notesToScope: AztecAsyncMultiMap;
-  #notesByContractAndScope: Map>;
-  #notesByStorageSlotAndScope: Map>;
-  #notesByTxHashAndScope: Map>;
-  #notesByAddressPointAndScope: Map>;
-
-  // Stores the last index used for each tagging secret, taking direction into account
-  // This is necessary to avoid reusing the same index for the same secret, which happens if
-  // sender and recipient are the same
-  #taggingSecretIndexesForSenders: AztecAsyncMap;
-  #taggingSecretIndexesForRecipients: AztecAsyncMap;
-
-  // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}`
-  #capsules: AztecAsyncMap;
-
-  debug: LogFn;
-
-  protected constructor(private db: AztecAsyncKVStore) {
-    this.#db = db;
-
-    this.#completeAddresses = db.openArray('complete_addresses');
-    this.#completeAddressIndex = db.openMap('complete_address_index');
-
-    this.#addressBook = db.openMap('address_book');
-
-    this.#authWitnesses = db.openMap('auth_witnesses');
-
-    this.#contractArtifacts = db.openMap('contract_artifacts');
-    this.#contractInstances = db.openMap('contracts_instances');
-
-    this.#synchronizedBlock = db.openSingleton('header');
-
-    this.#notes = db.openMap('notes');
-    this.#nullifiedNotes = db.openMap('nullified_notes');
-    this.#nullifierToNoteId = db.openMap('nullifier_to_note');
-    this.#nullifiersByBlockNumber = db.openMultiMap('nullifier_to_block_number');
-
-    this.#nullifiedNotesToScope = db.openMultiMap('nullified_notes_to_scope');
-    this.#nullifiedNotesByContract = db.openMultiMap('nullified_notes_by_contract');
-    this.#nullifiedNotesByStorageSlot = db.openMultiMap('nullified_notes_by_storage_slot');
-    this.#nullifiedNotesByTxHash = db.openMultiMap('nullified_notes_by_tx_hash');
-    this.#nullifiedNotesByAddressPoint = db.openMultiMap('nullified_notes_by_address_point');
-    this.#nullifiedNotesByNullifier = db.openMap('nullified_notes_by_nullifier');
-
-    this.#scopes = db.openMap('scopes');
-    this.#notesToScope = db.openMultiMap('notes_to_scope');
-    this.#notesByContractAndScope = new Map>();
-    this.#notesByStorageSlotAndScope = new Map>();
-    this.#notesByTxHashAndScope = new Map>();
-    this.#notesByAddressPointAndScope = new Map>();
-
-    this.#taggingSecretIndexesForSenders = db.openMap('tagging_secret_indexes_for_senders');
-    this.#taggingSecretIndexesForRecipients = db.openMap('tagging_secret_indexes_for_recipients');
-
-    this.#capsules = db.openMap('capsules');
-
-    this.debug = createDebugOnlyLogger('aztec:kv-pxe-database');
-  }
-
-  public static async create(db: AztecAsyncKVStore): Promise {
-    const pxeDB = new KVPxeDatabase(db);
-    for await (const scope of pxeDB.#scopes.keysAsync()) {
-      pxeDB.#notesByContractAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_contract`));
-      pxeDB.#notesByStorageSlotAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_storage_slot`));
-      pxeDB.#notesByTxHashAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_tx_hash`));
-      pxeDB.#notesByAddressPointAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_address_point`));
-    }
-    return pxeDB;
-  }
-
-  public async getContract(
-    address: AztecAddress,
-  ): Promise<(ContractInstanceWithAddress & ContractArtifact) | undefined> {
-    const instance = await this.getContractInstance(address);
-    const artifact = instance && (await this.getContractArtifact(instance?.currentContractClassId));
-    if (!instance || !artifact) {
-      return undefined;
-    }
-    return { ...instance, ...artifact };
-  }
-
-  public async addContractArtifact(id: Fr, contract: ContractArtifact): Promise {
-    const privateFunctions = contract.functions.filter(
-      functionArtifact => functionArtifact.functionType === FunctionType.PRIVATE,
-    );
-
-    const privateSelectors = await Promise.all(
-      privateFunctions.map(async privateFunctionArtifact =>
-        (
-          await FunctionSelector.fromNameAndParameters(privateFunctionArtifact.name, privateFunctionArtifact.parameters)
-        ).toString(),
-      ),
-    );
-
-    if (privateSelectors.length !== new Set(privateSelectors).size) {
-      throw new Error('Repeated function selectors of private functions');
-    }
-
-    await this.#contractArtifacts.set(id.toString(), contractArtifactToBuffer(contract));
-  }
-
-  public async getContractArtifact(id: Fr): Promise {
-    const contract = await this.#contractArtifacts.getAsync(id.toString());
-    // TODO(@spalladino): AztecAsyncMap lies and returns Uint8Arrays instead of Buffers, hence the extra Buffer.from.
-    return contract && contractArtifactFromBuffer(Buffer.from(contract));
-  }
-
-  async addContractInstance(contract: ContractInstanceWithAddress): Promise {
-    await this.#contractInstances.set(
-      contract.address.toString(),
-      new SerializableContractInstance(contract).toBuffer(),
-    );
-  }
-
-  async getContractInstance(address: AztecAddress): Promise {
-    const contract = await this.#contractInstances.getAsync(address.toString());
-    return contract && SerializableContractInstance.fromBuffer(contract).withAddress(address);
-  }
-
-  async getContractsAddresses(): Promise {
-    const keys = await toArray(this.#contractInstances.keysAsync());
-    return keys.map(AztecAddress.fromString);
-  }
-
-  async addAuthWitness(messageHash: Fr, witness: Fr[]): Promise {
-    await this.#authWitnesses.set(
-      messageHash.toString(),
-      witness.map(w => w.toBuffer()),
-    );
-  }
-
-  async getAuthWitness(messageHash: Fr): Promise {
-    const witness = await this.#authWitnesses.getAsync(messageHash.toString());
-    return Promise.resolve(witness?.map(w => Fr.fromBuffer(w)));
-  }
-
-  async addNote(note: NoteDao, scope?: AztecAddress): Promise {
-    await this.addNotes([note], scope);
-  }
-
-  async addNotes(notes: NoteDao[], scope: AztecAddress = AztecAddress.ZERO): Promise {
-    if (!(await this.#scopes.hasAsync(scope.toString()))) {
-      await this.#addScope(scope);
-    }
-
-    return this.db.transactionAsync(async () => {
-      for (const dao of notes) {
-        // store notes by their index in the notes hash tree
-        // this provides the uniqueness we need to store individual notes
-        // and should also return notes in the order that they were created.
-        // Had we stored them by their nullifier, they would be returned in random order
-        const noteIndex = toBufferBE(dao.index, 32).toString('hex');
-        await this.#notes.set(noteIndex, dao.toBuffer());
-        await this.#notesToScope.set(noteIndex, scope.toString());
-        await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex);
-
-        await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex);
-        await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex);
-        await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex);
-        await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex);
-      }
-    });
-  }
-
-  public removeNotesAfter(blockNumber: number): Promise {
-    return this.db.transactionAsync(async () => {
-      const notes = await toArray(this.#notes.valuesAsync());
-      for (const note of notes) {
-        const noteDao = NoteDao.fromBuffer(note);
-        if (noteDao.l2BlockNumber > blockNumber) {
-          const noteIndex = toBufferBE(noteDao.index, 32).toString('hex');
-          await this.#notes.delete(noteIndex);
-          await this.#notesToScope.delete(noteIndex);
-          await this.#nullifierToNoteId.delete(noteDao.siloedNullifier.toString());
-          const scopes = await toArray(this.#scopes.keysAsync());
-          for (const scope of scopes) {
-            await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(noteDao.addressPoint.toString(), noteIndex);
-            await this.#notesByTxHashAndScope.get(scope)!.deleteValue(noteDao.txHash.toString(), noteIndex);
-            await this.#notesByContractAndScope.get(scope)!.deleteValue(noteDao.contractAddress.toString(), noteIndex);
-            await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(noteDao.storageSlot.toString(), noteIndex);
-          }
-        }
-      }
-    });
-  }
-
-  public async unnullifyNotesAfter(blockNumber: number): Promise {
-    const nullifiersToUndo: string[] = [];
-    const currentBlockNumber = blockNumber + 1;
-    const maxBlockNumber = (await this.getBlockNumber()) ?? currentBlockNumber;
-    for (let i = currentBlockNumber; i <= maxBlockNumber; i++) {
-      nullifiersToUndo.push(...(await toArray(this.#nullifiersByBlockNumber.getValuesAsync(i))));
-    }
-
-    const notesIndexesToReinsert = await Promise.all(
-      nullifiersToUndo.map(nullifier => this.#nullifiedNotesByNullifier.getAsync(nullifier)),
-    );
-    const notNullNoteIndexes = notesIndexesToReinsert.filter(noteIndex => noteIndex != undefined);
-    const nullifiedNoteBuffers = await Promise.all(
-      notNullNoteIndexes.map(noteIndex => this.#nullifiedNotes.getAsync(noteIndex!)),
-    );
-    const noteDaos = nullifiedNoteBuffers
-      .filter(buffer => buffer != undefined)
-      .map(buffer => NoteDao.fromBuffer(buffer!));
-
-    await this.db.transactionAsync(async () => {
-      for (const dao of noteDaos) {
-        const noteIndex = toBufferBE(dao.index, 32).toString('hex');
-        await this.#notes.set(noteIndex, dao.toBuffer());
-        await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex);
-
-        let scopes = (await toArray(this.#nullifiedNotesToScope.getValuesAsync(noteIndex))) ?? [];
-
-        if (scopes.length === 0) {
-          scopes = [new AztecAddress(dao.addressPoint.x).toString()];
-        }
-
-        for (const scope of scopes) {
-          await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex);
-          await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex);
-          await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex);
-          await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex);
-          await this.#notesToScope.set(noteIndex, scope);
-        }
-
-        await this.#nullifiedNotes.delete(noteIndex);
-        await this.#nullifiedNotesToScope.delete(noteIndex);
-        await this.#nullifiersByBlockNumber.deleteValue(dao.l2BlockNumber, dao.siloedNullifier.toString());
-        await this.#nullifiedNotesByContract.deleteValue(dao.contractAddress.toString(), noteIndex);
-        await this.#nullifiedNotesByStorageSlot.deleteValue(dao.storageSlot.toString(), noteIndex);
-        await this.#nullifiedNotesByTxHash.deleteValue(dao.txHash.toString(), noteIndex);
-        await this.#nullifiedNotesByAddressPoint.deleteValue(dao.addressPoint.toString(), noteIndex);
-        await this.#nullifiedNotesByNullifier.delete(dao.siloedNullifier.toString());
-      }
-    });
-  }
-
-  async getNotes(filter: NotesFilter): Promise {
-    const publicKey: PublicKey | undefined = filter.owner ? await filter.owner.toAddressPoint() : undefined;
-
-    filter.status = filter.status ?? NoteStatus.ACTIVE;
-
-    const candidateNoteSources = [];
-
-    filter.scopes ??= (await toArray(this.#scopes.keysAsync())).map(addressString =>
-      AztecAddress.fromString(addressString),
-    );
-
-    const activeNoteIdsPerScope: string[][] = [];
-
-    for (const scope of new Set(filter.scopes)) {
-      const formattedScopeString = scope.toString();
-      if (!(await this.#scopes.hasAsync(formattedScopeString))) {
-        throw new Error('Trying to get incoming notes of an scope that is not in the PXE database');
-      }
-
-      activeNoteIdsPerScope.push(
-        publicKey
-          ? await toArray(
-              this.#notesByAddressPointAndScope.get(formattedScopeString)!.getValuesAsync(publicKey.toString()),
-            )
-          : filter.txHash
-            ? await toArray(
-                this.#notesByTxHashAndScope.get(formattedScopeString)!.getValuesAsync(filter.txHash.toString()),
-              )
-            : filter.contractAddress
-              ? await toArray(
-                  this.#notesByContractAndScope
-                    .get(formattedScopeString)!
-                    .getValuesAsync(filter.contractAddress.toString()),
-                )
-              : filter.storageSlot
-                ? await toArray(
-                    this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValuesAsync(filter.storageSlot.toString()),
-                  )
-                : await toArray(this.#notesByAddressPointAndScope.get(formattedScopeString)!.valuesAsync()),
      );
-    }
-
-    candidateNoteSources.push({
-      ids: new Set(activeNoteIdsPerScope.flat()),
-      notes: this.#notes,
-    });
-
-    if (filter.status == NoteStatus.ACTIVE_OR_NULLIFIED) {
-      candidateNoteSources.push({
-        ids: publicKey
-          ? await toArray(this.#nullifiedNotesByAddressPoint.getValuesAsync(publicKey.toString()))
-          : filter.txHash
-            ? await toArray(this.#nullifiedNotesByTxHash.getValuesAsync(filter.txHash.toString()))
-            : filter.contractAddress
-              ? await toArray(this.#nullifiedNotesByContract.getValuesAsync(filter.contractAddress.toString()))
-              : filter.storageSlot
-                ? await toArray(this.#nullifiedNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString()))
-                : await toArray(this.#nullifiedNotes.keysAsync()),
-        notes: this.#nullifiedNotes,
-      });
-    }
-
-    const result: NoteDao[] = [];
-    for (const { ids, notes } of candidateNoteSources) {
-      for (const id of ids) {
-        const serializedNote = await notes.getAsync(id);
-        if (!serializedNote) {
-          continue;
-        }
-
-        const note = NoteDao.fromBuffer(serializedNote);
-        if (filter.contractAddress && !note.contractAddress.equals(filter.contractAddress)) {
-          continue;
-        }
-
-        if (filter.txHash && !note.txHash.equals(filter.txHash)) {
-          continue;
-        }
-
-        if (filter.storageSlot && !note.storageSlot.equals(filter.storageSlot!)) {
-          continue;
-        }
-
-        if (publicKey && !note.addressPoint.equals(publicKey)) {
-          continue;
-        }
-
-        if (filter.siloedNullifier && !note.siloedNullifier.equals(filter.siloedNullifier)) {
-          continue;
-        }
-
-        result.push(note);
-      }
-    }
-
-    return result;
-  }
-
-  removeNullifiedNotes(nullifiers: InBlock[], accountAddressPoint: Point): Promise {
-    if (nullifiers.length === 0) {
-      return Promise.resolve([]);
-    }
-
-    return this.db.transactionAsync(async () => {
-      const nullifiedNotes: NoteDao[] = [];
-
-      for (const blockScopedNullifier of nullifiers) {
-        const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier;
-        const noteIndex = await this.#nullifierToNoteId.getAsync(nullifier.toString());
-        if (!noteIndex) {
-          continue;
-        }
-
-        const noteBuffer = noteIndex ? await this.#notes.getAsync(noteIndex) : undefined;
-
-        if (!noteBuffer) {
-          // note doesn't exist. Maybe it got nullified already
-          continue;
-        }
-        const noteScopes = (await toArray(this.#notesToScope.getValuesAsync(noteIndex))) ?? [];
-        const note = NoteDao.fromBuffer(noteBuffer);
-        if (!note.addressPoint.equals(accountAddressPoint)) {
-          // tried to nullify someone else's note
-          continue;
-        }
-
-        nullifiedNotes.push(note);
-
-        await this.#notes.delete(noteIndex);
-        await this.#notesToScope.delete(noteIndex);
-
-        const scopes = await toArray(this.#scopes.keysAsync());
-
-        for (const scope of scopes) {
-          await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(accountAddressPoint.toString(), noteIndex);
-          await this.#notesByTxHashAndScope.get(scope)!.deleteValue(note.txHash.toString(), noteIndex);
-          await this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex);
-          await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex);
-        }
-
-        if (noteScopes !== undefined) {
-          for (const scope of noteScopes) {
-            await this.#nullifiedNotesToScope.set(noteIndex, scope);
-          }
-        }
-        await this.#nullifiedNotes.set(noteIndex, note.toBuffer());
-        await this.#nullifiersByBlockNumber.set(blockNumber, nullifier.toString());
-        await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex);
-        await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex);
-        await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex);
-        await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex);
-        await this.#nullifiedNotesByNullifier.set(nullifier.toString(), noteIndex);
-
-        await this.#nullifierToNoteId.delete(nullifier.toString());
-      }
-      return nullifiedNotes;
-    });
-  }
-
-  async addNullifiedNote(note: NoteDao): Promise {
-    const noteIndex = toBufferBE(note.index, 32).toString('hex');
-
-    await this.#nullifiedNotes.set(noteIndex, note.toBuffer());
-    await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex);
-    await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex);
-    await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex);
-    await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex);
-  }
-
-  async setHeader(header: BlockHeader): Promise {
-    await this.#synchronizedBlock.set(header.toBuffer());
-  }
-
-  async getBlockNumber(): Promise {
-    const headerBuffer = await this.#synchronizedBlock.getAsync();
-    if (!headerBuffer) {
-      return undefined;
-    }
-
-    return Number(BlockHeader.fromBuffer(headerBuffer).globalVariables.blockNumber.toBigInt());
-  }
-
-  async getBlockHeader(): Promise {
-    const headerBuffer = await this.#synchronizedBlock.getAsync();
-    if (!headerBuffer) {
-      throw new Error(`Header not set`);
-    }
-
-    return BlockHeader.fromBuffer(headerBuffer);
-  }
-
-  async #addScope(scope: AztecAddress): Promise {
-    const scopeString = scope.toString();
-
-    if (await this.#scopes.hasAsync(scopeString)) {
-      return false;
-    }
-
-    await this.#scopes.set(scopeString, true);
-    this.#notesByContractAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_contract`));
-    this.#notesByStorageSlotAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_storage_slot`));
-    this.#notesByTxHashAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_tx_hash`));
-    this.#notesByAddressPointAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_address_point`));
-
-    return true;
-  }
-
-  addCompleteAddress(completeAddress: CompleteAddress): Promise {
-    return this.db.transactionAsync(async () => {
-      await this.#addScope(completeAddress.address);
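-      // #addScope is a no-op when the scope is already registered; otherwise it opens the per-scope
-      // note indexes (by contract, storage slot, tx hash and address point) before anything is stored for it.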
-
-      const addressString = completeAddress.address.toString();
-      const buffer = completeAddress.toBuffer();
-      const existing = await this.#completeAddressIndex.getAsync(addressString);
-      if (existing === undefined) {
-        const index = await this.#completeAddresses.lengthAsync();
-        await this.#completeAddresses.push(buffer);
-        await this.#completeAddressIndex.set(addressString, index);
-
-        return true;
-      } else {
-        const existingBuffer = await this.#completeAddresses.atAsync(existing);
-
-        if (existingBuffer && Buffer.from(existingBuffer).equals(buffer)) {
-          return false;
-        }
-
-        throw new Error(
-          `Complete address with aztec address ${addressString} but different public key or partial key already exists in memory database`,
-        );
-      }
-    });
-  }
-
-  async #getCompleteAddress(address: AztecAddress): Promise {
-    const index = await this.#completeAddressIndex.getAsync(address.toString());
-    if (index === undefined) {
-      return undefined;
-    }
-
-    const value = await this.#completeAddresses.atAsync(index);
-    return value ? await CompleteAddress.fromBuffer(value) : undefined;
-  }
-
-  getCompleteAddress(account: AztecAddress): Promise {
-    return this.#getCompleteAddress(account);
-  }
-
-  async getCompleteAddresses(): Promise {
-    return await Promise.all(
-      (await toArray(this.#completeAddresses.valuesAsync())).map(v => CompleteAddress.fromBuffer(v)),
-    );
-  }
-
-  async addSenderAddress(address: AztecAddress): Promise {
-    if (await this.#addressBook.hasAsync(address.toString())) {
-      return false;
-    }
-
-    await this.#addressBook.set(address.toString(), true);
-
-    return true;
-  }
-
-  async getSenderAddresses(): Promise {
-    return (await toArray(this.#addressBook.keysAsync())).map(AztecAddress.fromString);
-  }
-
-  async removeSenderAddress(address: AztecAddress): Promise {
-    if (!(await this.#addressBook.hasAsync(address.toString()))) {
-      return false;
-    }
-
-    await this.#addressBook.delete(address.toString());
-
-    return true;
-  }
-
-  async estimateSize(): Promise {
-    const noteSize = (await this.getNotes({})).reduce((sum, note) => sum + note.getSize(), 0);
-
-    const authWitsSize = (await toArray(this.#authWitnesses.valuesAsync())).reduce(
-      (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES,
-      0,
-    );
-    const addressesSize = (await this.#completeAddresses.lengthAsync()) * CompleteAddress.SIZE_IN_BYTES;
-    const treeRootsSize = Object.keys(MerkleTreeId).length * Fr.SIZE_IN_BYTES;
-
-    return noteSize + treeRootsSize + authWitsSize + addressesSize;
-  }
-
-  async setTaggingSecretsIndexesAsSender(indexedSecrets: IndexedTaggingSecret[]): Promise {
-    await this.#setTaggingSecretsIndexes(indexedSecrets, this.#taggingSecretIndexesForSenders);
-  }
-
-  async setTaggingSecretsIndexesAsRecipient(indexedSecrets: IndexedTaggingSecret[]): Promise {
-    await this.#setTaggingSecretsIndexes(indexedSecrets, this.#taggingSecretIndexesForRecipients);
-  }
-
-  async #setTaggingSecretsIndexes(indexedSecrets: IndexedTaggingSecret[], storageMap: AztecAsyncMap) {
-    await Promise.all(
-      indexedSecrets.map(indexedSecret =>
-        storageMap.set(indexedSecret.appTaggingSecret.toString(), indexedSecret.index),
-      ),
-    );
-  }
-
-  async getTaggingSecretsIndexesAsRecipient(appTaggingSecrets: Fr[]) {
-    return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForRecipients);
-  }
-
-  async getTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]) {
-    return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForSenders);
-  }
-
-  #getTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecAsyncMap): Promise {
-    return Promise.all(appTaggingSecrets.map(async secret => (await storageMap.getAsync(`${secret.toString()}`)) ?? 0));
-  }
-
-  resetNoteSyncData(): Promise {
-    return this.db.transactionAsync(async () => {
-      const recipients = await toArray(this.#taggingSecretIndexesForRecipients.keysAsync());
-      await Promise.all(recipients.map(recipient => this.#taggingSecretIndexesForRecipients.delete(recipient)));
-      const senders = await toArray(this.#taggingSecretIndexesForSenders.keysAsync());
-      await Promise.all(senders.map(sender => this.#taggingSecretIndexesForSenders.delete(sender)));
-    });
-  }
-
-  async storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise {
-    await this.#capsules.set(dbSlotToKey(contractAddress, slot), Buffer.concat(capsule.map(value => value.toBuffer())));
-  }
-
-  async loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise {
-    const dataBuffer = await this.#capsules.getAsync(dbSlotToKey(contractAddress, slot));
-    if (!dataBuffer) {
-      this.debug(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`);
-      return null;
-    }
-    const capsule: Fr[] = [];
-    for (let i = 0; i < dataBuffer.length; i += Fr.SIZE_IN_BYTES) {
-      capsule.push(Fr.fromBuffer(dataBuffer.subarray(i, i + Fr.SIZE_IN_BYTES)));
-    }
-    return capsule;
-  }
-
-  async deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise {
-    await this.#capsules.delete(dbSlotToKey(contractAddress, slot));
-  }
-
-  async copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise {
-    // In order to support overlapping source and destination regions, we need to check the relative positions of source
-    // and destination. If destination is ahead of source, then by the time we overwrite source elements using forward
-    // indexes we'll have already read those. On the contrary, if source is ahead of destination we need to use backward
-    // indexes to avoid reading elements that've been overwritten.
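-    // For example, copying 3 entries from src = 1 to dst = 2 (source ahead): a forward pass would
-    // overwrite slot 2 (dst[0]) before reading it back as src[1], so the loop runs i = 2, 1, 0 instead,
-    // in the same spirit as memmove for overlapping buffers.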
-
-    const indexes = Array.from(Array(numEntries).keys());
-    if (srcSlot.lt(dstSlot)) {
-      indexes.reverse();
-    }
-
-    for (const i of indexes) {
-      const currentSrcSlot = dbSlotToKey(contractAddress, srcSlot.add(new Fr(i)));
-      const currentDstSlot = dbSlotToKey(contractAddress, dstSlot.add(new Fr(i)));
-
-      const toCopy = await this.#capsules.getAsync(currentSrcSlot);
-      if (!toCopy) {
-        throw new Error(`Attempted to copy empty slot ${currentSrcSlot} for contract ${contractAddress.toString()}`);
-      }
-
-      await this.#capsules.set(currentDstSlot, toCopy);
-    }
-  }
-}
-
-function dbSlotToKey(contractAddress: AztecAddress, slot: Fr): string {
-  return `${contractAddress.toString()}:${slot.toString()}`;
-}
diff --git a/yarn-project/pxe/src/entrypoints/client/bundle/index.ts b/yarn-project/pxe/src/entrypoints/client/bundle/index.ts
new file mode 100644
index 00000000000..3f4a339f0e7
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/client/bundle/index.ts
@@ -0,0 +1,5 @@
+export * from '../../../pxe_service/index.js';
+export * from '../../../config/index.js';
+export * from '../../../storage/index.js';
+export * from './utils.js';
+export { PXEOracleInterface } from '../../../pxe_oracle_interface/index.js';
diff --git a/yarn-project/pxe/src/entrypoints/client/bundle/utils.ts b/yarn-project/pxe/src/entrypoints/client/bundle/utils.ts
new file mode 100644
index 00000000000..b686a09116e
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/client/bundle/utils.ts
@@ -0,0 +1,58 @@
+import { BBWASMBundlePrivateKernelProver } from '@aztec/bb-prover/wasm/bundle';
+import { createLogger } from '@aztec/foundation/log';
+import { createStore } from '@aztec/kv-store/indexeddb';
+import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle';
+import { WASMSimulator } from '@aztec/simulator/client';
+import type { AztecNode } from '@aztec/stdlib/interfaces/client';
+
+import type { PXEServiceConfig } from '../../../config/index.js';
+import { PXEService } from '../../../pxe_service/pxe_service.js';
+import type { PXECreationOptions } from '../pxe_creation_options.js';
+
+/**
+ * Create and start a PXEService instance with the given AztecNode.
+ * If no prover is provided in the options, a WASM-based private kernel prover is used.
+ * Returns a Promise that resolves to the started PXEService instance.
+ *
+ * @param aztecNode - The AztecNode instance to be used by the server.
+ * @param config - The PXE Service Config to use
+ * @param options - (Optional) Additional options (loggers, prover) for creating a PXEService.
+ * @returns A Promise that resolves to the started PXEService instance.
+ */
+export async function createPXEService(
+  aztecNode: AztecNode,
+  config: PXEServiceConfig,
+  options: PXECreationOptions = { loggers: {} },
+) {
+  const l1Contracts = await aztecNode.getL1ContractAddresses();
+  const configWithContracts = {
+    ...config,
+    l1Contracts,
+  } as PXEServiceConfig;
+
+  const store = await createStore(
+    'pxe_data',
+    configWithContracts,
+    options.loggers.store ?? createLogger('pxe:data:indexeddb'),
+  );
+
+  const simulationProvider = new WASMSimulator();
+  const prover =
+    options.prover ??
+    new BBWASMBundlePrivateKernelProver(
+      simulationProvider,
+      16,
+      options.loggers.prover ?? createLogger('bb:wasm:bundle'),
+    );
+  const protocolContractsProvider = new BundledProtocolContractsProvider();
+  const pxe = await PXEService.create(
+    aztecNode,
+    store,
+    prover,
+    simulationProvider,
+    protocolContractsProvider,
+    config,
+    options.loggers.pxe ?? createLogger('pxe:service'),
+  );
+  return pxe;
+}
diff --git a/yarn-project/pxe/src/entrypoints/client/lazy/index.ts b/yarn-project/pxe/src/entrypoints/client/lazy/index.ts
new file mode 100644
index 00000000000..3f4a339f0e7
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/client/lazy/index.ts
@@ -0,0 +1,5 @@
+export * from '../../../pxe_service/index.js';
+export * from '../../../config/index.js';
+export * from '../../../storage/index.js';
+export * from './utils.js';
+export { PXEOracleInterface } from '../../../pxe_oracle_interface/index.js';
diff --git a/yarn-project/pxe/src/entrypoints/client/lazy/utils.ts b/yarn-project/pxe/src/entrypoints/client/lazy/utils.ts
new file mode 100644
index 00000000000..bd5877e2517
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/client/lazy/utils.ts
@@ -0,0 +1,53 @@
+import { BBWASMLazyPrivateKernelProver } from '@aztec/bb-prover/wasm/lazy';
+import { createLogger } from '@aztec/foundation/log';
+import { createStore } from '@aztec/kv-store/indexeddb';
+import { LazyProtocolContractsProvider } from '@aztec/protocol-contracts/providers/lazy';
+import { WASMSimulator } from '@aztec/simulator/client';
+import type { AztecNode } from '@aztec/stdlib/interfaces/client';
+
+import type { PXEServiceConfig } from '../../../config/index.js';
+import { PXEService } from '../../../pxe_service/pxe_service.js';
+import type { PXECreationOptions } from '../pxe_creation_options.js';
+
+/**
+ * Create and start a PXEService instance with the given AztecNode.
+ * Returns a Promise that resolves to the started PXEService instance.
+ *
+ * @param aztecNode - The AztecNode instance to be used by the server.
+ * @param config - The PXE Service Config to use
+ * @param options - (Optional) Additional options (loggers, prover) for creating a PXEService.
+ * @returns A Promise that resolves to the started PXEService instance.
+ */
+export async function createPXEService(
+  aztecNode: AztecNode,
+  config: PXEServiceConfig,
+  options: PXECreationOptions = { loggers: {} },
+) {
+  const l1Contracts = await aztecNode.getL1ContractAddresses();
+  const configWithContracts = {
+    ...config,
+    l1Contracts,
+  } as PXEServiceConfig;
+
+  const store = await createStore(
+    'pxe_data',
+    configWithContracts,
+    options.loggers.store ?? createLogger('pxe:data:indexeddb'),
+  );
+
+  const simulationProvider = new WASMSimulator();
+  const prover =
+    options.prover ??
+    new BBWASMLazyPrivateKernelProver(simulationProvider, 16, options.loggers.prover ?? createLogger('bb:wasm:lazy'));
+  const protocolContractsProvider = new LazyProtocolContractsProvider();
+  const pxe = await PXEService.create(
+    aztecNode,
+    store,
+    prover,
+    simulationProvider,
+    protocolContractsProvider,
+    config,
+    options.loggers.pxe ?? createLogger('pxe:service'),
+  );
+  return pxe;
+}
diff --git a/yarn-project/pxe/src/entrypoints/client/pxe_creation_options.ts b/yarn-project/pxe/src/entrypoints/client/pxe_creation_options.ts
new file mode 100644
index 00000000000..5f76dbffb58
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/client/pxe_creation_options.ts
@@ -0,0 +1,7 @@
+import type { Logger } from '@aztec/foundation/log';
+import type { PrivateKernelProver } from '@aztec/stdlib/interfaces/client';
+
+export type PXECreationOptions = {
+  loggers: { store?: Logger; pxe?: Logger; prover?: Logger };
+  prover?: PrivateKernelProver;
+};
diff --git a/yarn-project/pxe/src/entrypoints/server/index.ts b/yarn-project/pxe/src/entrypoints/server/index.ts
new file mode 100644
index 00000000000..05cc5088853
--- /dev/null
+++ b/yarn-project/pxe/src/entrypoints/server/index.ts
@@ -0,0 +1,6 @@
+export * from '../../pxe_service/index.js';
+export * from '../../pxe_http/index.js';
+export * from '../../config/index.js';
+export * from '../../storage/index.js';
+export * from './utils.js';
+export { PXEOracleInterface } from '../../pxe_oracle_interface/index.js';
diff --git a/yarn-project/pxe/src/utils/create_pxe_service.ts b/yarn-project/pxe/src/entrypoints/server/utils.ts
similarity index 78%
rename from yarn-project/pxe/src/utils/create_pxe_service.ts
rename to yarn-project/pxe/src/entrypoints/server/utils.ts
index d13f49c67e1..8d157bdb1d5 100644
--- a/yarn-project/pxe/src/utils/create_pxe_service.ts
+++ b/yarn-project/pxe/src/entrypoints/server/utils.ts
@@ -2,16 +2,14 @@ import { BBNativePrivateKernelProver } from '@aztec/bb-prover';
 import { BBWASMBundlePrivateKernelProver } from '@aztec/bb-prover/wasm/bundle';
 import { randomBytes } from '@aztec/foundation/crypto';
 import { createLogger } from '@aztec/foundation/log';
-import { KeyStore } from '@aztec/key-store';
 import { createStore } from '@aztec/kv-store/lmdb-v2';
-import { L2TipsStore } from '@aztec/kv-store/stores';
 import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle';
 import { type SimulationProvider, WASMSimulator } from '@aztec/simulator/client';
 import type { AztecNode, PrivateKernelProver } from '@aztec/stdlib/interfaces/client';
 
-import type { PXEServiceConfig } from '../config/index.js';
-import { KVPxeDatabase } from '../database/kv_pxe_database.js';
-import { PXEService } from '../pxe_service/pxe_service.js';
+import type { PXEServiceConfig } from '../../config/index.js';
+import { PXEService } from '../../pxe_service/pxe_service.js';
+import { PXE_DATA_SCHEMA_VERSION } from './index.js';
 
 /**
  * Create and start an PXEService instance with the given AztecNode.
@@ -20,7 +18,7 @@ import { PXEService } from '../pxe_service/pxe_service.js';
  *
 * @param aztecNode - The AztecNode instance to be used by the server.
 * @param config - The PXE Service Config to use
- * @param options - (Optional) Optional information for creating an PXEService.
+ * @param useLogSuffix - (Optional) Log suffix for PXE's logger.
 * @param proofCreator - An optional proof creator to use in place of any other configuration
 * @returns A Promise that resolves to the started PXEService instance.
 */
@@ -39,34 +37,25 @@ export async function createPXEService(
     l1Contracts,
   } as PXEServiceConfig;
 
-  const keyStore = new KeyStore(
-    await createStore('pxe_key_store', KeyStore.SCHEMA_VERSION, configWithContracts, createLogger('pxe:keystore:lmdb')),
-  );
-
   const store = await createStore(
     'pxe_data',
-    KVPxeDatabase.SCHEMA_VERSION,
+    PXE_DATA_SCHEMA_VERSION,
     configWithContracts,
     createLogger('pxe:data:lmdb'),
   );
 
-  const db = await KVPxeDatabase.create(store);
-  const tips = new L2TipsStore(store, 'pxe');
 
   const simulationProvider = new WASMSimulator();
   const prover = proofCreator ?? (await createProver(config, simulationProvider, logSuffix));
   const protocolContractsProvider = new BundledProtocolContractsProvider();
-  const pxe = new PXEService(
-    keyStore,
+  const pxe = await PXEService.create(
     aztecNode,
-    db,
-    tips,
+    store,
     prover,
     simulationProvider,
     protocolContractsProvider,
     config,
     logSuffix,
   );
-  await pxe.init();
   return pxe;
 }
diff --git a/yarn-project/pxe/src/index.ts b/yarn-project/pxe/src/index.ts
deleted file mode 100644
index 950af4ae067..00000000000
--- a/yarn-project/pxe/src/index.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-export * from './pxe_service/index.js';
-export { pxeTestSuite } from './pxe_service/test/pxe_test_suite.js';
-export * from './pxe_http/index.js';
-export * from './config/index.js';
-export * from './utils/create_pxe_service.js';
-
-export * from './database/index.js';
-export { PXEDataProvider } from './pxe_data_provider/index.js';
-export * from './contract_data_provider/index.js';
diff --git a/yarn-project/pxe/src/kernel_oracle/index.ts b/yarn-project/pxe/src/kernel_oracle/index.ts
index 72da149e1e2..aad67ddb93a 100644
--- a/yarn-project/pxe/src/kernel_oracle/index.ts
+++ b/yarn-project/pxe/src/kernel_oracle/index.ts
@@ -17,7 +17,7 @@ import { SharedMutableValues, SharedMutableValuesWithHash } from '@aztec/stdlib/
 import type { NullifierMembershipWitness } from '@aztec/stdlib/trees';
 import type { VerificationKeyAsFields } from '@aztec/stdlib/vks';
 
-import type { ContractDataProvider } from '../contract_data_provider/index.js';
+import type { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js';
 import type { ProvingDataOracle } from './../kernel_prover/proving_data_oracle.js';
 
 // TODO: Block number should not be "latest".
diff --git a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts
index 17c911040fe..cb174bdc6dc 100644
--- a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts
+++ b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts
@@ -2,7 +2,7 @@ import { ContractNotFoundError } from '@aztec/simulator/client';
 import type { L1NotePayload } from '@aztec/stdlib/logs';
 import { Note } from '@aztec/stdlib/note';
 
-import type { PxeDatabase } from '../database/interfaces/pxe_database.js';
+import type { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js';
 
 /**
  * Merges privately and publicly delivered note values.
@@ -11,21 +11,21 @@ import type { PxeDatabase } from '../database/interfaces/pxe_database.js';
 * @returns Note payload with public fields added.
 */
 export async function getOrderedNoteItems(
-  db: PxeDatabase,
+  contractDataProvider: ContractDataProvider,
   { contractAddress, noteTypeId, privateNoteValues, publicNoteValues }: L1NotePayload,
 ): Promise {
   if (publicNoteValues.length === 0) {
     return new Note(privateNoteValues);
   }
 
-  const instance = await db.getContractInstance(contractAddress);
+  const instance = await contractDataProvider.getContractInstance(contractAddress);
   if (!instance) {
     throw new ContractNotFoundError(
       `Could not find instance for ${contractAddress.toString()}. This should never happen here as the partial notes flow should be triggered only for non-deferred notes.`,
     );
   }
 
-  const artifact = await db.getContractArtifact(instance.currentContractClassId);
+  const artifact = await contractDataProvider.getContractArtifact(instance.currentContractClassId);
   if (!artifact) {
     throw new Error(
       `Could not find artifact for contract class ${instance.currentContractClassId.toString()}. This should never happen here as the partial notes flow should be triggered only for non-deferred notes.`,
diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_oracle_interface/index.ts
similarity index 90%
rename from yarn-project/pxe/src/pxe_data_provider/index.ts
rename to yarn-project/pxe/src/pxe_oracle_interface/index.ts
index 8a7374c6728..0ce346472d9 100644
--- a/yarn-project/pxe/src/pxe_data_provider/index.ts
+++ b/yarn-project/pxe/src/pxe_oracle_interface/index.ts
@@ -42,23 +42,33 @@ import { MerkleTreeId, type NullifierMembershipWitness, PublicDataWitness } from
 import type { BlockHeader } from '@aztec/stdlib/tx';
 import { TxHash } from '@aztec/stdlib/tx';
 
-import { ContractDataProvider } from '../contract_data_provider/index.js';
-import type { PxeDatabase } from '../database/index.js';
-import { NoteDao } from '../database/note_dao.js';
 import { getOrderedNoteItems } from '../note_decryption_utils/add_public_values_to_payload.js';
+import type { AddressDataProvider } from '../storage/address_data_provider/address_data_provider.js';
+import type { AuthWitnessDataProvider } from '../storage/auth_witness_data_provider/auth_witness_data_provider.js';
+import type { CapsuleDataProvider } from '../storage/capsule_data_provider/capsule_data_provider.js';
+import type { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js';
+import { NoteDao } from '../storage/note_data_provider/note_dao.js';
+import type { NoteDataProvider } from '../storage/note_data_provider/note_data_provider.js';
+import type { SyncDataProvider } from '../storage/sync_data_provider/sync_data_provider.js';
+import type { TaggingDataProvider } from '../storage/tagging_data_provider/tagging_data_provider.js';
 import { WINDOW_HALF_SIZE, getIndexedTaggingSecretsForTheWindow, getInitialIndexesMap } from './tagging_utils.js';
 
 /**
  * A data layer that provides and stores information needed for simulating/proving a transaction.
 */
-export class PXEDataProvider implements ExecutionDataProvider {
+export class PXEOracleInterface implements ExecutionDataProvider {
   constructor(
-    private db: PxeDatabase,
-    private keyStore: KeyStore,
     private aztecNode: AztecNode,
+    private keyStore: KeyStore,
     private simulationProvider: SimulationProvider,
     private contractDataProvider: ContractDataProvider,
-    private log = createLogger('pxe:pxe_data_provider'),
+    private noteDataProvider: NoteDataProvider,
+    private capsuleDataProvider: CapsuleDataProvider,
+    private syncDataProvider: SyncDataProvider,
+    private taggingDataProvider: TaggingDataProvider,
+    private addressDataProvider: AddressDataProvider,
+    private authWitnessDataProvider: AuthWitnessDataProvider,
+    private log = createLogger('pxe:pxe_data_manager'),
   ) {}
 
   getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise {
@@ -66,7 +76,7 @@
   }
 
   async getCompleteAddress(account: AztecAddress): Promise {
-    const completeAddress = await this.db.getCompleteAddress(account);
+    const completeAddress = await this.addressDataProvider.getCompleteAddress(account);
     if (!completeAddress) {
       throw new Error(
         `No public key registered for address ${account}.
@@ -77,23 +87,20 @@
   }
 
   async getContractInstance(address: AztecAddress): Promise {
-    const instance = await this.db.getContractInstance(address);
+    const instance = await this.contractDataProvider.getContractInstance(address);
     if (!instance) {
       throw new Error(`No contract instance found for address ${address.toString()}`);
     }
     return instance;
   }
 
-  async getAuthWitness(messageHash: Fr): Promise {
-    const witness = await this.db.getAuthWitness(messageHash);
-    if (!witness) {
-      throw new Error(`Unknown auth witness for message hash ${messageHash.toString()}`);
-    }
+  async getAuthWitness(messageHash: Fr): Promise {
+    const witness = await this.authWitnessDataProvider.getAuthWitness(messageHash);
     return witness;
   }
 
   async getNotes(contractAddress: AztecAddress, storageSlot: Fr, status: NoteStatus, scopes?: AztecAddress[]) {
-    const noteDaos = await this.db.getNotes({
+    const noteDaos = await this.noteDataProvider.getNotes({
       contractAddress,
       storageSlot,
       status,
@@ -244,7 +251,7 @@
    * @returns A Promise that resolves to a BlockHeader object.
    */
   getBlockHeader(): Promise {
-    return this.db.getBlockHeader();
+    return this.syncDataProvider.getBlockHeader();
   }
 
   /**
@@ -282,7 +289,7 @@
    * @returns The full list of the users contact addresses.
   */
   public getSenders(): Promise {
-    return this.db.getSenderAddresses();
+    return this.taggingDataProvider.getSenderAddresses();
  }
 
   /**
@@ -301,7 +308,7 @@
     await this.syncTaggedLogsAsSender(contractAddress, sender, recipient);
 
     const appTaggingSecret = await this.#calculateAppTaggingSecret(contractAddress, sender, recipient);
-    const [index] = await this.db.getTaggingSecretsIndexesAsSender([appTaggingSecret]);
+    const [index] = await this.taggingDataProvider.getTaggingSecretsIndexesAsSender([appTaggingSecret]);
 
     return new IndexedTaggingSecret(appTaggingSecret, index);
   }
@@ -327,8 +334,8 @@
       contractAddress,
     });
 
-    const [index] = await this.db.getTaggingSecretsIndexesAsSender([secret]);
-    await this.db.setTaggingSecretsIndexesAsSender([new IndexedTaggingSecret(secret, index + 1)]);
+    const [index] = await this.taggingDataProvider.getTaggingSecretsIndexesAsSender([secret]);
+    await this.taggingDataProvider.setTaggingSecretsIndexesAsSender([new IndexedTaggingSecret(secret, index + 1)]);
   }
 
   async #calculateAppTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) {
@@ -358,16 +365,17 @@
 
     // We implicitly add all PXE accounts as senders, this helps us decrypt tags on notes that we send to ourselves
     // (recipient = us, sender = us)
-    const senders = [...(await this.db.getSenderAddresses()), ...(await this.keyStore.getAccounts())].filter(
-      (address, index, self) => index === self.findIndex(otherAddress => otherAddress.equals(address)),
-    );
+    const senders = [
+      ...(await this.taggingDataProvider.getSenderAddresses()),
+      ...(await this.keyStore.getAccounts()),
+    ].filter((address, index, self) => index === self.findIndex(otherAddress => otherAddress.equals(address)));
     const appTaggingSecrets = await Promise.all(
       senders.map(async contact => {
         const sharedSecret = await computeTaggingSecretPoint(recipientCompleteAddress, recipientIvsk, contact);
         return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]);
       }),
     );
-    const indexes = await this.db.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets);
+    const indexes = await this.taggingDataProvider.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets);
     return appTaggingSecrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i]));
   }
 
@@ -384,7 +392,7 @@
     recipient: AztecAddress,
   ): Promise {
     const appTaggingSecret = await this.#calculateAppTaggingSecret(contractAddress, sender, recipient);
-    const [oldIndex] = await this.db.getTaggingSecretsIndexesAsSender([appTaggingSecret]);
+    const [oldIndex] = await this.taggingDataProvider.getTaggingSecretsIndexesAsSender([appTaggingSecret]);
 
     // This algorithm works such that:
    // 1. If we find minimum consecutive empty logs in a window of logs we set the index to the index of the last log
@@ -422,7 +430,9 @@
     const contractName = await this.contractDataProvider.getDebugContractName(contractAddress);
     if (currentIndex !== oldIndex) {
-      await this.db.setTaggingSecretsIndexesAsSender([new IndexedTaggingSecret(appTaggingSecret, currentIndex)]);
+      await this.taggingDataProvider.setTaggingSecretsIndexesAsSender([
+        new IndexedTaggingSecret(appTaggingSecret, currentIndex),
+      ]);
 
       this.log.debug(`Syncing logs for sender ${sender} at contract ${contractName}(${contractAddress})`, {
         sender,
@@ -591,7 +601,7 @@
     );
 
     // At this point we have processed all the logs for the recipient so we store the new largest indexes in the db.
-    await this.db.setTaggingSecretsIndexesAsRecipient(
+    await this.taggingDataProvider.setTaggingSecretsIndexesAsRecipient(
       Object.entries(newLargestIndexMapToStore).map(
         ([appTaggingSecret, index]) => new IndexedTaggingSecret(Fr.fromHexString(appTaggingSecret), index),
       ),
@@ -632,7 +642,7 @@
           excludedIndices.set(scopedLog.txHash.toString(), new Set());
         }
 
-        const note = await getOrderedNoteItems(this.db, payload);
+        const note = await getOrderedNoteItems(this.contractDataProvider, payload);
 
         const plaintext = [payload.storageSlot, payload.noteTypeId.toField(), ...note.items];
         decrypted.push({ plaintext, txHash: scopedLog.txHash, contractAddress: payload.contractAddress });
@@ -702,7 +712,7 @@
       recipient,
     );
 
-    await this.db.addNotes([noteDao], recipient);
+    await this.noteDataProvider.addNotes([noteDao], recipient);
 
     this.log.verbose('Added note', {
       contract: noteDao.contractAddress,
       slot: noteDao.storageSlot,
@@ -751,7 +761,7 @@
     this.log.verbose('Searching for nullifiers of known notes', { contract: contractAddress });
 
     for (const recipient of await this.keyStore.getAccounts()) {
-      const currentNotesForRecipient = await this.db.getNotes({ contractAddress, owner: recipient });
+      const currentNotesForRecipient = await this.noteDataProvider.getNotes({ contractAddress, owner: recipient });
       const nullifiersToCheck = currentNotesForRecipient.map(note => note.siloedNullifier);
       const nullifierIndexes = await this.aztecNode.findNullifiersIndexesWithBlock('latest', nullifiersToCheck);
@@ -763,7 +773,10 @@
         })
         .filter(nullifier => nullifier !== undefined) as InBlock[];
 
-      const nullifiedNotes = await this.db.removeNullifiedNotes(foundNullifiers, await recipient.toAddressPoint());
+      const nullifiedNotes = await this.noteDataProvider.removeNullifiedNotes(
+        foundNullifiers,
+        await recipient.toAddressPoint(),
+      );
       nullifiedNotes.forEach(noteDao => {
         this.log.verbose(`Removed note for contract ${noteDao.contractAddress} at slot ${noteDao.storageSlot}`, {
           contract: noteDao.contractAddress,
@@ -801,7 +814,7 @@
     // note existence in said tree. Note that while this is technically a historical query, we perform it at the latest
     // locally synced block number which *should* be recent enough to be available. We avoid querying at 'latest' since
     // we want to avoid accidentally processing notes that only exist ahead in time of the locally synced state.
-    const syncedBlockNumber = await this.db.getBlockNumber();
+    const syncedBlockNumber = await this.syncDataProvider.getBlockNumber();
     const uniqueNoteHashTreeIndex = (
       await this.aztecNode.findLeavesIndexes(syncedBlockNumber!, MerkleTreeId.NOTE_HASH_TREE, [uniqueNoteHash])
     )[0];
@@ -836,7 +849,7 @@
     recipient: AztecAddress,
     simulator?: AcirSimulator,
   ) {
-    const artifact: FunctionArtifact | undefined = await new ContractDataProvider(this.db).getFunctionArtifactByName(
+    const artifact: FunctionArtifact | undefined = await this.contractDataProvider.getFunctionArtifactByName(
       contractAddress,
       'process_log',
     );
@@ -872,19 +885,19 @@
   }
 
   storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise {
-    return this.db.storeCapsule(contractAddress, slot, capsule);
+    return this.capsuleDataProvider.storeCapsule(contractAddress, slot, capsule);
   }
 
   loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise {
-    return this.db.loadCapsule(contractAddress, slot);
+    return this.capsuleDataProvider.loadCapsule(contractAddress, slot);
   }
 
   deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise {
-    return this.db.deleteCapsule(contractAddress, slot);
+    return this.capsuleDataProvider.deleteCapsule(contractAddress, slot);
   }
 
   copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise {
-    return this.db.copyCapsule(contractAddress, srcSlot, dstSlot, numEntries);
+    return this.capsuleDataProvider.copyCapsule(contractAddress, srcSlot, dstSlot, numEntries);
   }
 }
diff --git a/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts b/yarn-project/pxe/src/pxe_oracle_interface/pxe_oracle_interface.test.ts
similarity index 85%
rename from yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts
rename to yarn-project/pxe/src/pxe_oracle_interface/pxe_oracle_interface.test.ts
index e491e2132fb..a1e5e180c0d 100644
--- a/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts
+++ b/yarn-project/pxe/src/pxe_oracle_interface/pxe_oracle_interface.test.ts
@@ -19,10 +19,14 @@ import { TxEffect, TxHash } from '@aztec/stdlib/tx';
 import { jest } from '@jest/globals';
 import { type MockProxy, mock } from 'jest-mock-extended';
 
-import { ContractDataProvider } from '../contract_data_provider/index.js';
-import type { PxeDatabase } from '../database/index.js';
-import { KVPxeDatabase } from '../database/kv_pxe_database.js';
-import { PXEDataProvider } from './index.js';
+import { AddressDataProvider } from '../storage/address_data_provider/address_data_provider.js';
+import { AuthWitnessDataProvider } from '../storage/auth_witness_data_provider/auth_witness_data_provider.js';
+import { CapsuleDataProvider } from '../storage/capsule_data_provider/capsule_data_provider.js';
+import { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js';
+import { NoteDataProvider } from '../storage/note_data_provider/note_data_provider.js';
+import { SyncDataProvider } from '../storage/sync_data_provider/sync_data_provider.js';
+import { TaggingDataProvider } from '../storage/tagging_data_provider/tagging_data_provider.js';
+import { PXEOracleInterface } from './index.js';
 import { WINDOW_HALF_SIZE } from './tagging_utils.js';
 
 const TXS_PER_BLOCK = 4;
@@ -102,31 +106,54 @@ async function computeSiloedTagForIndex(
   return poseidon2Hash([contractAddress, tag]);
 }
 
-describe('PXE data provider', () => {
+describe('PXEOracleInterface', () => {
   let aztecNode: MockProxy;
-  let database: PxeDatabase;
+
+  let addressDataProvider: AddressDataProvider;
+  let authWitnessDataProvider: AuthWitnessDataProvider;
   let contractDataProvider: ContractDataProvider;
-  let pxeDataProvider: PXEDataProvider;
+  let noteDataProvider: NoteDataProvider;
+  let syncDataProvider: SyncDataProvider;
+  let taggingDataProvider: TaggingDataProvider;
+  let capsuleDataProvider: CapsuleDataProvider;
   let keyStore: KeyStore;
   let simulationProvider: SimulationProvider;
   let recipient: CompleteAddress;
   let contractAddress: AztecAddress;
 
+  let pxeOracleInterface: PXEOracleInterface;
+
   beforeEach(async () => {
-    const db = await openTmpStore('test');
+    const store = await openTmpStore('test');
     aztecNode = mock();
-    database = await KVPxeDatabase.create(db);
-    contractDataProvider = new ContractDataProvider(database);
+    contractDataProvider = new ContractDataProvider(store);
     jest.spyOn(contractDataProvider, 'getDebugContractName').mockImplementation(() => Promise.resolve('TestContract'));
-    keyStore = new KeyStore(db);
+
+    addressDataProvider = new AddressDataProvider(store);
+    authWitnessDataProvider = new AuthWitnessDataProvider(store);
+    noteDataProvider = await NoteDataProvider.create(store);
+    syncDataProvider = new SyncDataProvider(store);
+    taggingDataProvider = new TaggingDataProvider(store);
+    capsuleDataProvider = new CapsuleDataProvider(store);
+    keyStore = new KeyStore(store);
     simulationProvider = new WASMSimulator();
-    pxeDataProvider = new PXEDataProvider(database, keyStore, aztecNode, simulationProvider, contractDataProvider);
-    // Set up contract address
+    pxeOracleInterface = new PXEOracleInterface(
+      aztecNode,
+      keyStore,
+      simulationProvider,
+      contractDataProvider,
+      noteDataProvider,
+      capsuleDataProvider,
+      syncDataProvider,
+      taggingDataProvider,
+      addressDataProvider,
+      authWitnessDataProvider,
+    ); // Set up contract address
     contractAddress = await AztecAddress.random();
     // Set up recipient account
     recipient = await keyStore.addAccount(new Fr(69), Fr.random());
-    await database.addCompleteAddress(recipient);
+    await addressDataProvider.addCompleteAddress(recipient);
   });
 
   describe('sync tagged logs', () => {
@@ -217,7 +244,7 @@
        return { completeAddress, ivsk: keys.masterIncomingViewingSecretKey, secretKey: new Fr(index) };
       });
      for (const sender of senders) {
-        await database.addSenderAddress(sender.completeAddress.address);
+        await taggingDataProvider.addSenderAddress(sender.completeAddress.address);
      }
       aztecNode.getLogsByTags.mockReset();
     });
@@ -225,7 +252,7 @@
     it('should sync tagged logs', async () => {
       const tagIndex = 0;
       await generateMockLogs(tagIndex);
-      const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
       // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first
       // one + half of the logs for the second index
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2);
@@ -247,7 +274,7 @@
       // First sender should have 2 logs, but keep index 1 since they were built using the same tag
       // Next 4 senders should also have index 1 = offset + 1
       // Last 5 senders should have index 2 = offset + 2
-      const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);
+      const indexes = await taggingDataProvider.getTaggingSecretsIndexesAsRecipient(secrets);
 
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]);
@@ -259,7 +286,7 @@
     it('should sync tagged logs as senders', async () => {
       for (const sender of senders) {
-        await database.addCompleteAddress(sender.completeAddress);
+        await addressDataProvider.addCompleteAddress(sender.completeAddress);
         await keyStore.addAccount(sender.secretKey, sender.completeAddress.partialAddress);
       }
@@ -279,20 +306,20 @@
         }),
       );
 
-      const indexesAsSender = await database.getTaggingSecretsIndexesAsSender(secrets);
+      const indexesAsSender = await taggingDataProvider.getTaggingSecretsIndexesAsSender(secrets);
       expect(indexesAsSender).toStrictEqual([0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
 
       expect(aztecNode.getLogsByTags.mock.calls.length).toBe(0);
 
       for (let i = 0; i < senders.length; i++) {
-        await pxeDataProvider.syncTaggedLogsAsSender(
+        await pxeOracleInterface.syncTaggedLogsAsSender(
           contractAddress,
           senders[i].completeAddress.address,
           recipient.address,
         );
       }
 
-      let indexesAsSenderAfterSync = await database.getTaggingSecretsIndexesAsSender(secrets);
+      let indexesAsSenderAfterSync = await taggingDataProvider.getTaggingSecretsIndexesAsSender(secrets);
       expect(indexesAsSenderAfterSync).toStrictEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]);
 
       // Only 1 window is obtained for each sender
@@ -304,14 +331,14 @@
       tagIndex = 11;
       await generateMockLogs(tagIndex);
       for (let i = 0; i < senders.length; i++) {
-        await pxeDataProvider.syncTaggedLogsAsSender(
+        await pxeOracleInterface.syncTaggedLogsAsSender(
           contractAddress,
           senders[i].completeAddress.address,
           recipient.address,
         );
       }
 
-      indexesAsSenderAfterSync = await database.getTaggingSecretsIndexesAsSender(secrets);
+      indexesAsSenderAfterSync = await taggingDataProvider.getTaggingSecretsIndexesAsSender(secrets);
       expect(indexesAsSenderAfterSync).toStrictEqual([12, 12, 12, 12, 12, 13, 13, 13, 13, 13]);
 
       expect(aztecNode.getLogsByTags.mock.calls.length).toBe(NUM_SENDERS * 2);
@@ -320,7 +347,7 @@
     it('should sync tagged logs with a sender index offset', async () => {
       const tagIndex = 5;
       await generateMockLogs(tagIndex);
-      const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
       // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first one + half of the logs for the second index
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2);
@@ -340,7 +367,7 @@
       // First sender should have 2 logs, but keep index 1 since they were built using the same tag
       // Next 4 senders should also have index 6 = offset + 1
       // Last 5 senders should have index 7 = offset + 2
-      const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);
+      const indexes = await taggingDataProvider.getTaggingSecretsIndexesAsRecipient(secrets);
 
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([6, 6, 6, 6, 6, 7, 7, 7, 7, 7]);
@@ -368,9 +395,11 @@
       );
 
       // Increase our indexes to 2
-      await database.setTaggingSecretsIndexesAsRecipient(secrets.map(secret => new IndexedTaggingSecret(secret, 2)));
+      await taggingDataProvider.setTaggingSecretsIndexesAsRecipient(
+        secrets.map(secret => new IndexedTaggingSecret(secret, 2)),
+      );
 
-      const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
 
       // Even if our index as recipient is higher than what the sender sent, we should be able to find the logs
       // since the window starts at Math.max(0, 2 - window_size) = 0
@@ -379,7 +408,7 @@
       // First sender should have 2 logs, but keep index 2 since they were built using the same tag
       // Next 4 senders should also have index 2 = tagIndex + 1
       // Last 5 senders should have index 3 = tagIndex + 2
-      const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);
+      const indexes = await taggingDataProvider.getTaggingSecretsIndexesAsRecipient(secrets);
 
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([2, 2, 2, 2, 2, 3, 3, 3, 3, 3]);
@@ -408,17 +437,17 @@
       // We set the indexes to WINDOW_HALF_SIZE + 1 so that it's outside the window and for this reason no updates
       // should be triggered.
-      await database.setTaggingSecretsIndexesAsRecipient(
+      await taggingDataProvider.setTaggingSecretsIndexesAsRecipient(
         secrets.map(secret => new IndexedTaggingSecret(secret, WINDOW_HALF_SIZE + 1)),
       );
 
-      const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
 
       // Only half of the logs should be synced since we start from index 1 = (11 - window_size), the other half should be skipped
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS / 2);
 
       // Indexes should remain where we set them (window_size + 1)
-      const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);
+      const indexes = await taggingDataProvider.getTaggingSecretsIndexesAsRecipient(secrets);
 
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([11, 11, 11, 11, 11, 11, 11, 11, 11, 11]);
@@ -444,11 +473,11 @@
         }),
       );
 
-      await database.setTaggingSecretsIndexesAsRecipient(
+      await taggingDataProvider.setTaggingSecretsIndexesAsRecipient(
         secrets.map(secret => new IndexedTaggingSecret(secret, WINDOW_HALF_SIZE + 2)),
       );
 
-      let syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      let syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
 
       // No logs should be synced since we start from index 2 = 12 - window_size
       expect(syncedLogs.get(recipient.address.toString())).toHaveLength(0);
@@ -459,14 +488,14 @@
       aztecNode.getLogsByTags.mockClear();
 
       // Wipe the database
-      await database.resetNoteSyncData();
+      await taggingDataProvider.resetNoteSyncData();
 
-      syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 3);
+      syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 3);
 
       // First sender should have 2 logs, but keep index 1 since they were built using the same tag
       // Next 4 senders should also have index 1 = offset + 1
       // Last 5 senders should have index 2 = offset + 2
-      const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets);
+      const indexes = await taggingDataProvider.getTaggingSecretsIndexesAsRecipient(secrets);
 
       expect(indexes).toHaveLength(NUM_SENDERS);
       expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]);
@@ -479,7 +508,7 @@
     it('should not sync tagged logs with a blockNumber > maxBlockNumber', async () => {
       const
tagIndex = 0; await generateMockLogs(tagIndex); - const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 1); + const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 1); // Only NUM_SENDERS + 1 logs should be synched, since the rest have blockNumber > 1 expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1); @@ -500,7 +529,7 @@ describe('PXE data provider', () => { aztecNode.getLogsByTags.mockImplementation(tags => { return Promise.resolve(tags.map(tag => logs[tag.toString()] ?? [])); }); - const syncedLogs = await pxeDataProvider.syncTaggedLogs(contractAddress, 1); + const syncedLogs = await pxeOracleInterface.syncTaggedLogs(contractAddress, 1); // We expect the above log to be discarded, and so none to be synced expect(syncedLogs.get(recipient.address.toString())).toHaveLength(0); @@ -535,13 +564,13 @@ describe('PXE data provider', () => { const contractInstance = await randomContractInstanceWithAddress(); const contractArtifact = randomContractArtifact(); contractArtifact.functions = [processLogFuncArtifact]; - await database.addContractInstance(contractInstance); - await database.addContractArtifact(contractInstance.currentContractClassId, contractArtifact); + await contractDataProvider.addContractInstance(contractInstance); + await contractDataProvider.addContractArtifact(contractInstance.currentContractClassId, contractArtifact); contractAddress = contractInstance.address; - addNotesSpy = jest.spyOn(database, 'addNotes'); - getNotesSpy = jest.spyOn(database, 'getNotes'); - removeNullifiedNotesSpy = jest.spyOn(database, 'removeNullifiedNotes'); + addNotesSpy = jest.spyOn(noteDataProvider, 'addNotes'); + getNotesSpy = jest.spyOn(noteDataProvider, 'getNotes'); + removeNullifiedNotesSpy = jest.spyOn(noteDataProvider, 'removeNullifiedNotes'); removeNullifiedNotesSpy.mockImplementation(() => Promise.resolve([])); simulator = mock(); simulator.runUnconstrained.mockImplementation(() => Promise.resolve({})); @@ -632,7 +661,7 @@ describe('PXE data provider', () => { const taggedLogs = await mockTaggedLogs(requests); - await pxeDataProvider.processTaggedLogs(taggedLogs, recipient.address, simulator); + await pxeOracleInterface.processTaggedLogs(taggedLogs, recipient.address, simulator); // We test that a call to `processLog` is made with the correct function artifact and contract address expect(runUnconstrainedSpy).toHaveBeenCalledTimes(3); @@ -653,7 +682,7 @@ describe('PXE data provider', () => { const taggedLogs = await mockTaggedLogs(requests); - await pxeDataProvider.processTaggedLogs(taggedLogs, recipient.address, simulator); + await pxeOracleInterface.processTaggedLogs(taggedLogs, recipient.address, simulator); expect(addNotesSpy).toHaveBeenCalledTimes(0); }); @@ -675,7 +704,7 @@ describe('PXE data provider', () => { return [await wrapInBlock(1n, await L2Block.random(2)), undefined, undefined]; }); - await pxeDataProvider.removeNullifiedNotes(contractAddress); + await pxeOracleInterface.removeNullifiedNotes(contractAddress); expect(removeNullifiedNotesSpy).toHaveBeenCalledTimes(1); expect(removeNullifiedNotesSpy).toHaveBeenCalledWith( diff --git a/yarn-project/pxe/src/pxe_data_provider/tagging_utils.ts b/yarn-project/pxe/src/pxe_oracle_interface/tagging_utils.ts similarity index 100% rename from yarn-project/pxe/src/pxe_data_provider/tagging_utils.ts rename to yarn-project/pxe/src/pxe_oracle_interface/tagging_utils.ts diff --git a/yarn-project/pxe/src/pxe_service/error_enriching.ts 
b/yarn-project/pxe/src/pxe_service/error_enriching.ts index d50e3b7b347..4dd95907797 100644 --- a/yarn-project/pxe/src/pxe_service/error_enriching.ts +++ b/yarn-project/pxe/src/pxe_service/error_enriching.ts @@ -6,15 +6,18 @@ import { FunctionSelector } from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type SimulationError, isNoirCallStackUnresolved } from '@aztec/stdlib/errors'; -import { ContractDataProvider } from '../contract_data_provider/index.js'; -import type { PxeDatabase } from '../database/interfaces/pxe_database.js'; +import type { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js'; /** * Adds contract and function names to a simulation error, if they * can be found in the PXE database * @param err - The error to enrich. */ -export async function enrichSimulationError(err: SimulationError, db: PxeDatabase, logger: Logger) { +export async function enrichSimulationError( + err: SimulationError, + contractDataProvider: ContractDataProvider, + logger: Logger, +) { // Maps contract addresses to the set of function names that were in error. // Map and Set do reference equality for their keys instead of value equality, so we store the string // representation to get e.g. different contract address objects with the same address value to match. @@ -30,7 +33,7 @@ export async function enrichSimulationError(err: SimulationError, db: PxeDatabas await Promise.all( [...mentionedFunctions.entries()].map(async ([contractAddress, fnNames]) => { const parsedContractAddress = AztecAddress.fromString(contractAddress); - const contract = await db.getContract(parsedContractAddress); + const contract = await contractDataProvider.getContract(parsedContractAddress); if (contract) { err.enrichWithContractName(parsedContractAddress, contract.name); for (const fnName of fnNames) { @@ -59,7 +62,6 @@ export async function enrichSimulationError(err: SimulationError, db: PxeDatabas export async function enrichPublicSimulationError( err: SimulationError, contractDataProvider: ContractDataProvider, - db: PxeDatabase, logger: Logger, ) { const callStack = err.getCallStack(); @@ -99,6 +101,6 @@ export async function enrichPublicSimulationError( ); } } - await enrichSimulationError(err, db, logger); + await enrichSimulationError(err, contractDataProvider, logger); } } diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 0e1f5d52f40..f10fb2300e1 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -3,8 +3,9 @@ import { Fr, type Point } from '@aztec/foundation/fields'; import { type Logger, createLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import type { SiblingPath } from '@aztec/foundation/trees'; -import type { KeyStore } from '@aztec/key-store'; -import type { L2TipsStore } from '@aztec/kv-store/stores'; +import { KeyStore } from '@aztec/key-store'; +import type { AztecAsyncKVStore } from '@aztec/kv-store'; +import { L2TipsStore } from '@aztec/kv-store/stores'; import { ProtocolContractAddress, type ProtocolContractsProvider, @@ -24,12 +25,12 @@ import { import type { AuthWitness } from '@aztec/stdlib/auth-witness'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { InBlock, L2Block } from '@aztec/stdlib/block'; -import type { +import { CompleteAddress, - ContractClassWithId, - ContractInstanceWithAddress, - NodeInfo, - 
PartialAddress, + type ContractClassWithId, + type ContractInstanceWithAddress, + type NodeInfo, + type PartialAddress, } from '@aztec/stdlib/contract'; import { computeContractAddressFromInstance, getContractClassFromArtifact } from '@aztec/stdlib/contract'; import { SimulationError } from '@aztec/stdlib/errors'; @@ -50,6 +51,7 @@ import { computeAddressSecret } from '@aztec/stdlib/keys'; import type { LogFilter } from '@aztec/stdlib/logs'; import { getNonNullifiedL1ToL2MessageWitness } from '@aztec/stdlib/messaging'; import { type NotesFilter, UniqueNote } from '@aztec/stdlib/note'; +import { MerkleTreeId } from '@aztec/stdlib/trees'; import { PrivateExecutionResult, PrivateSimulationResult, @@ -67,11 +69,16 @@ import { inspect } from 'util'; import type { PXEServiceConfig } from '../config/index.js'; import { getPackageInfo } from '../config/package_info.js'; -import { ContractDataProvider } from '../contract_data_provider/index.js'; -import type { PxeDatabase } from '../database/index.js'; import { KernelOracle } from '../kernel_oracle/index.js'; import { KernelProver, type ProvingConfig } from '../kernel_prover/kernel_prover.js'; -import { PXEDataProvider } from '../pxe_data_provider/index.js'; +import { PXEOracleInterface } from '../pxe_oracle_interface/index.js'; +import { AddressDataProvider } from '../storage/address_data_provider/address_data_provider.js'; +import { AuthWitnessDataProvider } from '../storage/auth_witness_data_provider/auth_witness_data_provider.js'; +import { CapsuleDataProvider } from '../storage/capsule_data_provider/capsule_data_provider.js'; +import { ContractDataProvider } from '../storage/contract_data_provider/contract_data_provider.js'; +import { NoteDataProvider } from '../storage/note_data_provider/note_data_provider.js'; +import { SyncDataProvider } from '../storage/sync_data_provider/sync_data_provider.js'; +import { TaggingDataProvider } from '../storage/tagging_data_provider/tagging_data_provider.js'; import { Synchronizer } from '../synchronizer/index.js'; import { enrichPublicSimulationError, enrichSimulationError } from './error_enriching.js'; @@ -79,53 +86,102 @@ import { enrichPublicSimulationError, enrichSimulationError } from './error_enri * A Private eXecution Environment (PXE) implementation. 
*/ export class PXEService implements PXE { - private synchronizer: Synchronizer; - private contractDataProvider: ContractDataProvider; - private pxeDataProvider: PXEDataProvider; - private simulator: AcirSimulator; - private log: Logger; - private packageVersion: string; - private proverEnabled: boolean; - - constructor( - private keyStore: KeyStore, + private constructor( private node: AztecNode, - private db: PxeDatabase, - tipsStore: L2TipsStore, + private synchronizer: Synchronizer, + private keyStore: KeyStore, + private contractDataProvider: ContractDataProvider, + private noteDataProvider: NoteDataProvider, + private capsuleDataProvider: CapsuleDataProvider, + private syncDataProvider: SyncDataProvider, + private taggingDataProvider: TaggingDataProvider, + private addressDataProvider: AddressDataProvider, + private authWitnessDataProvider: AuthWitnessDataProvider, + private simulator: AcirSimulator, + private packageVersion: string, + private proverEnabled: boolean, private proofCreator: PrivateKernelProver, - simulationProvider: SimulationProvider, private protocolContractsProvider: ProtocolContractsProvider, + private log: Logger, + ) {} + + /** + * Creates an instance of a PXE Service by instantiating all the necessary data providers and services. + * Also triggers the registration of the protocol contracts and makes sure the provided node + * can be contacted. + * + * @returns A promise that resolves PXE service is ready to be used. + */ + public static async create( + node: AztecNode, + store: AztecAsyncKVStore, + proofCreator: PrivateKernelProver, + simulationProvider: SimulationProvider, + protocolContractsProvider: ProtocolContractsProvider, config: PXEServiceConfig, loggerOrSuffix?: string | Logger, ) { - this.log = + const log = !loggerOrSuffix || typeof loggerOrSuffix === 'string' ? createLogger(loggerOrSuffix ? `pxe:service:${loggerOrSuffix}` : `pxe:service`) : loggerOrSuffix; - this.synchronizer = new Synchronizer(node, db, tipsStore, config, loggerOrSuffix); - this.contractDataProvider = new ContractDataProvider(db); - this.pxeDataProvider = new PXEDataProvider( - db, - keyStore, + + const packageVersion = getPackageInfo().version; + const proverEnabled = !!config.proverEnabled; + const addressDataProvider = new AddressDataProvider(store); + const authWitnessDataProvider = new AuthWitnessDataProvider(store); + const contractDataProvider = new ContractDataProvider(store); + const noteDataProvider = await NoteDataProvider.create(store); + const syncDataProvider = new SyncDataProvider(store); + const taggingDataProvider = new TaggingDataProvider(store); + const capsuleDataProvider = new CapsuleDataProvider(store); + const keyStore = new KeyStore(store); + const tipsStore = new L2TipsStore(store, 'pxe'); + const synchronizer = new Synchronizer( node, + syncDataProvider, + noteDataProvider, + taggingDataProvider, + tipsStore, + config, + loggerOrSuffix, + ); + const pxeOracleInterface = new PXEOracleInterface( + node, + keyStore, simulationProvider, - this.contractDataProvider, - this.log, + contractDataProvider, + noteDataProvider, + capsuleDataProvider, + syncDataProvider, + taggingDataProvider, + addressDataProvider, + authWitnessDataProvider, + log, ); - this.simulator = new AcirSimulator(this.pxeDataProvider, simulationProvider); - this.packageVersion = getPackageInfo().version; - this.proverEnabled = !!config.proverEnabled; - } - - /** - * Starts the PXE Service by beginning the synchronization process between the Aztec node and the database. 
- * - * @returns A promise that resolves when the server has started successfully. - */ - public async init() { - await this.#registerProtocolContracts(); - const info = await this.getNodeInfo(); - this.log.info(`Started PXE connected to chain ${info.l1ChainId} version ${info.protocolVersion}`); + const simulator = new AcirSimulator(pxeOracleInterface, simulationProvider); + const pxeService = new PXEService( + node, + synchronizer, + keyStore, + contractDataProvider, + noteDataProvider, + capsuleDataProvider, + syncDataProvider, + taggingDataProvider, + addressDataProvider, + authWitnessDataProvider, + simulator, + packageVersion, + proverEnabled, + proofCreator, + protocolContractsProvider, + log, + ); + await pxeService.#registerProtocolContracts(); + const info = await pxeService.getNodeInfo(); + log.info(`Started PXE connected to chain ${info.l1ChainId} version ${info.protocolVersion}`); + return pxeService; } isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise { @@ -133,24 +189,34 @@ export class PXEService implements PXE { } /** Returns an estimate of the db size in bytes. */ - public estimateDbSize() { - return this.db.estimateSize(); + public async estimateDbSize() { + const treeRootsSize = Object.keys(MerkleTreeId).length * Fr.SIZE_IN_BYTES; + const dbSizes = await Promise.all([ + this.addressDataProvider.getSize(), + this.authWitnessDataProvider.getSize(), + this.capsuleDataProvider.getSize(), + this.contractDataProvider.getSize(), + this.noteDataProvider.getSize(), + this.syncDataProvider.getSize(), + this.taggingDataProvider.getSize(), + ]); + return [...dbSizes, treeRootsSize].reduce((sum, size) => sum + size, 0); } public addAuthWitness(witness: AuthWitness) { - return this.db.addAuthWitness(witness.requestHash, witness.witness); + return this.authWitnessDataProvider.addAuthWitness(witness.requestHash, witness.witness); } public getAuthWitness(messageHash: Fr): Promise { - return this.db.getAuthWitness(messageHash); + return this.authWitnessDataProvider.getAuthWitness(messageHash); } public storeCapsule(contract: AztecAddress, storageSlot: Fr, capsule: Fr[]) { - return this.db.storeCapsule(contract, storageSlot, capsule); + return this.capsuleDataProvider.storeCapsule(contract, storageSlot, capsule); } public getContractInstance(address: AztecAddress): Promise { - return this.db.getContractInstance(address); + return this.contractDataProvider.getContractInstance(address); } public async getContractClassMetadata( @@ -161,7 +227,12 @@ export class PXEService implements PXE { isContractClassPubliclyRegistered: boolean; artifact: ContractArtifact | undefined; }> { - const artifact = await this.db.getContractArtifact(id); + let artifact; + try { + artifact = await this.contractDataProvider.getContractArtifact(id); + } catch { + this.log.warn(`No artifact found for contract class ${id.toString()} when looking for its metadata`); + } return { contractClass: artifact && (await getContractClassFromArtifact(artifact)), @@ -175,8 +246,14 @@ export class PXEService implements PXE { isContractInitialized: boolean; isContractPubliclyDeployed: boolean; }> { + let instance; + try { + instance = await this.contractDataProvider.getContractInstance(address); + } catch { + this.log.warn(`No instance found for contract ${address.toString()} when looking for its metadata`); + } return { - contractInstance: await this.db.getContractInstance(address), + contractInstance: instance, isContractInitialized: await this.#isContractInitialized(address), isContractPubliclyDeployed: await 
this.#isContractPubliclyDeployed(address), }; @@ -193,7 +270,8 @@ export class PXEService implements PXE { this.log.debug(`Registered account\n ${accountCompleteAddress.toReadableString()}`); } - await this.db.addCompleteAddress(accountCompleteAddress); + await this.addressDataProvider.addCompleteAddress(accountCompleteAddress); + await this.noteDataProvider.addScope(accountCompleteAddress.address); return accountCompleteAddress; } @@ -204,7 +282,7 @@ export class PXEService implements PXE { return address; } - const wasAdded = await this.db.addSenderAddress(address); + const wasAdded = await this.taggingDataProvider.addSenderAddress(address); if (wasAdded) { this.log.info(`Added sender:\n ${address.toString()}`); @@ -216,13 +294,13 @@ export class PXEService implements PXE { } public getSenders(): Promise { - const senders = this.db.getSenderAddresses(); + const senders = this.taggingDataProvider.getSenderAddresses(); return Promise.resolve(senders); } public async removeSender(address: AztecAddress): Promise { - const wasRemoved = await this.db.removeSenderAddress(address); + const wasRemoved = await this.taggingDataProvider.removeSenderAddress(address); if (wasRemoved) { this.log.info(`Removed sender:\n ${address.toString()}`); @@ -235,7 +313,7 @@ export class PXEService implements PXE { public async getRegisteredAccounts(): Promise { // Get complete addresses of both the recipients and the accounts - const completeAddresses = await this.db.getCompleteAddresses(); + const completeAddresses = await this.addressDataProvider.getCompleteAddresses(); // Filter out the addresses not corresponding to accounts const accounts = await this.keyStore.getAccounts(); return completeAddresses.filter(completeAddress => @@ -245,7 +323,7 @@ export class PXEService implements PXE { public async registerContractClass(artifact: ContractArtifact): Promise { const { id: contractClassId } = await getContractClassFromArtifact(artifact); - await this.db.addContractArtifact(contractClassId, artifact); + await this.contractDataProvider.addContractArtifact(contractClassId, artifact); this.log.info(`Added contract class ${artifact.name} with id ${contractClassId}`); } @@ -266,8 +344,7 @@ export class PXEService implements PXE { if (!computedAddress.equals(instance.address)) { throw new Error('Added a contract in which the address does not match the contract instance.'); } - - await this.db.addContractArtifact(contractClass.id, artifact); + await this.contractDataProvider.addContractArtifact(contractClass.id, artifact); const publicFunctionSignatures = artifact.functions .filter(fn => fn.functionType === FunctionType.PUBLIC) @@ -278,41 +355,33 @@ export class PXEService implements PXE { await this.node.addContractClass({ ...contractClass, privateFunctions: [], unconstrainedFunctions: [] }); } else { // Otherwise, make sure there is an artifact already registered for that class id - artifact = await this.db.getContractArtifact(instance.currentContractClassId); - if (!artifact) { - throw new Error( - `Missing contract artifact for class id ${instance.currentContractClassId} for contract ${instance.address}`, - ); - } + artifact = await this.contractDataProvider.getContractArtifact(instance.currentContractClassId); } - await this.db.addContractInstance(instance); + await this.contractDataProvider.addContractInstance(instance); this.log.info( `Added contract ${artifact.name} at ${instance.address.toString()} with class ${instance.currentContractClassId}`, ); } public async updateContract(contractAddress: AztecAddress, 
artifact: ContractArtifact): Promise { - const currentInstance = await this.db.getContractInstance(contractAddress); - if (!currentInstance) { - throw new Error(`Contract ${contractAddress.toString()} is not registered.`); - } + const currentInstance = await this.contractDataProvider.getContractInstance(contractAddress); const contractClass = await getContractClassFromArtifact(artifact); await this.synchronizer.sync(); - const header = await this.db.getBlockHeader(); + const header = await this.syncDataProvider.getBlockHeader(); const currentClassId = await readCurrentClassId( contractAddress, currentInstance, - this.pxeDataProvider, + this.node, header.globalVariables.blockNumber.toNumber(), ); if (!contractClass.id.equals(currentClassId)) { throw new Error('Could not update contract to a class different from the current one.'); } - await this.db.addContractArtifact(contractClass.id, artifact); + await this.contractDataProvider.addContractArtifact(contractClass.id, artifact); const publicFunctionSignatures = artifact.functions .filter(fn => fn.functionType === FunctionType.PUBLIC) @@ -322,28 +391,25 @@ export class PXEService implements PXE { // TODO(#10007): Node should get public contract class from the registration event, not from PXE registration await this.node.addContractClass({ ...contractClass, privateFunctions: [], unconstrainedFunctions: [] }); currentInstance.currentContractClassId = contractClass.id; - await this.db.addContractInstance(currentInstance); + await this.contractDataProvider.addContractInstance(currentInstance); this.log.info(`Updated contract ${artifact.name} at ${contractAddress.toString()} to class ${contractClass.id}`); } public getContracts(): Promise { - return this.db.getContractsAddresses(); + return this.contractDataProvider.getContractsAddresses(); } public async getPublicStorageAt(contract: AztecAddress, slot: Fr) { - if (!(await this.getContractInstance(contract))) { - throw new Error(`Contract ${contract.toString()} is not deployed`); - } return await this.node.getPublicStorageAt('latest', contract, slot); } public async getNotes(filter: NotesFilter): Promise { - const noteDaos = await this.db.getNotes(filter); + const noteDaos = await this.noteDataProvider.getNotes(filter); const extendedNotes = noteDaos.map(async dao => { let owner = filter.owner; if (owner === undefined) { - const completeAddresses = await this.db.getCompleteAddresses(); + const completeAddresses = await this.addressDataProvider.getCompleteAddresses(); const completeAddressIndex = ( await Promise.all(completeAddresses.map(completeAddresses => completeAddresses.address.toAddressPoint())) ).findIndex(addressPoint => addressPoint.equals(dao.addressPoint)); @@ -559,7 +625,7 @@ export class PXEService implements PXE { } async #getFunctionCall(functionName: string, args: any[], to: AztecAddress): Promise { - const contract = await this.db.getContract(to); + const contract = await this.contractDataProvider.getContract(to); if (!contract) { throw new Error( `Unknown contract ${to}: add it to PXE Service by calling server.addContracts(...).\nSee docs for context: https://docs.aztec.network/developers/reference/debugging/aztecnr-errors#unknown-contract-0x0-add-it-to-pxe-by-calling-serveraddcontracts`, @@ -622,8 +688,8 @@ export class PXEService implements PXE { for (const name of protocolContractNames) { const { address, contractClass, instance, artifact } = await this.protocolContractsProvider.getProtocolContractArtifact(name); - await this.db.addContractArtifact(contractClass.id, artifact); 
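A note on the create() factory earlier in this file's diff: a TypeScript constructor cannot await, so the patch moves every asynchronous setup step (opening stores, wiring providers, registering protocol contracts) into a private constructor plus a static async factory. A minimal sketch of that pattern under hypothetical names (Service, init are not from this diff):

// async_factory_sketch.ts - illustrative only.
class Service {
  // The constructor only assigns; anything that must await happens in create().
  private constructor(private readonly providers: string[]) {}

  static async create(names: string[]): Promise<Service> {
    // Hypothetical async setup standing in for opening stores and providers.
    const providers = await Promise.all(names.map(n => Promise.resolve(`${n}-provider`)));
    const service = new Service(providers);
    // Post-construction async initialization, analogous to registering
    // protocol contracts before handing back a ready instance.
    await service.init();
    return service;
  }

  private init(): Promise<void> {
    return Promise.resolve();
  }
}

// Usage: const service = await Service.create(['note', 'capsule']);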
- await this.db.addContractInstance(instance); + await this.contractDataProvider.addContractArtifact(contractClass.id, artifact); + await this.contractDataProvider.addContractInstance(instance); registered[name] = address.toString(); } this.log.verbose(`Registered protocol contracts in pxe`, registered); @@ -661,7 +727,7 @@ export class PXEService implements PXE { return result; } catch (err) { if (err instanceof SimulationError) { - await enrichSimulationError(err, this.db, this.log); + await enrichSimulationError(err, this.contractDataProvider, this.log); } throw err; } @@ -687,7 +753,7 @@ export class PXEService implements PXE { return result; } catch (err) { if (err instanceof SimulationError) { - await enrichSimulationError(err, this.db, this.log); + await enrichSimulationError(err, this.contractDataProvider, this.log); } throw err; } @@ -711,7 +777,7 @@ export class PXEService implements PXE { } catch (err) { if (err instanceof SimulationError) { try { - await enrichPublicSimulationError(err, this.contractDataProvider, this.db, this.log); + await enrichPublicSimulationError(err, this.contractDataProvider, this.log); } catch (enrichErr) { this.log.error(`Failed to enrich public simulation error: ${enrichErr}`); } @@ -864,7 +930,7 @@ export class PXEService implements PXE { } async resetNoteSyncData() { - return await this.db.resetNoteSyncData(); + return await this.taggingDataProvider.resetNoteSyncData(); } private contextualizeError(err: Error, ...context: string[]): Error { diff --git a/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.test.ts b/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.test.ts new file mode 100644 index 00000000000..ee7cc84d3c6 --- /dev/null +++ b/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.test.ts @@ -0,0 +1,69 @@ +import { timesParallel } from '@aztec/foundation/collection'; +import { Point } from '@aztec/foundation/fields'; +import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import { CompleteAddress } from '@aztec/stdlib/contract'; +import { PublicKeys } from '@aztec/stdlib/keys'; + +import { AddressDataProvider } from './address_data_provider.js'; + +describe('addresses', () => { + let addressDataProvider: AddressDataProvider; + + beforeEach(async () => { + const store = await openTmpStore('address_data_provider_test'); + addressDataProvider = new AddressDataProvider(store); + }); + + it('stores and retrieves addresses', async () => { + const address = await CompleteAddress.random(); + await expect(addressDataProvider.addCompleteAddress(address)).resolves.toBe(true); + await expect(addressDataProvider.getCompleteAddress(address.address)).resolves.toEqual(address); + }); + + it('silently ignores an address it already knows about', async () => { + const address = await CompleteAddress.random(); + await expect(addressDataProvider.addCompleteAddress(address)).resolves.toBe(true); + await expect(addressDataProvider.addCompleteAddress(address)).resolves.toBe(false); + }); + + it.skip('refuses to overwrite an address with a different public key', async () => { + const address = await CompleteAddress.random(); + const otherAddress = await CompleteAddress.create( + address.address, + new PublicKeys(await Point.random(), await Point.random(), await Point.random(), await Point.random()), + address.partialAddress, + ); + + await addressDataProvider.addCompleteAddress(address); + await expect(addressDataProvider.addCompleteAddress(otherAddress)).rejects.toThrow(); + }); + + it('returns 
all addresses', async () => {
+    const addresses = await timesParallel(10, () => CompleteAddress.random());
+    for (const address of addresses) {
+      await addressDataProvider.addCompleteAddress(address);
+    }
+
+    const result = await addressDataProvider.getCompleteAddresses();
+    expect(result).toEqual(expect.arrayContaining(addresses));
+  });
+
+  it('returns a single address', async () => {
+    const addresses = await timesParallel(10, () => CompleteAddress.random());
+    for (const address of addresses) {
+      await addressDataProvider.addCompleteAddress(address);
+    }
+
+    const result = await addressDataProvider.getCompleteAddress(addresses[3].address);
+    expect(result).toEqual(addresses[3]);
+  });
+
+  it("returns an empty array if it doesn't have addresses", async () => {
+    expect(await addressDataProvider.getCompleteAddresses()).toEqual([]);
+  });
+
+  it("returns undefined if it doesn't have an address", async () => {
+    const completeAddress = await CompleteAddress.random();
+    expect(await addressDataProvider.getCompleteAddress(completeAddress.address)).toBeUndefined();
+  });
+});
diff --git a/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.ts b/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.ts
new file mode 100644
index 00000000000..c59c5368acd
--- /dev/null
+++ b/yarn-project/pxe/src/storage/address_data_provider/address_data_provider.ts
@@ -0,0 +1,71 @@
+import { toArray } from '@aztec/foundation/iterable';
+import type { AztecAsyncArray, AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
+import type { AztecAddress } from '@aztec/stdlib/aztec-address';
+import { CompleteAddress } from '@aztec/stdlib/contract';
+
+import type { DataProvider } from '../data_provider.js';
+
+export class AddressDataProvider implements DataProvider {
+  #store: AztecAsyncKVStore;
+  #completeAddresses: AztecAsyncArray<Buffer>;
+  #completeAddressIndex: AztecAsyncMap<string, number>;
+
+  constructor(store: AztecAsyncKVStore) {
+    this.#store = store;
+
+    this.#completeAddresses = this.#store.openArray('complete_addresses');
+    this.#completeAddressIndex = this.#store.openMap('complete_address_index');
+  }
+
+  addCompleteAddress(completeAddress: CompleteAddress): Promise<boolean> {
+    return this.#store.transactionAsync(async () => {
+      // TODO: re-add this
+      // await this.#addScope(completeAddress.address);
+
+      const addressString = completeAddress.address.toString();
+      const buffer = completeAddress.toBuffer();
+      const existing = await this.#completeAddressIndex.getAsync(addressString);
+      if (existing === undefined) {
+        const index = await this.#completeAddresses.lengthAsync();
+        await this.#completeAddresses.push(buffer);
+        await this.#completeAddressIndex.set(addressString, index);
+
+        return true;
+      } else {
+        const existingBuffer = await this.#completeAddresses.atAsync(existing);
+
+        if (existingBuffer && Buffer.from(existingBuffer).equals(buffer)) {
+          return false;
+        }
+
+        throw new Error(
+          `Complete address with aztec address ${addressString} but different public key or partial key already exists in memory database`,
+        );
+      }
+    });
+  }
+
+  async #getCompleteAddress(address: AztecAddress): Promise<CompleteAddress | undefined> {
+    const index = await this.#completeAddressIndex.getAsync(address.toString());
+    if (index === undefined) {
+      return undefined;
+    }
+
+    const value = await this.#completeAddresses.atAsync(index);
+    return value ? await CompleteAddress.fromBuffer(value) : undefined;
+  }
+
+  getCompleteAddress(account: AztecAddress): Promise<CompleteAddress | undefined> {
+    return this.#getCompleteAddress(account);
+  }
+
+  async getCompleteAddresses(): Promise<CompleteAddress[]> {
+    return await Promise.all(
+      (await toArray(this.#completeAddresses.valuesAsync())).map(v => CompleteAddress.fromBuffer(v)),
+    );
+  }
+
+  async getSize(): Promise<number> {
+    return (await this.#completeAddresses.lengthAsync()) * CompleteAddress.SIZE_IN_BYTES;
+  }
+}
diff --git a/yarn-project/pxe/src/storage/address_data_provider/index.ts b/yarn-project/pxe/src/storage/address_data_provider/index.ts
new file mode 100644
index 00000000000..c4263314cee
--- /dev/null
+++ b/yarn-project/pxe/src/storage/address_data_provider/index.ts
@@ -0,0 +1 @@
+export { AddressDataProvider } from './address_data_provider.js';
diff --git a/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.ts b/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.ts
new file mode 100644
index 00000000000..1b130691a1e
--- /dev/null
+++ b/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.ts
@@ -0,0 +1,34 @@
+import { Fr } from '@aztec/foundation/fields';
+import { toArray } from '@aztec/foundation/iterable';
+import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
+
+import type { DataProvider } from '../data_provider.js';
+
+export class AuthWitnessDataProvider implements DataProvider {
+  #store: AztecAsyncKVStore;
+  #authWitnesses: AztecAsyncMap<string, Buffer[]>;
+
+  constructor(store: AztecAsyncKVStore) {
+    this.#store = store;
+    this.#authWitnesses = this.#store.openMap('auth_witnesses');
+  }
+
+  async addAuthWitness(messageHash: Fr, witness: Fr[]): Promise<void> {
+    await this.#authWitnesses.set(
+      messageHash.toString(),
+      witness.map(w => w.toBuffer()),
+    );
+  }
+
+  async getAuthWitness(messageHash: Fr): Promise<Fr[] | undefined> {
+    const witness = await this.#authWitnesses.getAsync(messageHash.toString());
+    return witness?.map(w => Fr.fromBuffer(w));
+  }
+
+  async getSize(): Promise<number> {
+    return (await toArray(this.#authWitnesses.valuesAsync())).reduce(
+      (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES,
+      0,
+    );
+  }
+}
diff --git a/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.test.ts b/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.test.ts
new file mode 100644
index 00000000000..e2cbb088c0e
--- /dev/null
+++ b/yarn-project/pxe/src/storage/auth_witness_data_provider/auth_witness_data_provider.test.ts
@@ -0,0 +1,33 @@
+import { Fr } from '@aztec/foundation/fields';
+import { openTmpStore } from '@aztec/kv-store/lmdb-v2';
+
+import { AuthWitnessDataProvider } from './auth_witness_data_provider.js';
+
+describe('auth witnesses', () => {
+  let authWitnessDataProvider: AuthWitnessDataProvider;
+
+  beforeEach(async () => {
+    const store = await openTmpStore('auth_witness_data_provider_test');
+    authWitnessDataProvider = new AuthWitnessDataProvider(store);
+  });
+
+  it('stores and retrieves auth witnesses', async () => {
+    const messageHash = Fr.random();
+    const witness = [Fr.random(), Fr.random()];
+
+    await authWitnessDataProvider.addAuthWitness(messageHash, witness);
+    await expect(authWitnessDataProvider.getAuthWitness(messageHash)).resolves.toEqual(witness);
+  });
+
+  it("returns undefined if it doesn't have auth witnesses for the message", async () => {
+    const messageHash = Fr.random();
+    await
expect(authWitnessDataProvider.getAuthWitness(messageHash)).resolves.toBeUndefined(); + }); + + it.skip('refuses to overwrite auth witnesses for the same message', async () => { + const messageHash = Fr.random(); + const witness = [Fr.random(), Fr.random()]; + + await authWitnessDataProvider.addAuthWitness(messageHash, witness); + await expect(authWitnessDataProvider.addAuthWitness(messageHash, witness)).rejects.toThrow(); + }); +}); diff --git a/yarn-project/pxe/src/storage/auth_witness_data_provider/index.ts b/yarn-project/pxe/src/storage/auth_witness_data_provider/index.ts new file mode 100644 index 00000000000..9fcc2e76e80 --- /dev/null +++ b/yarn-project/pxe/src/storage/auth_witness_data_provider/index.ts @@ -0,0 +1 @@ +export { AuthWitnessDataProvider } from './auth_witness_data_provider.js'; diff --git a/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.test.ts b/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.test.ts new file mode 100644 index 00000000000..ad11e9b5790 --- /dev/null +++ b/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.test.ts @@ -0,0 +1,171 @@ +import { Fr } from '@aztec/foundation/fields'; +import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; + +import { CapsuleDataProvider } from './capsule_data_provider.js'; + +describe('contract non-volatile database', () => { + let contract: AztecAddress; + let capsuleDataProvider: CapsuleDataProvider; + + beforeEach(async () => { + // Setup mock contract address + contract = await AztecAddress.random(); + // Setup data provider + const store = await openTmpStore('capsule_data_provider_test'); + capsuleDataProvider = new CapsuleDataProvider(store); + }); + + it('stores and loads a single value', async () => { + const slot = new Fr(1); + const values = [new Fr(42)]; + + await capsuleDataProvider.storeCapsule(contract, slot, values); + const result = await capsuleDataProvider.loadCapsule(contract, slot); + expect(result).toEqual(values); + }); + + it('stores and loads multiple values', async () => { + const slot = new Fr(1); + const values = [new Fr(42), new Fr(43), new Fr(44)]; + + await capsuleDataProvider.storeCapsule(contract, slot, values); + const result = await capsuleDataProvider.loadCapsule(contract, slot); + expect(result).toEqual(values); + }); + + it('overwrites existing values', async () => { + const slot = new Fr(1); + const initialValues = [new Fr(42)]; + const newValues = [new Fr(100)]; + + await capsuleDataProvider.storeCapsule(contract, slot, initialValues); + await capsuleDataProvider.storeCapsule(contract, slot, newValues); + + const result = await capsuleDataProvider.loadCapsule(contract, slot); + expect(result).toEqual(newValues); + }); + + it('stores values for different contracts independently', async () => { + const anotherContract = await AztecAddress.random(); + const slot = new Fr(1); + const values1 = [new Fr(42)]; + const values2 = [new Fr(100)]; + + await capsuleDataProvider.storeCapsule(contract, slot, values1); + await capsuleDataProvider.storeCapsule(anotherContract, slot, values2); + + const result1 = await capsuleDataProvider.loadCapsule(contract, slot); + const result2 = await capsuleDataProvider.loadCapsule(anotherContract, slot); + + expect(result1).toEqual(values1); + expect(result2).toEqual(values2); + }); + + it('returns null for non-existent slots', async () => { + const slot = Fr.random(); + const result = await 
capsuleDataProvider.loadCapsule(contract, slot); + expect(result).toBeNull(); + }); + + it('deletes a slot', async () => { + const slot = new Fr(1); + const values = [new Fr(42)]; + + await capsuleDataProvider.storeCapsule(contract, slot, values); + await capsuleDataProvider.deleteCapsule(contract, slot); + + expect(await capsuleDataProvider.loadCapsule(contract, slot)).toBeNull(); + }); + + it('deletes an empty slot', async () => { + const slot = new Fr(1); + await capsuleDataProvider.deleteCapsule(contract, slot); + + expect(await capsuleDataProvider.loadCapsule(contract, slot)).toBeNull(); + }); + + it('copies a single value', async () => { + const slot = new Fr(1); + const values = [new Fr(42)]; + + await capsuleDataProvider.storeCapsule(contract, slot, values); + + const dstSlot = new Fr(5); + await capsuleDataProvider.copyCapsule(contract, slot, dstSlot, 1); + + expect(await capsuleDataProvider.loadCapsule(contract, dstSlot)).toEqual(values); + }); + + it('copies multiple non-overlapping values', async () => { + const src = new Fr(1); + const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; + + await capsuleDataProvider.storeCapsule(contract, src, valuesArray[0]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + + const dst = new Fr(5); + await capsuleDataProvider.copyCapsule(contract, src, dst, 3); + + expect(await capsuleDataProvider.loadCapsule(contract, dst)).toEqual(valuesArray[0]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + }); + + it('copies overlapping values with src ahead', async () => { + const src = new Fr(1); + const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; + + await capsuleDataProvider.storeCapsule(contract, src, valuesArray[0]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + + const dst = new Fr(2); + await capsuleDataProvider.copyCapsule(contract, src, dst, 3); + + expect(await capsuleDataProvider.loadCapsule(contract, dst)).toEqual(valuesArray[0]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + + // Slots 2 and 3 (src[1] and src[2]) should have been overwritten since they are also dst[0] and dst[1] + expect(await capsuleDataProvider.loadCapsule(contract, src)).toEqual(valuesArray[0]); // src[0] (unchanged) + expect(await capsuleDataProvider.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[0]); // dst[0] + expect(await capsuleDataProvider.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[1]); // dst[1] + }); + + it('copies overlapping values with dst ahead', async () => { + const src = new Fr(5); + const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; + + await capsuleDataProvider.storeCapsule(contract, src, valuesArray[0]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(1)), valuesArray[1]); + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + + const dst = new Fr(4); + await capsuleDataProvider.copyCapsule(contract, src, dst, 3); + + expect(await 
capsuleDataProvider.loadCapsule(contract, dst)).toEqual(valuesArray[0]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(1)))).toEqual(valuesArray[1]); + expect(await capsuleDataProvider.loadCapsule(contract, dst.add(new Fr(2)))).toEqual(valuesArray[2]); + + // Slots 5 and 6 (src[0] and src[1]) should have been overwritten since they are also dst[1] and dst[2] + expect(await capsuleDataProvider.loadCapsule(contract, src)).toEqual(valuesArray[1]); // dst[1] + expect(await capsuleDataProvider.loadCapsule(contract, src.add(new Fr(1)))).toEqual(valuesArray[2]); // dst[2] + expect(await capsuleDataProvider.loadCapsule(contract, src.add(new Fr(2)))).toEqual(valuesArray[2]); // src[2] (unchanged) + }); + + it('copying fails if any value is empty', async () => { + const src = new Fr(1); + const valuesArray = [[new Fr(42)], [new Fr(1337)], [new Fr(13)]]; + + await capsuleDataProvider.storeCapsule(contract, src, valuesArray[0]); + // We skip src[1] + await capsuleDataProvider.storeCapsule(contract, src.add(new Fr(2)), valuesArray[2]); + + const dst = new Fr(5); + await expect(capsuleDataProvider.copyCapsule(contract, src, dst, 3)).rejects.toThrow( + 'Attempted to copy empty slot', + ); + }); +}); diff --git a/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.ts b/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.ts new file mode 100644 index 00000000000..0d987ee0fe1 --- /dev/null +++ b/yarn-project/pxe/src/storage/capsule_data_provider/capsule_data_provider.ts @@ -0,0 +1,80 @@ +import { Fr } from '@aztec/foundation/fields'; +import { toArray } from '@aztec/foundation/iterable'; +import { type LogFn, createDebugOnlyLogger } from '@aztec/foundation/log'; +import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; +import type { AztecAddress } from '@aztec/stdlib/aztec-address'; + +import type { DataProvider } from '../data_provider.js'; + +export class CapsuleDataProvider implements DataProvider { + #store: AztecAsyncKVStore; + + // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}` + #capsules: AztecAsyncMap; + + debug: LogFn; + + constructor(store: AztecAsyncKVStore) { + this.#store = store; + + this.#capsules = this.#store.openMap('capsules'); + + this.debug = createDebugOnlyLogger('pxe:capsule-data-provider'); + } + + async storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise { + await this.#capsules.set(dbSlotToKey(contractAddress, slot), Buffer.concat(capsule.map(value => value.toBuffer()))); + } + + async loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise { + const dataBuffer = await this.#capsules.getAsync(dbSlotToKey(contractAddress, slot)); + if (!dataBuffer) { + this.debug(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`); + return null; + } + const capsule: Fr[] = []; + for (let i = 0; i < dataBuffer.length; i += Fr.SIZE_IN_BYTES) { + capsule.push(Fr.fromBuffer(dataBuffer.subarray(i, i + Fr.SIZE_IN_BYTES))); + } + return capsule; + } + + async deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise { + await this.#capsules.delete(dbSlotToKey(contractAddress, slot)); + } + + async copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise { + // In order to support overlapping source and destination regions, we need to check the relative positions of source + // and destination. 
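The overlap cases pinned down by the tests above follow the classic memmove direction rule, which copyCapsule below implements with an index reversal. A standalone sketch over a plain array, with the hypothetical copyRange standing in for the slot-keyed implementation:

// overlap_copy_sketch.ts - illustrative only.
function copyRange(slots: (number | undefined)[], src: number, dst: number, n: number): void {
  // Forward iteration is only safe when the destination starts at or before
  // the source; otherwise a forward pass would clobber unread source entries,
  // so we walk backwards instead (the same reversal trick as below).
  const indexes = [...Array(n).keys()];
  if (src < dst) {
    indexes.reverse();
  }
  for (const i of indexes) {
    const value = slots[src + i];
    if (value === undefined) {
      throw new Error(`Attempted to copy empty slot ${src + i}`);
    }
    slots[dst + i] = value;
  }
}

// Overlap with dst after src: a forward pass would turn [42, 1337, 13] into [42, 42, 42].
const slots: (number | undefined)[] = [undefined, 42, 1337, 13, undefined];
copyRange(slots, 1, 2, 3); // slots is now [undefined, 42, 42, 1337, 13]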
If the destination region starts before the source region, then by the time a forward pass overwrites a
+    // source element we will have already read it. Conversely, if the source region starts before the destination we
+    // need to iterate backwards, so that overlapping source elements are read before they are overwritten.
+
+    const indexes = Array.from(Array(numEntries).keys());
+    if (srcSlot.lt(dstSlot)) {
+      indexes.reverse();
+    }
+
+    for (const i of indexes) {
+      const currentSrcSlot = dbSlotToKey(contractAddress, srcSlot.add(new Fr(i)));
+      const currentDstSlot = dbSlotToKey(contractAddress, dstSlot.add(new Fr(i)));
+
+      const toCopy = await this.#capsules.getAsync(currentSrcSlot);
+      if (!toCopy) {
+        throw new Error(`Attempted to copy empty slot ${currentSrcSlot} for contract ${contractAddress.toString()}`);
+      }
+
+      await this.#capsules.set(currentDstSlot, toCopy);
+    }
+  }
+
+  public async getSize() {
+    return (await toArray(this.#capsules.valuesAsync())).reduce(
+      (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES,
+      0,
+    );
+  }
+}
+
+function dbSlotToKey(contractAddress: AztecAddress, slot: Fr): string {
+  return `${contractAddress.toString()}:${slot.toString()}`;
+}
diff --git a/yarn-project/pxe/src/storage/capsule_data_provider/index.ts b/yarn-project/pxe/src/storage/capsule_data_provider/index.ts
new file mode 100644
index 00000000000..69397935116
--- /dev/null
+++ b/yarn-project/pxe/src/storage/capsule_data_provider/index.ts
@@ -0,0 +1 @@
+export { CapsuleDataProvider } from './capsule_data_provider.js';
diff --git a/yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.test.ts b/yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.test.ts
new file mode 100644
index 00000000000..83547f0d4a1
--- /dev/null
+++ b/yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.test.ts
@@ -0,0 +1,45 @@
+import { Fr } from '@aztec/foundation/fields';
+import { openTmpStore } from '@aztec/kv-store/lmdb-v2';
+import { BenchmarkingContractArtifact } from '@aztec/noir-contracts.js/Benchmarking';
+import { TestContractArtifact } from '@aztec/noir-contracts.js/Test';
+import { FunctionType } from '@aztec/stdlib/abi';
+import { AztecAddress } from '@aztec/stdlib/aztec-address';
+import { SerializableContractInstance } from '@aztec/stdlib/contract';
+
+import { ContractDataProvider } from './contract_data_provider.js';
+
+describe('ContractDataProvider', () => {
+  let contractDataProvider: ContractDataProvider;
+
+  beforeEach(async () => {
+    const store = await openTmpStore('contract_data_provider_test');
+    contractDataProvider = new ContractDataProvider(store);
+  });
+
+  it('stores a contract artifact', async () => {
+    const artifact = BenchmarkingContractArtifact;
+    const id = Fr.random();
+    await contractDataProvider.addContractArtifact(id, artifact);
+    await expect(contractDataProvider.getContractArtifact(id)).resolves.toEqual(artifact);
+  });
+
+  it('does not store a contract artifact with a duplicate private function selector', async () => {
+    const artifact = TestContractArtifact;
+    const index = artifact.functions.findIndex(fn => fn.functionType === FunctionType.PRIVATE);
+
+    const copiedFn = structuredClone(artifact.functions[index]);
+    artifact.functions.push(copiedFn);
+
+    const id = Fr.random();
+    await expect(contractDataProvider.addContractArtifact(id, artifact)).rejects.toThrow(
+      'Repeated function selectors of private functions',
+    );
+  });
+
+  it('stores a contract instance', async () => {
+    const address = await AztecAddress.random();
+    const instance = (await
SerializableContractInstance.random()).withAddress(address); + await contractDataProvider.addContractInstance(instance); + await expect(contractDataProvider.getContractInstance(address)).resolves.toEqual(instance); + }); +}); diff --git a/yarn-project/pxe/src/contract_data_provider/contract_data_provider.ts b/yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.ts similarity index 67% rename from yarn-project/pxe/src/contract_data_provider/contract_data_provider.ts rename to yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.ts index a6003000942..adb494d64ab 100644 --- a/yarn-project/pxe/src/contract_data_provider/contract_data_provider.ts +++ b/yarn-project/pxe/src/storage/contract_data_provider/contract_data_provider.ts @@ -1,18 +1,26 @@ import type { Fr } from '@aztec/foundation/fields'; +import { toArray } from '@aztec/foundation/iterable'; import type { MembershipWitness } from '@aztec/foundation/trees'; +import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; import { ContractClassNotFoundError, ContractNotFoundError } from '@aztec/simulator/client'; import { type ContractArtifact, type FunctionArtifact, type FunctionDebugMetadata, - type FunctionSelector, + FunctionSelector, + FunctionType, + contractArtifactFromBuffer, + contractArtifactToBuffer, getFunctionDebugMetadata, } from '@aztec/stdlib/abi'; -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import type { ContractClass, ContractInstance } from '@aztec/stdlib/contract'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { + type ContractClass, + type ContractInstanceWithAddress, + SerializableContractInstance, +} from '@aztec/stdlib/contract'; -import type { ContractArtifactDatabase } from '../database/interfaces/contract_artifact_db.js'; -import type { ContractInstanceDatabase } from '../database/interfaces/contract_instance_db.js'; +import type { DataProvider } from '../data_provider.js'; import { PrivateFunctionsTree } from './private_functions_tree.js'; /** @@ -22,30 +30,133 @@ import { PrivateFunctionsTree } from './private_functions_tree.js'; * to efficiently serve the requested data. It interacts with the ContractDatabase and AztecNode to fetch * the required information and facilitate cryptographic proof generation. */ -export class ContractDataProvider { +export class ContractDataProvider implements DataProvider { /** Map from contract class id to private function tree. 
*/
-  private contractClasses: Map<string, PrivateFunctionsTree> = new Map();
+  private contractClassesCache: Map<string, PrivateFunctionsTree> = new Map();
+
+  #contractArtifacts: AztecAsyncMap<string, Buffer>;
+  #contractInstances: AztecAsyncMap<string, Buffer>;
+
+  constructor(store: AztecAsyncKVStore) {
+    this.#contractArtifacts = store.openMap('contract_artifacts');
+    this.#contractInstances = store.openMap('contracts_instances');
+  }
+
+  // Setters
+
+  public async addContractArtifact(id: Fr, contract: ContractArtifact): Promise<void> {
+    const privateFunctions = contract.functions.filter(
+      functionArtifact => functionArtifact.functionType === FunctionType.PRIVATE,
+    );
+
+    const privateSelectors = await Promise.all(
+      privateFunctions.map(async privateFunctionArtifact =>
+        (
+          await FunctionSelector.fromNameAndParameters(privateFunctionArtifact.name, privateFunctionArtifact.parameters)
+        ).toString(),
+      ),
+    );
+
+    if (privateSelectors.length !== new Set(privateSelectors).size) {
+      throw new Error('Repeated function selectors of private functions');
+    }
+
+    await this.#contractArtifacts.set(id.toString(), contractArtifactToBuffer(contract));
+  }
+
+  async addContractInstance(contract: ContractInstanceWithAddress): Promise<void> {
+    await this.#contractInstances.set(
+      contract.address.toString(),
+      new SerializableContractInstance(contract).toBuffer(),
+    );
+  }
+
+  // Private getters
+
+  async #getContractInstance(address: AztecAddress): Promise<ContractInstanceWithAddress | undefined> {
+    const contract = await this.#contractInstances.getAsync(address.toString());
+    return contract && SerializableContractInstance.fromBuffer(contract).withAddress(address);
+  }
 
-  constructor(private db: ContractArtifactDatabase & ContractInstanceDatabase) {}
+  async #getContractArtifact(id: Fr): Promise<ContractArtifact | undefined> {
+    const contract = await this.#contractArtifacts.getAsync(id.toString());
+    // TODO(@spalladino): AztecAsyncMap lies and returns Uint8Arrays instead of Buffers, hence the extra Buffer.from.
+    return contract && contractArtifactFromBuffer(Buffer.from(contract));
+  }
+
+  /**
+   * Retrieve or create a ContractTree instance based on the provided class id.
+   * If an existing tree with the same class id is found in the cache, it will be returned.
+   * Otherwise, a new ContractTree instance will be created using the contract data from the database
+   * and added to the cache before returning.
+   *
+   * @param classId - The class id of the contract for which the ContractTree is required.
+   * @returns A ContractTree instance associated with the specified contract address.
+   * @throws An Error if the contract is not found in the ContractDatabase.
+   */
+  private async getTreeForClassId(classId: Fr): Promise<PrivateFunctionsTree> {
+    if (!this.contractClassesCache.has(classId.toString())) {
+      const artifact = await this.#getContractArtifact(classId);
+      if (!artifact) {
+        throw new ContractClassNotFoundError(classId.toString());
+      }
+      const tree = await PrivateFunctionsTree.create(artifact);
+      this.contractClassesCache.set(classId.toString(), tree);
+    }
+    return this.contractClassesCache.get(classId.toString())!;
+  }
+
+  /**
+   * Retrieve or create a ContractTree instance based on the provided AztecAddress.
+   * If an existing tree with the same contract address is found in the cache, it will be returned.
+   * Otherwise, a new ContractTree instance will be created using the contract data from the database
+   * and added to the cache before returning.
+   *
+   * @param contractAddress - The AztecAddress of the contract for which the ContractTree is required.
+   * @returns A ContractTree instance associated with the specified contract address.
+   * @throws An Error if the contract is not found in the ContractDatabase.
+   */
+  private async getTreeForAddress(contractAddress: AztecAddress): Promise<PrivateFunctionsTree> {
+    const instance = await this.getContractInstance(contractAddress);
+    return this.getTreeForClassId(instance.currentContractClassId);
+  }
+
+  // Public getters
+
+  async getContractsAddresses(): Promise<AztecAddress[]> {
+    const keys = await toArray(this.#contractInstances.keysAsync());
+    return keys.map(AztecAddress.fromString);
+  }
 
   /** Returns a contract instance for a given address. Throws if not found. */
-  public async getContractInstance(contractAddress: AztecAddress): Promise<ContractInstance> {
-    const instance = await this.db.getContractInstance(contractAddress);
+  public async getContractInstance(contractAddress: AztecAddress): Promise<ContractInstanceWithAddress> {
+    const instance = await this.#getContractInstance(contractAddress);
     if (!instance) {
       throw new ContractNotFoundError(contractAddress.toString());
     }
     return instance;
   }
 
+  public async getContractArtifact(contractClassId: Fr): Promise<ContractArtifact> {
+    const tree = await this.getTreeForClassId(contractClassId);
+    return tree.getArtifact();
+  }
+
   /** Returns a contract class for a given class id. Throws if not found. */
   public async getContractClass(contractClassId: Fr): Promise<ContractClass> {
     const tree = await this.getTreeForClassId(contractClassId);
     return tree.getContractClass();
   }
 
-  public async getContractArtifact(contractClassId: Fr): Promise<ContractArtifact> {
-    const tree = await this.getTreeForClassId(contractClassId);
-    return tree.getArtifact();
+  public async getContract(
+    address: AztecAddress,
+  ): Promise<(ContractInstanceWithAddress & ContractArtifact) | undefined> {
+    const instance = await this.getContractInstance(address);
+    const artifact = instance && (await this.getContractArtifact(instance?.currentContractClassId));
+    if (!instance || !artifact) {
+      return undefined;
+    }
+    return { ...instance, ...artifact };
   }
 
   /**
@@ -142,40 +253,9 @@ export class ContractDataProvider {
     return `${contractName}:${functionName}`;
   }
 
-  /**
-   * Retrieve or create a ContractTree instance based on the provided class id.
-   * If an existing tree with the same class id is found in the cache, it will be returned.
-   * Otherwise, a new ContractTree instance will be created using the contract data from the database
-   * and added to the cache before returning.
-   *
-   * @param classId - The class id of the contract for which the ContractTree is required.
-   * @returns A ContractTree instance associated with the specified contract address.
-   * @throws An Error if the contract is not found in the ContractDatabase.
-   */
-  private async getTreeForClassId(classId: Fr): Promise<PrivateFunctionsTree> {
-    if (!this.contractClasses.has(classId.toString())) {
-      const artifact = await this.db.getContractArtifact(classId);
-      if (!artifact) {
-        throw new ContractClassNotFoundError(classId.toString());
-      }
-      const tree = await PrivateFunctionsTree.create(artifact);
-      this.contractClasses.set(classId.toString(), tree);
-    }
-    return this.contractClasses.get(classId.toString())!;
-  }
-
-  /**
-   * Retrieve or create a ContractTree instance based on the provided AztecAddress.
-   * If an existing tree with the same contract address is found in the cache, it will be returned.
-   * Otherwise, a new ContractTree instance will be created using the contract data from the database
-   * and added to the cache before returning.
-   *
-   * @param contractAddress - The AztecAddress of the contract for which the ContractTree is required.
-   * @returns A ContractTree instance associated with the specified contract address.
-   * @throws An Error if the contract is not found in the ContractDatabase.
-   */
-  private async getTreeForAddress(contractAddress: AztecAddress): Promise<PrivateFunctionsTree> {
-    const instance = await this.getContractInstance(contractAddress);
-    return this.getTreeForClassId(instance.currentContractClassId);
+  public async getSize() {
+    return (await toArray(this.#contractInstances.valuesAsync()))
+      .concat(await toArray(this.#contractArtifacts.valuesAsync()))
+      .reduce((sum, value) => sum + value.length, 0);
   }
 }
diff --git a/yarn-project/pxe/src/contract_data_provider/index.ts b/yarn-project/pxe/src/storage/contract_data_provider/index.ts
similarity index 100%
rename from yarn-project/pxe/src/contract_data_provider/index.ts
rename to yarn-project/pxe/src/storage/contract_data_provider/index.ts
diff --git a/yarn-project/pxe/src/contract_data_provider/private_functions_tree.ts b/yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts
similarity index 100%
rename from yarn-project/pxe/src/contract_data_provider/private_functions_tree.ts
rename to yarn-project/pxe/src/storage/contract_data_provider/private_functions_tree.ts
diff --git a/yarn-project/pxe/src/storage/data_provider.ts b/yarn-project/pxe/src/storage/data_provider.ts
new file mode 100644
index 00000000000..e70adf3baab
--- /dev/null
+++ b/yarn-project/pxe/src/storage/data_provider.ts
@@ -0,0 +1,3 @@
+export interface DataProvider {
+  getSize(): Promise<number>;
+}
diff --git a/yarn-project/pxe/src/storage/index.ts b/yarn-project/pxe/src/storage/index.ts
new file mode 100644
index 00000000000..be6fb54e6a5
--- /dev/null
+++ b/yarn-project/pxe/src/storage/index.ts
@@ -0,0 +1,10 @@
+export * from './address_data_provider/index.js';
+export * from './auth_witness_data_provider/index.js';
+export * from './capsule_data_provider/index.js';
+export * from './contract_data_provider/index.js';
+export * from './note_data_provider/index.js';
+export * from './sync_data_provider/index.js';
+export * from './tagging_data_provider/index.js';
+export * from './data_provider.js';
+
+export const PXE_DATA_SCHEMA_VERSION = 2;
diff --git a/yarn-project/pxe/src/storage/note_data_provider/index.ts b/yarn-project/pxe/src/storage/note_data_provider/index.ts
new file mode 100644
index 00000000000..37c34f153d4
--- /dev/null
+++ b/yarn-project/pxe/src/storage/note_data_provider/index.ts
@@ -0,0 +1,2 @@
+export { NoteDao } from './note_dao.js';
+export { NoteDataProvider } from './note_data_provider.js';
diff --git a/yarn-project/pxe/src/database/note_dao.test.ts b/yarn-project/pxe/src/storage/note_data_provider/note_dao.test.ts
similarity index 100%
rename from yarn-project/pxe/src/database/note_dao.test.ts
rename to yarn-project/pxe/src/storage/note_data_provider/note_dao.test.ts
diff --git a/yarn-project/pxe/src/database/note_dao.ts b/yarn-project/pxe/src/storage/note_data_provider/note_dao.ts
similarity index 100%
rename from yarn-project/pxe/src/database/note_dao.ts
rename to yarn-project/pxe/src/storage/note_data_provider/note_dao.ts
diff --git a/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.test.ts b/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.test.ts
new file mode 100644
index 00000000000..d859bbfddf6
--- /dev/null
+++ b/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.test.ts
@@ -0,0 +1,266 @@
+import { timesParallel } from '@aztec/foundation/collection';
+import { Fr } from '@aztec/foundation/fields';
+import { openTmpStore } from '@aztec/kv-store/lmdb-v2';
+import { AztecAddress } from
'@aztec/stdlib/aztec-address'; +import { CompleteAddress } from '@aztec/stdlib/contract'; +import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note'; +import { randomTxHash } from '@aztec/stdlib/testing'; + +import times from 'lodash.times'; + +import { NoteDao } from './note_dao.js'; +import { NoteDataProvider } from './note_data_provider.js'; + +describe('NoteDataProvider', () => { + let noteDataProvider: NoteDataProvider; + let owners: CompleteAddress[]; + let contractAddresses: AztecAddress[]; + let storageSlots: Fr[]; + let notes: NoteDao[]; + + beforeEach(async () => { + const store = await openTmpStore('note_data_provider_test'); + noteDataProvider = await NoteDataProvider.create(store); + }); + + const filteringTests: [() => Promise, () => Promise][] = [ + [() => Promise.resolve({}), () => Promise.resolve(notes)], + + [ + () => Promise.resolve({ contractAddress: contractAddresses[0] }), + () => Promise.resolve(notes.filter(note => note.contractAddress.equals(contractAddresses[0]))), + ], + [async () => ({ contractAddress: await AztecAddress.random() }), () => Promise.resolve([])], + + [ + () => Promise.resolve({ storageSlot: storageSlots[0] }), + () => Promise.resolve(notes.filter(note => note.storageSlot.equals(storageSlots[0]))), + ], + [() => Promise.resolve({ storageSlot: Fr.random() }), () => Promise.resolve([])], + + [() => Promise.resolve({ txHash: notes[0].txHash }), () => Promise.resolve([notes[0]])], + [() => Promise.resolve({ txHash: randomTxHash() }), () => Promise.resolve([])], + + [ + () => Promise.resolve({ owner: owners[0].address }), + async () => { + const ownerAddressPoint = await owners[0].address.toAddressPoint(); + return notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); + }, + ], + + [ + () => Promise.resolve({ contractAddress: contractAddresses[0], storageSlot: storageSlots[0] }), + () => + Promise.resolve( + notes.filter( + note => note.contractAddress.equals(contractAddresses[0]) && note.storageSlot.equals(storageSlots[0]), + ), + ), + ], + [ + () => Promise.resolve({ contractAddress: contractAddresses[0], storageSlot: storageSlots[1] }), + () => Promise.resolve([]), + ], + ]; + + beforeEach(async () => { + owners = await timesParallel(2, () => CompleteAddress.random()); + contractAddresses = await timesParallel(2, () => AztecAddress.random()); + storageSlots = times(2, () => Fr.random()); + + notes = await timesParallel(10, async i => { + const addressPoint = await owners[i % owners.length].address.toAddressPoint(); + return NoteDao.random({ + contractAddress: contractAddresses[i % contractAddresses.length], + storageSlot: storageSlots[i % storageSlots.length], + addressPoint, + index: BigInt(i), + l2BlockNumber: i, + }); + }); + + for (const owner of owners) { + await noteDataProvider.addScope(owner.address); + } + }); + + it.each(filteringTests)('stores notes in bulk and retrieves notes', async (getFilter, getExpected) => { + await noteDataProvider.addNotes(notes); + const returnedNotes = await noteDataProvider.getNotes(await getFilter()); + const expected = await getExpected(); + expect(returnedNotes.sort()).toEqual(expected.sort()); + }); + + it.each(filteringTests)('stores notes one by one and retrieves notes', async (getFilter, getExpected) => { + for (const note of notes) { + await noteDataProvider.addNote(note); + } + + const returnedNotes = await noteDataProvider.getNotes(await getFilter()); + + const expected = await getExpected(); + expect(returnedNotes.sort()).toEqual(expected.sort()); + }); + + 
it.each(filteringTests)('retrieves nullified notes', async (getFilter, getExpected) => { + await noteDataProvider.addNotes(notes); + + // Nullify all notes and use the same filter as other test cases + for (const owner of owners) { + const ownerAddressPoint = await owner.address.toAddressPoint(); + const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); + const nullifiers = notesToNullify.map(note => ({ + data: note.siloedNullifier, + l2BlockNumber: note.l2BlockNumber, + l2BlockHash: note.l2BlockHash, + })); + await expect(noteDataProvider.removeNullifiedNotes(nullifiers, ownerAddressPoint)).resolves.toEqual( + notesToNullify, + ); + } + const filter = await getFilter(); + const returnedNotes = await noteDataProvider.getNotes({ ...filter, status: NoteStatus.ACTIVE_OR_NULLIFIED }); + const expected = await getExpected(); + expect(returnedNotes.sort()).toEqual(expected.sort()); + }); + + it('skips nullified notes by default or when requesting active', async () => { + await noteDataProvider.addNotes(notes); + const ownerAddressPoint = await owners[0].address.toAddressPoint(); + const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); + const nullifiers = notesToNullify.map(note => ({ + data: note.siloedNullifier, + l2BlockNumber: note.l2BlockNumber, + l2BlockHash: note.l2BlockHash, + })); + await expect(noteDataProvider.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( + notesToNullify, + ); + + const actualNotesWithDefault = await noteDataProvider.getNotes({}); + const actualNotesWithActive = await noteDataProvider.getNotes({ status: NoteStatus.ACTIVE }); + + expect(actualNotesWithDefault).toEqual(actualNotesWithActive); + expect(actualNotesWithActive).toEqual(notes.filter(note => !notesToNullify.includes(note))); + }); + + it('handles note unnullification', async () => { + await noteDataProvider.addNotes(notes); + const ownerAddressPoint = await owners[0].address.toAddressPoint(); + + const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); + const nullifiers = notesToNullify.map(note => ({ + data: note.siloedNullifier, + l2BlockNumber: 99, + l2BlockHash: Fr.random().toString(), + })); + await expect(noteDataProvider.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( + notesToNullify, + ); + await expect(noteDataProvider.unnullifyNotesAfter(98)).resolves.toEqual(undefined); + + const result = await noteDataProvider.getNotes({ status: NoteStatus.ACTIVE, owner: owners[0].address }); + + expect(result.sort()).toEqual([...notesToNullify].sort()); + }); + + it('returns active and nullified notes when requesting either', async () => { + await noteDataProvider.addNotes(notes); + const ownerAddressPoint = await owners[0].address.toAddressPoint(); + + const notesToNullify = notes.filter(note => note.addressPoint.equals(ownerAddressPoint)); + const nullifiers = notesToNullify.map(note => ({ + data: note.siloedNullifier, + l2BlockNumber: note.l2BlockNumber, + l2BlockHash: note.l2BlockHash, + })); + await expect(noteDataProvider.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( + notesToNullify, + ); + + const result = await noteDataProvider.getNotes({ + status: NoteStatus.ACTIVE_OR_NULLIFIED, + }); + + // We have to compare the sorted arrays since the database does not return the same order as when originally + // inserted combining active and nullified results. 
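Because the combined active-plus-nullified query makes no ordering guarantee, the assertions in these tests normalize both sides with `.sort()`. Note that the default array comparator stringifies its elements, so where ordering actually matters a comparator over the note's tree index is a more robust way to express the same order-insensitive check. A minimal sketch, assuming only the `NoteDao` fields already used in this file:

```typescript
import type { NoteDao } from './note_dao.js';

// Order-insensitive equality helper: sort both sides by the note's tree index
// (a bigint, so compare explicitly rather than relying on default stringification).
function sortByIndex(notes: NoteDao[]): NoteDao[] {
  return [...notes].sort((a, b) => (a.index < b.index ? -1 : a.index > b.index ? 1 : 0));
}

// Usage in a test: expect(sortByIndex(returnedNotes)).toEqual(sortByIndex(expected));
```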
+    expect(result.sort()).toEqual([...notes].sort());
+  });
+
+  it('stores notes one by one and retrieves notes with siloed account', async () => {
+    for (const note of notes.slice(0, 5)) {
+      await noteDataProvider.addNote(note, owners[0].address);
+    }
+
+    for (const note of notes.slice(5)) {
+      await noteDataProvider.addNote(note, owners[1].address);
+    }
+
+    const owner0Notes = await noteDataProvider.getNotes({
+      scopes: [owners[0].address],
+    });
+
+    expect(owner0Notes.sort()).toEqual(notes.slice(0, 5).sort());
+
+    const owner1Notes = await noteDataProvider.getNotes({
+      scopes: [owners[1].address],
+    });
+
+    expect(owner1Notes.sort()).toEqual(notes.slice(5).sort());
+
+    const bothOwnerNotes = await noteDataProvider.getNotes({
+      scopes: [owners[0].address, owners[1].address],
+    });
+
+    expect(bothOwnerNotes.sort()).toEqual(notes.sort());
+  });
+
+  it('a nullified note removes notes from all accounts in the pxe', async () => {
+    await noteDataProvider.addNote(notes[0], owners[0].address);
+    await noteDataProvider.addNote(notes[0], owners[1].address);
+
+    await expect(
+      noteDataProvider.getNotes({
+        scopes: [owners[0].address],
+      }),
+    ).resolves.toEqual([notes[0]]);
+    await expect(
+      noteDataProvider.getNotes({
+        scopes: [owners[1].address],
+      }),
+    ).resolves.toEqual([notes[0]]);
+    const ownerAddressPoint = await owners[0].address.toAddressPoint();
+    await expect(
+      noteDataProvider.removeNullifiedNotes(
+        [
+          {
+            data: notes[0].siloedNullifier,
+            l2BlockHash: notes[0].l2BlockHash,
+            l2BlockNumber: notes[0].l2BlockNumber,
+          },
+        ],
+        ownerAddressPoint,
+      ),
+    ).resolves.toEqual([notes[0]]);
+
+    await expect(
+      noteDataProvider.getNotes({
+        scopes: [owners[0].address],
+      }),
+    ).resolves.toEqual([]);
+    await expect(
+      noteDataProvider.getNotes({
+        scopes: [owners[1].address],
+      }),
+    ).resolves.toEqual([]);
+  });
+
+  it('removes notes after a given block', async () => {
+    await noteDataProvider.addNotes(notes, owners[0].address);
+
+    await noteDataProvider.removeNotesAfter(5);
+    const result = await noteDataProvider.getNotes({ scopes: [owners[0].address] });
+    expect(new Set(result)).toEqual(new Set(notes.slice(0, 6)));
+  });
+});
diff --git a/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.ts b/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.ts
new file mode 100644
index 00000000000..fe0865bef22
--- /dev/null
+++ b/yarn-project/pxe/src/storage/note_data_provider/note_data_provider.ts
@@ -0,0 +1,359 @@
+import { toBufferBE } from '@aztec/foundation/bigint-buffer';
+import type { Fr, Point } from '@aztec/foundation/fields';
+import { toArray } from '@aztec/foundation/iterable';
+import type { AztecAsyncKVStore, AztecAsyncMap, AztecAsyncMultiMap } from '@aztec/kv-store';
+import { AztecAddress } from '@aztec/stdlib/aztec-address';
+import type { InBlock } from '@aztec/stdlib/block';
+import type { PublicKey } from '@aztec/stdlib/keys';
+import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note';
+
+import type { DataProvider } from '../data_provider.js';
+import { NoteDao } from './note_dao.js';
+
+export class NoteDataProvider implements DataProvider {
+  #store: AztecAsyncKVStore;
+
+  #notes: AztecAsyncMap<string, Buffer>;
+  #nullifiedNotes: AztecAsyncMap<string, Buffer>;
+  #nullifierToNoteId: AztecAsyncMap<string, string>;
+  #nullifiersByBlockNumber: AztecAsyncMultiMap<number, string>;
+
+  #nullifiedNotesToScope: AztecAsyncMultiMap<string, string>;
+  #nullifiedNotesByContract: AztecAsyncMultiMap<string, string>;
+  #nullifiedNotesByStorageSlot: AztecAsyncMultiMap<string, string>;
+  #nullifiedNotesByTxHash: AztecAsyncMultiMap<string, string>;
+  #nullifiedNotesByAddressPoint: AztecAsyncMultiMap<string, string>;
+  #nullifiedNotesByNullifier: AztecAsyncMap<string, string>;
+
+  #scopes: AztecAsyncMap<string, true>;
+  #notesToScope: AztecAsyncMultiMap<string, string>;
+  #notesByContractAndScope: Map<string, AztecAsyncMultiMap<string, string>>;
+  #notesByStorageSlotAndScope: Map<string, AztecAsyncMultiMap<string, string>>;
+  #notesByTxHashAndScope: Map<string, AztecAsyncMultiMap<string, string>>;
+  #notesByAddressPointAndScope: Map<string, AztecAsyncMultiMap<string, string>>;
+
+  private constructor(store: AztecAsyncKVStore) {
+    this.#store = store;
+    this.#notes = store.openMap('notes');
+    this.#nullifiedNotes = store.openMap('nullified_notes');
+    this.#nullifierToNoteId = store.openMap('nullifier_to_note');
+    this.#nullifiersByBlockNumber = store.openMultiMap('nullifier_to_block_number');
+
+    this.#nullifiedNotesToScope = store.openMultiMap('nullified_notes_to_scope');
+    this.#nullifiedNotesByContract = store.openMultiMap('nullified_notes_by_contract');
+    this.#nullifiedNotesByStorageSlot = store.openMultiMap('nullified_notes_by_storage_slot');
+    this.#nullifiedNotesByTxHash = store.openMultiMap('nullified_notes_by_tx_hash');
+    this.#nullifiedNotesByAddressPoint = store.openMultiMap('nullified_notes_by_address_point');
+    this.#nullifiedNotesByNullifier = store.openMap('nullified_notes_by_nullifier');
+
+    this.#scopes = store.openMap('scopes');
+    this.#notesToScope = store.openMultiMap('notes_to_scope');
+    this.#notesByContractAndScope = new Map<string, AztecAsyncMultiMap<string, string>>();
+    this.#notesByStorageSlotAndScope = new Map<string, AztecAsyncMultiMap<string, string>>();
+    this.#notesByTxHashAndScope = new Map<string, AztecAsyncMultiMap<string, string>>();
+    this.#notesByAddressPointAndScope = new Map<string, AztecAsyncMultiMap<string, string>>();
+  }
+
+  public static async create(store: AztecAsyncKVStore): Promise<NoteDataProvider> {
+    const pxeDB = new NoteDataProvider(store);
+    for await (const scope of pxeDB.#scopes.keysAsync()) {
+      pxeDB.#notesByContractAndScope.set(scope, store.openMultiMap(`${scope}:notes_by_contract`));
+      pxeDB.#notesByStorageSlotAndScope.set(scope, store.openMultiMap(`${scope}:notes_by_storage_slot`));
+      pxeDB.#notesByTxHashAndScope.set(scope, store.openMultiMap(`${scope}:notes_by_tx_hash`));
+      pxeDB.#notesByAddressPointAndScope.set(scope, store.openMultiMap(`${scope}:notes_by_address_point`));
+    }
+    return pxeDB;
+  }
+
+  public async addScope(scope: AztecAddress): Promise<boolean> {
+    const scopeString = scope.toString();
+
+    if (await this.#scopes.hasAsync(scopeString)) {
+      return false;
+    }
+
+    await this.#scopes.set(scopeString, true);
+    this.#notesByContractAndScope.set(scopeString, this.#store.openMultiMap(`${scopeString}:notes_by_contract`));
+    this.#notesByStorageSlotAndScope.set(scopeString, this.#store.openMultiMap(`${scopeString}:notes_by_storage_slot`));
+    this.#notesByTxHashAndScope.set(scopeString, this.#store.openMultiMap(`${scopeString}:notes_by_tx_hash`));
+    this.#notesByAddressPointAndScope.set(
+      scopeString,
+      this.#store.openMultiMap(`${scopeString}:notes_by_address_point`),
+    );
+
+    return true;
+  }
+
+  async addNote(note: NoteDao, scope?: AztecAddress): Promise<void> {
+    await this.addNotes([note], scope);
+  }
+
+  async addNotes(notes: NoteDao[], scope: AztecAddress = AztecAddress.ZERO): Promise<void> {
+    if (!(await this.#scopes.hasAsync(scope.toString()))) {
+      await this.addScope(scope);
+    }
+
+    return this.#store.transactionAsync(async () => {
+      for (const dao of notes) {
+        // store notes by their index in the notes hash tree
+        // this provides the uniqueness we need to store individual notes
+        // and should also return notes in the order that they were created.
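The fixed-width key encoding used just below is what makes that ordering work: the store iterates keys lexicographically, and a 32-byte big-endian hex encoding makes lexicographic order agree with numeric order. A small self-contained sketch using the same `toBufferBE` helper from `@aztec/foundation/bigint-buffer` that the code below uses:

```typescript
import { toBufferBE } from '@aztec/foundation/bigint-buffer';

// Fixed-width big-endian keys: numeric order and lexicographic order coincide.
const keys = [1n, 2n, 10n].map(index => toBufferBE(index, 32).toString('hex'));

// Every key is 64 hex chars, so '...0a' (10) correctly sorts after '...02' (2),
// whereas the naive strings '10' < '2' would sort in the wrong order.
console.log(keys.every((key, i) => i === 0 || keys[i - 1] < key)); // true
```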
+        // Had we stored them by their nullifier, they would be returned in random order
+        const noteIndex = toBufferBE(dao.index, 32).toString('hex');
+        await this.#notes.set(noteIndex, dao.toBuffer());
+        await this.#notesToScope.set(noteIndex, scope.toString());
+        await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex);
+
+        await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex);
+        await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex);
+        await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex);
+        await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex);
+      }
+    });
+  }
+
+  public removeNotesAfter(blockNumber: number): Promise<void> {
+    return this.#store.transactionAsync(async () => {
+      const notes = await toArray(this.#notes.valuesAsync());
+      for (const note of notes) {
+        const noteDao = NoteDao.fromBuffer(note);
+        if (noteDao.l2BlockNumber > blockNumber) {
+          const noteIndex = toBufferBE(noteDao.index, 32).toString('hex');
+          await this.#notes.delete(noteIndex);
+          await this.#notesToScope.delete(noteIndex);
+          await this.#nullifierToNoteId.delete(noteDao.siloedNullifier.toString());
+          const scopes = await toArray(this.#scopes.keysAsync());
+          for (const scope of scopes) {
+            await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(noteDao.addressPoint.toString(), noteIndex);
+            await this.#notesByTxHashAndScope.get(scope)!.deleteValue(noteDao.txHash.toString(), noteIndex);
+            await this.#notesByContractAndScope.get(scope)!.deleteValue(noteDao.contractAddress.toString(), noteIndex);
+            await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(noteDao.storageSlot.toString(), noteIndex);
+          }
+        }
+      }
+    });
+  }
+
+  public async unnullifyNotesAfter(blockNumber: number, synchedBlockNumber?: number): Promise<void> {
+    const nullifiersToUndo: string[] = [];
+    const currentBlockNumber = blockNumber + 1;
+    const maxBlockNumber = synchedBlockNumber ?? currentBlockNumber;
+    for (let i = currentBlockNumber; i <= maxBlockNumber; i++) {
+      nullifiersToUndo.push(...(await toArray(this.#nullifiersByBlockNumber.getValuesAsync(i))));
+    }
+    const notesIndexesToReinsert = await Promise.all(
+      nullifiersToUndo.map(nullifier => this.#nullifiedNotesByNullifier.getAsync(nullifier)),
+    );
+    const notNullNoteIndexes = notesIndexesToReinsert.filter(noteIndex => noteIndex != undefined);
+    const nullifiedNoteBuffers = await Promise.all(
+      notNullNoteIndexes.map(noteIndex => this.#nullifiedNotes.getAsync(noteIndex!)),
+    );
+    const noteDaos = nullifiedNoteBuffers
+      .filter(buffer => buffer != undefined)
+      .map(buffer => NoteDao.fromBuffer(buffer!));
+
+    await this.#store.transactionAsync(async () => {
+      for (const dao of noteDaos) {
+        const noteIndex = toBufferBE(dao.index, 32).toString('hex');
+        await this.#notes.set(noteIndex, dao.toBuffer());
+        await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex);
+
+        let scopes = (await toArray(this.#nullifiedNotesToScope.getValuesAsync(noteIndex))) ?? [];
+
+        if (scopes.length === 0) {
+          scopes = [new AztecAddress(dao.addressPoint.x).toString()];
+        }
+
+        for (const scope of scopes) {
+          await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex);
+          await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex);
+          await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex);
+          await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex);
+          await this.#notesToScope.set(noteIndex, scope);
+        }
+
+        await this.#nullifiedNotes.delete(noteIndex);
+        await this.#nullifiedNotesToScope.delete(noteIndex);
+        await this.#nullifiersByBlockNumber.deleteValue(dao.l2BlockNumber, dao.siloedNullifier.toString());
+        await this.#nullifiedNotesByContract.deleteValue(dao.contractAddress.toString(), noteIndex);
+        await this.#nullifiedNotesByStorageSlot.deleteValue(dao.storageSlot.toString(), noteIndex);
+        await this.#nullifiedNotesByTxHash.deleteValue(dao.txHash.toString(), noteIndex);
+        await this.#nullifiedNotesByAddressPoint.deleteValue(dao.addressPoint.toString(), noteIndex);
+        await this.#nullifiedNotesByNullifier.delete(dao.siloedNullifier.toString());
+      }
+    });
+  }
+
+  async getNotes(filter: NotesFilter): Promise<NoteDao[]> {
+    const publicKey: PublicKey | undefined = filter.owner ? await filter.owner.toAddressPoint() : undefined;
+
+    filter.status = filter.status ?? NoteStatus.ACTIVE;
+
+    const candidateNoteSources = [];
+
+    filter.scopes ??= (await toArray(this.#scopes.keysAsync())).map(addressString =>
+      AztecAddress.fromString(addressString),
+    );
+
+    const activeNoteIdsPerScope: string[][] = [];
+
+    for (const scope of new Set(filter.scopes)) {
+      const formattedScopeString = scope.toString();
+      if (!(await this.#scopes.hasAsync(formattedScopeString))) {
+        throw new Error('Trying to get incoming notes of a scope that is not in the PXE database');
+      }
+
+      activeNoteIdsPerScope.push(
+        publicKey
+          ? await toArray(
+              this.#notesByAddressPointAndScope.get(formattedScopeString)!.getValuesAsync(publicKey.toString()),
+            )
+          : filter.txHash
+            ? await toArray(
+                this.#notesByTxHashAndScope.get(formattedScopeString)!.getValuesAsync(filter.txHash.toString()),
+              )
+            : filter.contractAddress
+              ? await toArray(
+                  this.#notesByContractAndScope
+                    .get(formattedScopeString)!
+                    .getValuesAsync(filter.contractAddress.toString()),
+                )
+              : filter.storageSlot
+                ? await toArray(
+                    this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValuesAsync(filter.storageSlot.toString()),
+                  )
+                : await toArray(this.#notesByAddressPointAndScope.get(formattedScopeString)!.valuesAsync()),
+      );
+    }
+
+    candidateNoteSources.push({
+      ids: new Set(activeNoteIdsPerScope.flat()),
+      notes: this.#notes,
+    });
+
+    if (filter.status == NoteStatus.ACTIVE_OR_NULLIFIED) {
+      candidateNoteSources.push({
+        ids: publicKey
+          ? await toArray(this.#nullifiedNotesByAddressPoint.getValuesAsync(publicKey.toString()))
+          : filter.txHash
+            ? await toArray(this.#nullifiedNotesByTxHash.getValuesAsync(filter.txHash.toString()))
+            : filter.contractAddress
+              ? await toArray(this.#nullifiedNotesByContract.getValuesAsync(filter.contractAddress.toString()))
+              : filter.storageSlot
+                ?
await toArray(this.#nullifiedNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString())) + : await toArray(this.#nullifiedNotes.keysAsync()), + notes: this.#nullifiedNotes, + }); + } + + const result: NoteDao[] = []; + for (const { ids, notes } of candidateNoteSources) { + for (const id of ids) { + const serializedNote = await notes.getAsync(id); + if (!serializedNote) { + continue; + } + + const note = NoteDao.fromBuffer(serializedNote); + if (filter.contractAddress && !note.contractAddress.equals(filter.contractAddress)) { + continue; + } + + if (filter.txHash && !note.txHash.equals(filter.txHash)) { + continue; + } + + if (filter.storageSlot && !note.storageSlot.equals(filter.storageSlot!)) { + continue; + } + + if (publicKey && !note.addressPoint.equals(publicKey)) { + continue; + } + + if (filter.siloedNullifier && !note.siloedNullifier.equals(filter.siloedNullifier)) { + continue; + } + + result.push(note); + } + } + + return result; + } + + removeNullifiedNotes(nullifiers: InBlock[], accountAddressPoint: Point): Promise { + if (nullifiers.length === 0) { + return Promise.resolve([]); + } + + return this.#store.transactionAsync(async () => { + const nullifiedNotes: NoteDao[] = []; + + for (const blockScopedNullifier of nullifiers) { + const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier; + const noteIndex = await this.#nullifierToNoteId.getAsync(nullifier.toString()); + if (!noteIndex) { + continue; + } + + const noteBuffer = noteIndex ? await this.#notes.getAsync(noteIndex) : undefined; + + if (!noteBuffer) { + // note doesn't exist. Maybe it got nullified already + continue; + } + const noteScopes = (await toArray(this.#notesToScope.getValuesAsync(noteIndex))) ?? []; + const note = NoteDao.fromBuffer(noteBuffer); + if (!note.addressPoint.equals(accountAddressPoint)) { + // tried to nullify someone else's note + continue; + } + + nullifiedNotes.push(note); + + await this.#notes.delete(noteIndex); + await this.#notesToScope.delete(noteIndex); + + const scopes = await toArray(this.#scopes.keysAsync()); + + for (const scope of scopes) { + await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(accountAddressPoint.toString(), noteIndex); + await this.#notesByTxHashAndScope.get(scope)!.deleteValue(note.txHash.toString(), noteIndex); + await this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex); + await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex); + } + + if (noteScopes !== undefined) { + for (const scope of noteScopes) { + await this.#nullifiedNotesToScope.set(noteIndex, scope); + } + } + await this.#nullifiedNotes.set(noteIndex, note.toBuffer()); + await this.#nullifiersByBlockNumber.set(blockNumber, nullifier.toString()); + await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); + await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); + await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); + await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); + await this.#nullifiedNotesByNullifier.set(nullifier.toString(), noteIndex); + + await this.#nullifierToNoteId.delete(nullifier.toString()); + } + return nullifiedNotes; + }); + } + + async addNullifiedNote(note: NoteDao): Promise { + const noteIndex = toBufferBE(note.index, 32).toString('hex'); + + await this.#nullifiedNotes.set(noteIndex, note.toBuffer()); + await 
this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); + await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); + await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); + await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); + } + + async getSize() { + return (await this.getNotes({})).reduce((sum, note) => sum + note.getSize(), 0); + } +} diff --git a/yarn-project/pxe/src/storage/sync_data_provider/index.ts b/yarn-project/pxe/src/storage/sync_data_provider/index.ts new file mode 100644 index 00000000000..92419737c0f --- /dev/null +++ b/yarn-project/pxe/src/storage/sync_data_provider/index.ts @@ -0,0 +1 @@ +export { SyncDataProvider } from './sync_data_provider.js'; diff --git a/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.test.ts b/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.test.ts new file mode 100644 index 00000000000..3fd32fd2fcb --- /dev/null +++ b/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.test.ts @@ -0,0 +1,26 @@ +import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; +import { randomInt } from '@aztec/foundation/crypto'; +import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; +import { makeHeader } from '@aztec/stdlib/testing'; + +import { SyncDataProvider } from './sync_data_provider.js'; + +describe('block header', () => { + let syncDataProvider: SyncDataProvider; + + beforeEach(async () => { + const store = await openTmpStore('sync_data_provider_test'); + syncDataProvider = new SyncDataProvider(store); + }); + + it('stores and retrieves the block header', async () => { + const header = makeHeader(randomInt(1000), INITIAL_L2_BLOCK_NUM, 0 /** slot number */); + + await syncDataProvider.setHeader(header); + await expect(syncDataProvider.getBlockHeader()).resolves.toEqual(header); + }); + + it('rejects getting header if no block set', async () => { + await expect(() => syncDataProvider.getBlockHeader()).rejects.toThrow(); + }); +}); diff --git a/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.ts b/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.ts new file mode 100644 index 00000000000..32fd14040d0 --- /dev/null +++ b/yarn-project/pxe/src/storage/sync_data_provider/sync_data_provider.ts @@ -0,0 +1,40 @@ +import type { AztecAsyncKVStore, AztecAsyncSingleton } from '@aztec/kv-store'; +import { BlockHeader } from '@aztec/stdlib/tx'; + +import type { DataProvider } from '../data_provider.js'; + +export class SyncDataProvider implements DataProvider { + #store: AztecAsyncKVStore; + #synchronizedHeader: AztecAsyncSingleton; + + constructor(store: AztecAsyncKVStore) { + this.#store = store; + this.#synchronizedHeader = this.#store.openSingleton('header'); + } + + async setHeader(header: BlockHeader): Promise { + await this.#synchronizedHeader.set(header.toBuffer()); + } + + async getBlockNumber(): Promise { + const headerBuffer = await this.#synchronizedHeader.getAsync(); + if (!headerBuffer) { + return undefined; + } + + return Number(BlockHeader.fromBuffer(headerBuffer).globalVariables.blockNumber.toBigInt()); + } + + async getBlockHeader(): Promise { + const headerBuffer = await this.#synchronizedHeader.getAsync(); + if (!headerBuffer) { + throw new Error(`Header not set`); + } + + return BlockHeader.fromBuffer(headerBuffer); + } + + async getSize(): Promise { + return (await this.#synchronizedHeader.getAsync())?.length ?? 
0; + } +} diff --git a/yarn-project/pxe/src/storage/tagging_data_provider/index.ts b/yarn-project/pxe/src/storage/tagging_data_provider/index.ts new file mode 100644 index 00000000000..42085f6867a --- /dev/null +++ b/yarn-project/pxe/src/storage/tagging_data_provider/index.ts @@ -0,0 +1 @@ +export { TaggingDataProvider } from './tagging_data_provider.js'; diff --git a/yarn-project/pxe/src/storage/tagging_data_provider/tagging_data_provider.ts b/yarn-project/pxe/src/storage/tagging_data_provider/tagging_data_provider.ts new file mode 100644 index 00000000000..2d121a28446 --- /dev/null +++ b/yarn-project/pxe/src/storage/tagging_data_provider/tagging_data_provider.ts @@ -0,0 +1,92 @@ +import type { Fr } from '@aztec/foundation/fields'; +import { toArray } from '@aztec/foundation/iterable'; +import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; +import type { IndexedTaggingSecret } from '@aztec/stdlib/logs'; + +export class TaggingDataProvider { + #store: AztecAsyncKVStore; + #addressBook: AztecAsyncMap; + + // Stores the last index used for each tagging secret, taking direction into account + // This is necessary to avoid reusing the same index for the same secret, which happens if + // sender and recipient are the same + #taggingSecretIndexesForSenders: AztecAsyncMap; + #taggingSecretIndexesForRecipients: AztecAsyncMap; + + constructor(store: AztecAsyncKVStore) { + this.#store = store; + + this.#addressBook = this.#store.openMap('address_book'); + + this.#taggingSecretIndexesForSenders = this.#store.openMap('tagging_secret_indexes_for_senders'); + this.#taggingSecretIndexesForRecipients = this.#store.openMap('tagging_secret_indexes_for_recipients'); + } + + async setTaggingSecretsIndexesAsSender(indexedSecrets: IndexedTaggingSecret[]): Promise { + await this.#setTaggingSecretsIndexes(indexedSecrets, this.#taggingSecretIndexesForSenders); + } + + async setTaggingSecretsIndexesAsRecipient(indexedSecrets: IndexedTaggingSecret[]): Promise { + await this.#setTaggingSecretsIndexes(indexedSecrets, this.#taggingSecretIndexesForRecipients); + } + + async #setTaggingSecretsIndexes(indexedSecrets: IndexedTaggingSecret[], storageMap: AztecAsyncMap) { + await Promise.all( + indexedSecrets.map(indexedSecret => + storageMap.set(indexedSecret.appTaggingSecret.toString(), indexedSecret.index), + ), + ); + } + + async getTaggingSecretsIndexesAsRecipient(appTaggingSecrets: Fr[]) { + return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForRecipients); + } + + async getTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]) { + return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForSenders); + } + + #getTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecAsyncMap): Promise { + return Promise.all(appTaggingSecrets.map(async secret => (await storageMap.getAsync(`${secret.toString()}`)) ?? 
0)); + } + + resetNoteSyncData(): Promise { + return this.#store.transactionAsync(async () => { + const recipients = await toArray(this.#taggingSecretIndexesForRecipients.keysAsync()); + await Promise.all(recipients.map(recipient => this.#taggingSecretIndexesForRecipients.delete(recipient))); + const senders = await toArray(this.#taggingSecretIndexesForSenders.keysAsync()); + await Promise.all(senders.map(sender => this.#taggingSecretIndexesForSenders.delete(sender))); + }); + } + + async addSenderAddress(address: AztecAddress): Promise { + if (await this.#addressBook.hasAsync(address.toString())) { + return false; + } + + await this.#addressBook.set(address.toString(), true); + + return true; + } + + async getSenderAddresses(): Promise { + return (await toArray(this.#addressBook.keysAsync())).map(AztecAddress.fromString); + } + + async removeSenderAddress(address: AztecAddress): Promise { + if (!(await this.#addressBook.hasAsync(address.toString()))) { + return false; + } + + await this.#addressBook.delete(address.toString()); + + return true; + } + + async getSize() { + const addressesCount = (await toArray(this.#addressBook.keysAsync())).length; + // All keys are addresses + return 3 * addressesCount * AztecAddress.SIZE_IN_BYTES; + } +} diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index cbe65671435..f676fd12d2a 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -7,15 +7,17 @@ import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; -import type { PxeDatabase } from '../database/index.js'; -import { KVPxeDatabase } from '../database/kv_pxe_database.js'; +import { NoteDataProvider } from '../storage/note_data_provider/note_data_provider.js'; +import { SyncDataProvider } from '../storage/sync_data_provider/sync_data_provider.js'; +import { TaggingDataProvider } from '../storage/tagging_data_provider/tagging_data_provider.js'; import { Synchronizer } from './synchronizer.js'; describe('Synchronizer', () => { - let database: PxeDatabase; let synchronizer: Synchronizer; - let tipsStore: L2TipsStore; // eslint-disable-line @typescript-eslint/no-unused-vars - + let tipsStore: L2TipsStore; + let syncDataProvider: SyncDataProvider; + let noteDataProvider: NoteDataProvider; + let taggingDataProvider: TaggingDataProvider; let aztecNode: MockProxy; let blockStream: MockProxy; @@ -29,23 +31,31 @@ describe('Synchronizer', () => { const store = await openTmpStore('test'); blockStream = mock(); aztecNode = mock(); - database = await KVPxeDatabase.create(store); tipsStore = new L2TipsStore(store, 'pxe'); - synchronizer = new TestSynchronizer(aztecNode, database, tipsStore); + syncDataProvider = new SyncDataProvider(store); + noteDataProvider = await NoteDataProvider.create(store); + taggingDataProvider = new TaggingDataProvider(store); + synchronizer = new TestSynchronizer(aztecNode, syncDataProvider, noteDataProvider, taggingDataProvider, tipsStore); }); it('sets header from latest block', async () => { const block = await L2Block.random(1, 4); await synchronizer.handleBlockStreamEvent({ type: 'blocks-added', blocks: [block] }); - const obtainedHeader = await database.getBlockHeader(); + const obtainedHeader = await syncDataProvider.getBlockHeader(); expect(obtainedHeader).toEqual(block.header); }); it('removes notes from db on a reorg', 
async () => { - const removeNotesAfter = jest.spyOn(database, 'removeNotesAfter').mockImplementation(() => Promise.resolve()); - const unnullifyNotesAfter = jest.spyOn(database, 'unnullifyNotesAfter').mockImplementation(() => Promise.resolve()); - const resetNoteSyncData = jest.spyOn(database, 'resetNoteSyncData').mockImplementation(() => Promise.resolve()); + const removeNotesAfter = jest + .spyOn(noteDataProvider, 'removeNotesAfter') + .mockImplementation(() => Promise.resolve()); + const unnullifyNotesAfter = jest + .spyOn(noteDataProvider, 'unnullifyNotesAfter') + .mockImplementation(() => Promise.resolve()); + const resetNoteSyncData = jest + .spyOn(taggingDataProvider, 'resetNoteSyncData') + .mockImplementation(() => Promise.resolve()); aztecNode.getBlockHeader.mockImplementation( async blockNumber => (await L2Block.random(blockNumber as number)).header, ); @@ -57,7 +67,7 @@ describe('Synchronizer', () => { await synchronizer.handleBlockStreamEvent({ type: 'chain-pruned', blockNumber: 3 }); expect(removeNotesAfter).toHaveBeenCalledWith(3); - expect(unnullifyNotesAfter).toHaveBeenCalledWith(3); + expect(unnullifyNotesAfter).toHaveBeenCalledWith(3, 4); expect(resetNoteSyncData).toHaveBeenCalled(); }); }); diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index ba89d5a4da1..d418feacf1d 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -5,7 +5,9 @@ import { L2BlockStream, type L2BlockStreamEvent, type L2BlockStreamEventHandler import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import type { PXEConfig } from '../config/index.js'; -import type { PxeDatabase } from '../database/index.js'; +import type { NoteDataProvider } from '../storage/note_data_provider/note_data_provider.js'; +import type { SyncDataProvider } from '../storage/sync_data_provider/sync_data_provider.js'; +import type { TaggingDataProvider } from '../storage/tagging_data_provider/tagging_data_provider.js'; /** * The Synchronizer class manages the synchronization with the aztec node, allowing PXE to retrieve the @@ -21,7 +23,9 @@ export class Synchronizer implements L2BlockStreamEventHandler { constructor( private node: AztecNode, - private db: PxeDatabase, + private syncDataProvider: SyncDataProvider, + private noteDataProvider: NoteDataProvider, + private taggingDataProvider: TaggingDataProvider, private l2TipsStore: L2TipsStore, config: Partial> = {}, loggerOrSuffix?: string | Logger, @@ -51,23 +55,24 @@ export class Synchronizer implements L2BlockStreamEventHandler { archive: lastBlock.archive.root.toString(), header: lastBlock.header.toInspect(), }); - await this.db.setHeader(lastBlock.header); + await this.syncDataProvider.setHeader(lastBlock.header); break; } case 'chain-pruned': { this.log.warn(`Pruning data after block ${event.blockNumber} due to reorg`); // We first unnullify and then remove so that unnullified notes that were created after the block number end up deleted. - await this.db.unnullifyNotesAfter(event.blockNumber); - await this.db.removeNotesAfter(event.blockNumber); + const lastSynchedBlockNumber = await this.syncDataProvider.getBlockNumber(); + await this.noteDataProvider.unnullifyNotesAfter(event.blockNumber, lastSynchedBlockNumber); + await this.noteDataProvider.removeNotesAfter(event.blockNumber); // Remove all note tagging indexes to force a full resync. 
This is suboptimal, but unless we track the // block number in which each index is used it's all we can do. - await this.db.resetNoteSyncData(); + await this.taggingDataProvider.resetNoteSyncData(); // Update the header to the last block. const newHeader = await this.node.getBlockHeader(event.blockNumber); if (!newHeader) { this.log.error(`Block header not found for block number ${event.blockNumber} during chain prune`); } else { - await this.db.setHeader(newHeader); + await this.syncDataProvider.setHeader(newHeader); } break; } @@ -99,18 +104,18 @@ export class Synchronizer implements L2BlockStreamEventHandler { let currentHeader; try { - currentHeader = await this.db.getBlockHeader(); + currentHeader = await this.syncDataProvider.getBlockHeader(); } catch (e) { this.log.debug('Header is not set, requesting from the node'); } if (!currentHeader) { // REFACTOR: We should know the header of the genesis block without having to request it from the node. - await this.db.setHeader((await this.node.getBlockHeader(0))!); + await this.syncDataProvider.setHeader((await this.node.getBlockHeader(0))!); } await this.blockStream.sync(); } public async getSynchedBlockNumber() { - return (await this.db.getBlockNumber()) ?? this.initialSyncBlockNumber; + return (await this.syncDataProvider.getBlockNumber()) ?? this.initialSyncBlockNumber; } } diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/test/pxe_service.test.ts similarity index 79% rename from yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts rename to yarn-project/pxe/src/test/pxe_service.test.ts index e0e26b7d491..9cfa935f5ed 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/test/pxe_service.test.ts @@ -2,9 +2,8 @@ import { BBWASMBundlePrivateKernelProver } from '@aztec/bb-prover/wasm/bundle'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import type { L1ContractAddresses } from '@aztec/ethereum/l1-contract-addresses'; import { EthAddress } from '@aztec/foundation/eth-address'; -import { KeyStore } from '@aztec/key-store'; +import type { AztecAsyncKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/lmdb-v2'; -import { L2TipsStore } from '@aztec/kv-store/stores'; import type { ProtocolContractsProvider } from '@aztec/protocol-contracts'; import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle'; import { type SimulationProvider, WASMSimulator } from '@aztec/simulator/client'; @@ -15,20 +14,15 @@ import { TxEffect } from '@aztec/stdlib/tx'; import { type MockProxy, mock } from 'jest-mock-extended'; -import type { PxeDatabase } from '../../database/interfaces/pxe_database.js'; -import { KVPxeDatabase } from '../../database/kv_pxe_database.js'; -import type { PXEServiceConfig } from '../../index.js'; -import { PXEService } from '../pxe_service.js'; +import type { PXEServiceConfig } from '../config/index.js'; +import { PXEService } from '../pxe_service/pxe_service.js'; import { pxeTestSuite } from './pxe_test_suite.js'; async function createPXEService(): Promise { const kvStore = await openTmpStore('test'); - const keyStore = new KeyStore(kvStore); const node = mock(); - const db = await KVPxeDatabase.create(kvStore); const simulationProvider = new WASMSimulator(); const kernelProver = new BBWASMBundlePrivateKernelProver(simulationProvider); - const tips = new L2TipsStore(kvStore, 'pxe'); const protocolContractsProvider = new BundledProtocolContractsProvider(); const config: 
PXEServiceConfig = { l2StartingBlock: INITIAL_L2_BLOCK_NUM, @@ -59,29 +53,22 @@ async function createPXEService(): Promise { }; node.getL1ContractAddresses.mockResolvedValue(mockedContracts); - return Promise.resolve( - new PXEService(keyStore, node, db, tips, kernelProver, simulationProvider, protocolContractsProvider, config), - ); + return await PXEService.create(node, kvStore, kernelProver, simulationProvider, protocolContractsProvider, config); } pxeTestSuite('PXEService', createPXEService); describe('PXEService', () => { - let keyStore: KeyStore; + let kvStore: AztecAsyncKVStore; let node: MockProxy; - let db: PxeDatabase; let simulationProvider: SimulationProvider; let kernelProver: PrivateKernelProver; let config: PXEServiceConfig; - let tips: L2TipsStore; let protocolContractsProvider: ProtocolContractsProvider; beforeEach(async () => { - const kvStore = await openTmpStore('test'); - keyStore = new KeyStore(kvStore); + kvStore = await openTmpStore('test'); node = mock(); - tips = new L2TipsStore(kvStore, 'pxe'); - db = await KVPxeDatabase.create(kvStore); simulationProvider = new WASMSimulator(); kernelProver = new BBWASMBundlePrivateKernelProver(simulationProvider); protocolContractsProvider = new BundledProtocolContractsProvider(); @@ -103,11 +90,9 @@ describe('PXEService', () => { node.getTxEffect.mockResolvedValue(randomInBlock(settledTx)); - const pxe = new PXEService( - keyStore, + const pxe = await PXEService.create( node, - db, - tips, + kvStore, kernelProver, simulationProvider, protocolContractsProvider, diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts b/yarn-project/pxe/src/test/pxe_test_suite.ts similarity index 91% rename from yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts rename to yarn-project/pxe/src/test/pxe_test_suite.ts index 6764b008a2f..c1559dc13bb 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts +++ b/yarn-project/pxe/src/test/pxe_test_suite.ts @@ -81,7 +81,7 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => it('refuses to register a contract with a class that has not been registered', async () => { const instance = await randomContractInstanceWithAddress(); - await expect(pxe.registerContract({ instance })).rejects.toThrow(/Missing contract artifact/i); + await expect(pxe.registerContract({ instance })).rejects.toThrow(/DB has no contract class with id/i); }); it('refuses to register a contract with an artifact with mismatching class id', async () => { @@ -93,14 +93,7 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => // Note: Not testing a successful run of `proveTx`, `sendTx`, `getTxReceipt` and `simulateUnconstrained` here as it requires // a larger setup and it's sufficiently tested in the e2e tests. - it('throws when getting public storage for non-existent contract', async () => { - const contract = await AztecAddress.random(); - await expect(async () => await pxe.getPublicStorageAt(contract, new Fr(0n))).rejects.toThrow( - `Contract ${contract.toString()} is not deployed`, - ); - }); - - // Note: Not testing `getContractData` and `getPublicLogs` here as these + // Note: Not testing `getContractData`, `getPublicLogs` and `getPublicStorageAt` here as these // functions only call AztecNode and these methods are frequently used by the e2e tests. 
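Stepping back to the `chain-pruned` handler in the synchronizer hunk above: the order of operations matters. Unnullifying first lets the subsequent removal delete any note created after the pruned block, whether it was just restored or not, and the tagging indexes are then dropped wholesale to force a resync. A condensed sketch of that sequence, assuming the provider classes exported from `@aztec/pxe/server` (as imported elsewhere in this diff):

```typescript
import type { NoteDataProvider, SyncDataProvider, TaggingDataProvider } from '@aztec/pxe/server';

// Sketch of the reorg recovery sequence; error handling and header update elided.
async function handleChainPruned(
  blockNumber: number,
  sync: SyncDataProvider,
  notes: NoteDataProvider,
  tagging: TaggingDataProvider,
): Promise<void> {
  const lastSynched = await sync.getBlockNumber();
  // 1. Restore notes whose nullifiers landed in now-pruned blocks...
  await notes.unnullifyNotesAfter(blockNumber, lastSynched);
  // 2. ...then drop every note created after the pruned block, including restored ones.
  await notes.removeNotesAfter(blockNumber);
  // 3. Without per-index block tracking, a full tagging resync is the only safe option.
  await tagging.resetNoteSyncData();
}
```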
  it('successfully gets a block number', async () => {
diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts
index e706f3a52a8..92890cd339f 100644
--- a/yarn-project/sequencer-client/src/publisher/config.ts
+++ b/yarn-project/sequencer-client/src/publisher/config.ts
@@ -46,7 +46,7 @@ export const getTxSenderConfigMappings: (
     defaultValue: EthAddress.ZERO,
   },
   publisherPrivateKey: {
-    env: `${scope}_PUBLISHER_PRIVATE_KEY`,
+    env: scope === 'PROVER' ? `PROVER_PUBLISHER_PRIVATE_KEY` : `SEQ_PUBLISHER_PRIVATE_KEY`,
     description: 'The private key to be used by the publisher.',
     parseEnv: (val: string) => (val ? `0x${val.replace('0x', '')}` : NULL_KEY),
     defaultValue: NULL_KEY,
@@ -61,7 +61,7 @@ export const getPublisherConfigMappings: (
   scope: 'PROVER' | 'SEQ',
 ) => ConfigMappingsType = scope => ({
   l1PublishRetryIntervalMS: {
-    env: `${scope}_PUBLISH_RETRY_INTERVAL_MS`,
+    env: scope === `PROVER` ? `PROVER_PUBLISH_RETRY_INTERVAL_MS` : `SEQ_PUBLISH_RETRY_INTERVAL_MS`,
     parseEnv: (val: string) => +val,
     defaultValue: 1000,
     description: 'The interval to wait between publish retries.',
diff --git a/yarn-project/simulator/src/private/acvm/oracle/oracle.ts b/yarn-project/simulator/src/private/acvm/oracle/oracle.ts
index c0305d30569..c0792338d1e 100644
--- a/yarn-project/simulator/src/private/acvm/oracle/oracle.ts
+++ b/yarn-project/simulator/src/private/acvm/oracle/oracle.ts
@@ -138,7 +138,7 @@ export class Oracle {
     const messageHashField = fromACVMField(messageHash);
     const witness = await this.typedOracle.getAuthWitness(messageHashField);
     if (!witness) {
-      throw new Error(`Authorization not found for message hash ${messageHashField}`);
+      throw new Error(`Unknown auth witness for message hash ${messageHashField}`);
     }
     return witness.map(toACVMField);
   }
diff --git a/yarn-project/simulator/src/private/execution_data_provider.ts b/yarn-project/simulator/src/private/execution_data_provider.ts
index d2913e529c1..5dff5778d87 100644
--- a/yarn-project/simulator/src/private/execution_data_provider.ts
+++ b/yarn-project/simulator/src/private/execution_data_provider.ts
@@ -54,7 +54,7 @@ export interface ExecutionDataProvider extends CommitmentsDB {
    * @param messageHash - The message hash.
    * @returns A Promise that resolves to an array of field elements representing the auth witness.
    */
-  getAuthWitness(messageHash: Fr): Promise<Fr[]>;
+  getAuthWitness(messageHash: Fr): Promise<Fr[] | undefined>;
 
   /**
    * Retrieve keys associated with a specific master public key and app address.
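The `ExecutionDataProvider` change above moves the missing-witness decision to the caller: the provider may resolve to `undefined`, and it is the ACVM-facing `Oracle` that converts that into the reworded error. A minimal sketch of the pattern, with a hypothetical in-memory provider standing in for the real interface:

```typescript
import { Fr } from '@aztec/foundation/fields';

// Hypothetical in-memory lookup standing in for an ExecutionDataProvider implementation.
const witnesses = new Map<string, Fr[]>();

// The provider resolves to undefined rather than throwing...
function getAuthWitness(messageHash: Fr): Promise<Fr[] | undefined> {
  return Promise.resolve(witnesses.get(messageHash.toString()));
}

// ...and the caller decides that a missing witness is an error, mirroring the Oracle hunk above.
async function requireAuthWitness(messageHash: Fr): Promise<Fr[]> {
  const witness = await getAuthWitness(messageHash);
  if (!witness) {
    throw new Error(`Unknown auth witness for message hash ${messageHash}`);
  }
  return witness;
}
```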
diff --git a/yarn-project/simulator/src/private/private_execution.ts b/yarn-project/simulator/src/private/private_execution.ts
index 51bbfa26f5c..00673305192 100644
--- a/yarn-project/simulator/src/private/private_execution.ts
+++ b/yarn-project/simulator/src/private/private_execution.ts
@@ -6,6 +6,7 @@ import { ProtocolContractAddress } from '@aztec/protocol-contracts';
 import { type FunctionArtifact, type FunctionSelector, countArgumentsSize } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import type { ContractInstance } from '@aztec/stdlib/contract';
+import type { AztecNode } from '@aztec/stdlib/interfaces/client';
 import { PrivateCircuitPublicInputs } from '@aztec/stdlib/kernel';
 import { SharedMutableValues, SharedMutableValuesWithHash } from '@aztec/stdlib/shared-mutable';
 import type { CircuitWitnessGenerationStats } from '@aztec/stdlib/stats';
@@ -120,7 +121,7 @@ export function extractPrivateCircuitPublicInputs(
 export async function readCurrentClassId(
   contractAddress: AztecAddress,
   instance: ContractInstance,
-  executionDataProvider: ExecutionDataProvider,
+  executionDataProvider: ExecutionDataProvider | AztecNode,
   blockNumber: number,
 ) {
   const { sharedMutableSlot } = await SharedMutableValuesWithHash.getContractUpdateSlots(contractAddress);
diff --git a/yarn-project/stdlib/src/database-version/README.md b/yarn-project/stdlib/src/database-version/README.md
index 7fcb94cff82..ea2b739377c 100644
--- a/yarn-project/stdlib/src/database-version/README.md
+++ b/yarn-project/stdlib/src/database-version/README.md
@@ -12,8 +12,8 @@ The Version Manager helps manage database migrations and version compatibility a
 ## Usage
 
 ```typescript
-import { EthAddress } from '@aztec/foundation/eth-address';
 import { version } from '@aztec/foundation';
+import { EthAddress } from '@aztec/foundation/eth-address';
 
 // Define your current database version
 const DB_VERSION = 3;
@@ -44,7 +44,7 @@ await versionManager.checkVersionAndHandle(
       // Unsupported migration path, will fall back to reset
       throw new Error(`Cannot upgrade from ${oldVersion} to ${newVersion}`);
     }
-  }
+  },
 );
 
 // Get the data directory for your service
@@ -60,4 +60,4 @@ The database will be reset in the following conditions:
 3. Version has changed and no upgrade callback is provided
 4. Upgrade callback throws an error
 
-When a reset occurs, the data directory is deleted and recreated, and the reset callback is called to initialize a fresh database.
\ No newline at end of file
+When a reset occurs, the data directory is deleted and recreated, and the reset callback is called to initialize a fresh database.
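Reviewer note on the database-version README hunks above: the fallback behaviour they document amounts to "upgrade if possible, otherwise reset". The sketch below restates that decision flow in plain TypeScript; it is illustrative only and not part of this patch. `handleVersionChange`, `onUpgrade` and `onReset` are hypothetical names (the real entry point is `versionManager.checkVersionAndHandle`, as the README's usage example shows), and because this hunk only exposes reset conditions 3 and 4, conditions 1 and 2 are omitted here.

```typescript
// Illustrative sketch, not part of the patch.
const DB_VERSION = 3; // mirrors the README example

async function handleVersionChange(
  storedVersion: number | undefined,
  onReset: () => Promise<void>, // deletes and recreates the data directory, then re-initializes
  onUpgrade?: (oldVersion: number, newVersion: number) => Promise<void>,
): Promise<void> {
  if (storedVersion === undefined || storedVersion === DB_VERSION) {
    return; // nothing stored yet, or already at the current version
  }
  if (!onUpgrade) {
    // Condition 3: version has changed and no upgrade callback is provided.
    return onReset();
  }
  try {
    await onUpgrade(storedVersion, DB_VERSION);
  } catch {
    // Condition 4: the upgrade callback threw (e.g. an unsupported migration path).
    return onReset();
  }
}
```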
diff --git a/yarn-project/txe/src/index.ts b/yarn-project/txe/src/index.ts
index 6af9cadb934..23918460c91 100644
--- a/yarn-project/txe/src/index.ts
+++ b/yarn-project/txe/src/index.ts
@@ -11,6 +11,8 @@ import {
 } from '@aztec/aztec.js';
 import { createSafeJsonRpcServer } from '@aztec/foundation/json-rpc/server';
 import type { Logger } from '@aztec/foundation/log';
+import { type ProtocolContract, protocolContractNames } from '@aztec/protocol-contracts';
+import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle';
 import type { ApiSchemaFor, ZodFor } from '@aztec/stdlib/schemas';
 
 import { readFile, readdir } from 'fs/promises';
@@ -59,6 +61,8 @@ const TXEForeignCallInputSchema = z.object({
 }) satisfies ZodFor<TXEForeignCallInput>;
 
 class TXEDispatcher {
+  private protocolContracts!: ProtocolContract[];
+
   constructor(private logger: Logger) {}
 
   async #processDeployInputs({ inputs, root_path: rootPath, package_name: packageName }: TXEForeignCallInput) {
@@ -162,7 +166,12 @@ class TXEDispatcher {
 
     if (!TXESessions.has(sessionId) && functionName != 'reset') {
       this.logger.debug(`Creating new session ${sessionId}`);
-      TXESessions.set(sessionId, await TXEService.init(this.logger));
+      if (!this.protocolContracts) {
+        this.protocolContracts = await Promise.all(
+          protocolContractNames.map(name => new BundledProtocolContractsProvider().getProtocolContractArtifact(name)),
+        );
+      }
+      TXESessions.set(sessionId, await TXEService.init(this.logger, this.protocolContracts));
     }
 
     switch (functionName) {
diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts
index 34839d032a0..2b9ff4973a7 100644
--- a/yarn-project/txe/src/oracle/txe_oracle.ts
+++ b/yarn-project/txe/src/oracle/txe_oracle.ts
@@ -11,20 +11,31 @@ import {
   PUBLIC_DISPATCH_SELECTOR,
 } from '@aztec/constants';
 import { padArrayEnd } from '@aztec/foundation/collection';
-import { Schnorr, poseidon2Hash } from '@aztec/foundation/crypto';
+import { Aes128, Schnorr, poseidon2Hash } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
-import { type LogFn, type Logger, applyStringFormatting, createDebugOnlyLogger } from '@aztec/foundation/log';
+import { type Logger, applyStringFormatting } from '@aztec/foundation/log';
 import { Timer } from '@aztec/foundation/timer';
-import type { KeyStore } from '@aztec/key-store';
-import { ContractDataProvider, PXEDataProvider, enrichPublicSimulationError } from '@aztec/pxe';
+import { KeyStore } from '@aztec/key-store';
+import type { AztecAsyncKVStore } from '@aztec/kv-store';
+import type { ProtocolContract } from '@aztec/protocol-contracts';
+import {
+  AddressDataProvider,
+  AuthWitnessDataProvider,
+  CapsuleDataProvider,
+  ContractDataProvider,
+  NoteDataProvider,
+  PXEOracleInterface,
+  SyncDataProvider,
+  TaggingDataProvider,
+  enrichPublicSimulationError,
+} from '@aztec/pxe/server';
 import {
   ExecutionNoteCache,
-  type HashedValuesCache,
+  HashedValuesCache,
   type MessageLoadOracleInputs,
   type NoteData,
   Oracle,
   type TypedOracle,
-  UnconstrainedExecutionOracle,
   WASMSimulator,
   extractCallStack,
   extractPrivateCircuitPublicInputs,
@@ -62,7 +73,7 @@ import {
 } from '@aztec/stdlib/hash';
 import type { MerkleTreeReadOperations, MerkleTreeWriteOperations } from '@aztec/stdlib/interfaces/server';
 import { type KeyValidationRequest, PrivateContextInputs } from '@aztec/stdlib/kernel';
-import { computeTaggingSecretPoint, deriveKeys } from '@aztec/stdlib/keys';
+import { deriveKeys } from '@aztec/stdlib/keys';
 import { ContractClassLog, LogWithTxData } from '@aztec/stdlib/logs';
 import { IndexedTaggingSecret, type PrivateLog, type PublicLog } from '@aztec/stdlib/logs';
 import type { NoteStatus } from '@aztec/stdlib/note';
@@ -83,10 +94,10 @@ import {
   PublicDataWitness,
 } from '@aztec/stdlib/trees';
 import { BlockHeader, CallContext, GlobalVariables, PublicExecutionRequest, TxEffect, TxHash } from '@aztec/stdlib/tx';
-import { ForkCheckpoint, type NativeWorldStateService } from '@aztec/world-state/native';
+import { ForkCheckpoint, NativeWorldStateService } from '@aztec/world-state/native';
 
 import { TXENode } from '../node/txe_node.js';
-import type { TXEDatabase } from '../util/txe_database.js';
+import { TXEAccountDataProvider } from '../util/txe_account_data_provider.js';
 import { TXEPublicContractDataSource } from '../util/txe_public_contract_data_source.js';
 import { TXEWorldStateDB } from '../util/txe_world_state_db.js';
@@ -100,9 +111,7 @@ export class TXE implements TypedOracle {
   private nestedCallReturndata: Fr[] = [];
   private nestedCallSuccess: boolean = false;
 
-  private contractDataProvider: ContractDataProvider;
-  private pxeDataProvider: PXEDataProvider;
-  private viewDataOracle: UnconstrainedExecutionOracle;
+  private pxeOracleInterface: PXEOracleInterface;
 
   private publicDataWrites: PublicDataWrite[] = [];
   private uniqueNoteHashesFromPublic: Fr[] = [];
@@ -121,57 +130,77 @@ export class TXE implements TypedOracle {
 
   private noteCache: ExecutionNoteCache;
 
-  debug: LogFn;
-
   private constructor(
     private logger: Logger,
-    private executionCache: HashedValuesCache,
     private keyStore: KeyStore,
-    private txeDatabase: TXEDatabase,
+    private contractDataProvider: ContractDataProvider,
+    private noteDataProvider: NoteDataProvider,
+    private capsuleDataProvider: CapsuleDataProvider,
+    private syncDataProvider: SyncDataProvider,
+    private taggingDataProvider: TaggingDataProvider,
+    private addressDataProvider: AddressDataProvider,
+    private authWitnessDataProvider: AuthWitnessDataProvider,
+    private accountDataProvider: TXEAccountDataProvider,
+    private executionCache: HashedValuesCache,
     private contractAddress: AztecAddress,
     private nativeWorldStateService: NativeWorldStateService,
     private baseFork: MerkleTreeWriteOperations,
   ) {
     this.noteCache = new ExecutionNoteCache(this.getTxRequestHash());
-    this.contractDataProvider = new ContractDataProvider(txeDatabase);
     this.node = new TXENode(this.blockNumber, this.VERSION, this.CHAIN_ID, nativeWorldStateService, baseFork);
-
     // Default msg_sender (for entrypoints) is now Fr.max_value rather than 0 addr (see #7190 & #7404)
     this.msgSender = AztecAddress.fromField(Fr.MAX_FIELD_VALUE);
-    this.pxeDataProvider = new PXEDataProvider(
-      txeDatabase,
-      keyStore,
+
+    this.pxeOracleInterface = new PXEOracleInterface(
       this.node,
+      this.keyStore,
       this.simulationProvider,
       this.contractDataProvider,
+      this.noteDataProvider,
+      this.capsuleDataProvider,
+      this.syncDataProvider,
+      this.taggingDataProvider,
+      this.addressDataProvider,
+      this.authWitnessDataProvider,
+      this.logger,
     );
+  }
 
-    this.viewDataOracle = new UnconstrainedExecutionOracle(
-      this.contractAddress,
-      [] /* authWitnesses */,
-      [] /* capsules */,
-      this.pxeDataProvider, // note: PXEDataProvider implements ExecutionDataProvider
-      /* log, */
-      /* scopes, */
-    );
+  static async create(logger: Logger, store: AztecAsyncKVStore, protocolContracts: ProtocolContract[]) {
+    const executionCache = new HashedValuesCache();
+    const nativeWorldStateService = await NativeWorldStateService.tmp();
+    const baseFork = await nativeWorldStateService.fork();
-    this.debug = createDebugOnlyLogger('aztec:kv-pxe-database');
-  }
+    const addressDataProvider = new AddressDataProvider(store);
+    const authWitnessDataProvider = new AuthWitnessDataProvider(store);
+    const contractDataProvider = new ContractDataProvider(store);
+    const noteDataProvider = await NoteDataProvider.create(store);
+    const syncDataProvider = new SyncDataProvider(store);
+    const taggingDataProvider = new TaggingDataProvider(store);
+    const capsuleDataProvider = new CapsuleDataProvider(store);
+    const keyStore = new KeyStore(store);
+
+    const accountDataProvider = new TXEAccountDataProvider(store);
+
+    // Register protocol contracts.
+    for (const { contractClass, instance, artifact } of protocolContracts) {
+      await contractDataProvider.addContractArtifact(contractClass.id, artifact);
+      await contractDataProvider.addContractInstance(instance);
+    }
 
-  static async create(
-    logger: Logger,
-    executionCache: HashedValuesCache,
-    keyStore: KeyStore,
-    txeDatabase: TXEDatabase,
-    nativeWorldStateService: NativeWorldStateService,
-    baseFork: MerkleTreeWriteOperations,
-  ) {
     return new TXE(
       logger,
-      executionCache,
       keyStore,
-      txeDatabase,
+      contractDataProvider,
+      noteDataProvider,
+      capsuleDataProvider,
+      syncDataProvider,
+      taggingDataProvider,
+      addressDataProvider,
+      authWitnessDataProvider,
+      accountDataProvider,
+      executionCache,
       await AztecAddress.random(),
       nativeWorldStateService,
       baseFork,
@@ -233,20 +262,24 @@ export class TXE implements TypedOracle {
     return this.contractDataProvider;
   }
 
-  getTXEDatabase() {
-    return this.txeDatabase;
-  }
-
   getKeyStore() {
     return this.keyStore;
   }
 
+  getAccountDataProvider() {
+    return this.accountDataProvider;
+  }
+
+  getAddressDataProvider() {
+    return this.addressDataProvider;
+  }
+
   async addContractInstance(contractInstance: ContractInstanceWithAddress) {
-    await this.txeDatabase.addContractInstance(contractInstance);
+    await this.contractDataProvider.addContractInstance(contractInstance);
   }
 
   async addContractArtifact(contractClassId: Fr, artifact: ContractArtifact) {
-    await this.txeDatabase.addContractArtifact(contractClassId, artifact);
+    await this.contractDataProvider.addContractArtifact(contractClassId, artifact);
   }
 
   async getPrivateContextInputs(
@@ -287,12 +320,12 @@
   }
 
   async addAuthWitness(address: AztecAddress, messageHash: Fr) {
-    const account = await this.txeDatabase.getAccount(address);
+    const account = await this.accountDataProvider.getAccount(address);
     const privateKey = await this.keyStore.getMasterSecretKey(account.publicKeys.masterIncomingViewingPublicKey);
     const schnorr = new Schnorr();
     const signature = await schnorr.constructSignature(messageHash.toBuffer(), privateKey);
     const authWitness = new AuthWitness(messageHash, [...signature.toBuffer()]);
-    return this.txeDatabase.addAuthWitness(authWitness.requestHash, authWitness.witness);
+    return this.authWitnessDataProvider.addAuthWitness(authWitness.requestHash, authWitness.witness);
   }
 
   async addPublicDataWrites(writes: PublicDataWrite[]) {
@@ -501,11 +534,11 @@
   }
 
   getCompleteAddress(account: AztecAddress) {
-    return Promise.resolve(this.txeDatabase.getAccount(account));
+    return Promise.resolve(this.accountDataProvider.getAccount(account));
   }
 
   getAuthWitness(messageHash: Fr) {
-    return this.txeDatabase.getAuthWitness(messageHash);
+    return this.pxeOracleInterface.getAuthWitness(messageHash);
   }
 
   async getNotes(
@@ -528,7 +561,7 @@ export class TXE implements TypedOracle {
     const pendingNotes = this.noteCache.getNotes(this.contractAddress, storageSlot);
 
     const pendingNullifiers = this.noteCache.getNullifiers(this.contractAddress);
-    const dbNotes = await this.pxeDataProvider.getNotes(this.contractAddress, storageSlot, status);
+    const dbNotes = await this.pxeOracleInterface.getNotes(this.contractAddress, storageSlot, status);
     const dbNotesFiltered = dbNotes.filter(n => !pendingNullifiers.has((n.siloedNullifier as Fr).value));
 
     const notes = pickNotes([...dbNotesFiltered, ...pendingNotes], {
@@ -986,12 +1019,7 @@
     // Poor man's revert handling
     if (!executionResult.revertCode.isOK()) {
       if (executionResult.revertReason && executionResult.revertReason instanceof SimulationError) {
-        await enrichPublicSimulationError(
-          executionResult.revertReason,
-          this.contractDataProvider,
-          this.txeDatabase,
-          this.logger,
-        );
+        await enrichPublicSimulationError(executionResult.revertReason, this.contractDataProvider, this.logger);
         throw new Error(executionResult.revertReason.message);
       } else {
         throw new Error(`Enqueued public function call reverted: ${executionResult.revertReason}`);
       }
@@ -1043,38 +1071,25 @@
   }
 
   async incrementAppTaggingSecretIndexAsSender(sender: AztecAddress, recipient: AztecAddress): Promise<void> {
-    const appSecret = await this.#calculateAppTaggingSecret(this.contractAddress, sender, recipient);
-    const [index] = await this.txeDatabase.getTaggingSecretsIndexesAsSender([appSecret]);
-    await this.txeDatabase.setTaggingSecretsIndexesAsSender([new IndexedTaggingSecret(appSecret, index + 1)]);
+    await this.pxeOracleInterface.incrementAppTaggingSecretIndexAsSender(this.contractAddress, sender, recipient);
   }
 
   async getIndexedTaggingSecretAsSender(sender: AztecAddress, recipient: AztecAddress): Promise<IndexedTaggingSecret> {
-    const secret = await this.#calculateAppTaggingSecret(this.contractAddress, sender, recipient);
-    const [index] = await this.txeDatabase.getTaggingSecretsIndexesAsSender([secret]);
-    return new IndexedTaggingSecret(secret, index);
-  }
-
-  async #calculateAppTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) {
-    const senderCompleteAddress = await this.getCompleteAddress(sender);
-    const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender);
-    const secretPoint = await computeTaggingSecretPoint(senderCompleteAddress, senderIvsk, recipient);
-    // Silo the secret to the app so it can't be used to track other app's notes
-    const appSecret = poseidon2Hash([secretPoint.x, secretPoint.y, contractAddress]);
-    return appSecret;
+    return await this.pxeOracleInterface.getIndexedTaggingSecretAsSender(this.contractAddress, sender, recipient);
   }
 
   async syncNotes() {
-    const taggedLogsByRecipient = await this.pxeDataProvider.syncTaggedLogs(
+    const taggedLogsByRecipient = await this.pxeOracleInterface.syncTaggedLogs(
       this.contractAddress,
      await this.getBlockNumber(),
       undefined,
     );
     for (const [recipient, taggedLogs] of taggedLogsByRecipient.entries()) {
-      await this.pxeDataProvider.processTaggedLogs(taggedLogs, AztecAddress.fromString(recipient));
+      await this.pxeOracleInterface.processTaggedLogs(taggedLogs, AztecAddress.fromString(recipient));
     }
-    await this.pxeDataProvider.removeNullifiedNotes(this.contractAddress);
+    await this.pxeOracleInterface.removeNullifiedNotes(this.contractAddress);
 
     return Promise.resolve();
   }
@@ -1093,7 +1108,7 @@
   }
 
   async getLogByTag(tag: Fr): Promise<LogWithTxData | null> {
-    return await this.pxeDataProvider.getLogByTag(tag);
+    return await this.pxeOracleInterface.getLogByTag(tag);
   }
 
   // AVM oracles
@@ -1192,7 +1207,7 @@
       // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB
       throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`);
     }
-    return this.txeDatabase.storeCapsule(this.contractAddress, slot, capsule);
+    return this.pxeOracleInterface.storeCapsule(this.contractAddress, slot, capsule);
   }
 
   loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise<Fr[] | null> {
@@ -1200,7 +1215,7 @@
       // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB
       throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`);
     }
-    return this.txeDatabase.loadCapsule(this.contractAddress, slot);
+    return this.pxeOracleInterface.loadCapsule(this.contractAddress, slot);
   }
 
   deleteCapsule(contractAddress: AztecAddress, slot: Fr): Promise<void> {
@@ -1208,7 +1223,7 @@
       // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB
       throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`);
     }
-    return this.txeDatabase.deleteCapsule(this.contractAddress, slot);
+    return this.pxeOracleInterface.deleteCapsule(this.contractAddress, slot);
   }
 
   copyCapsule(contractAddress: AztecAddress, srcSlot: Fr, dstSlot: Fr, numEntries: number): Promise<void> {
@@ -1216,10 +1231,11 @@
       // TODO(#10727): instead of this check that this.contractAddress is allowed to access the external DB
       throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`);
     }
-    return this.txeDatabase.copyCapsule(this.contractAddress, srcSlot, dstSlot, numEntries);
+    return this.pxeOracleInterface.copyCapsule(this.contractAddress, srcSlot, dstSlot, numEntries);
   }
 
   aes128Decrypt(ciphertext: Buffer, iv: Buffer, symKey: Buffer): Promise<Buffer> {
-    return this.viewDataOracle.aes128Decrypt(ciphertext, iv, symKey);
+    const aes128 = new Aes128();
+    return aes128.decryptBufferCBC(ciphertext, iv, symKey);
   }
 }
diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts
index 6c5b09606ab..ee2831fa21c 100644
--- a/yarn-project/txe/src/txe_service/txe_service.ts
+++ b/yarn-project/txe/src/txe_service/txe_service.ts
@@ -1,12 +1,10 @@
 import { type ContractInstanceWithAddress, Fr } from '@aztec/aztec.js';
 import { DEPLOYER_CONTRACT_ADDRESS } from '@aztec/constants';
 import type { Logger } from '@aztec/foundation/log';
-import { KeyStore } from '@aztec/key-store';
 import { openTmpStore } from '@aztec/kv-store/lmdb-v2';
-import { protocolContractNames } from '@aztec/protocol-contracts';
-import { BundledProtocolContractsProvider } from '@aztec/protocol-contracts/providers/bundle';
-import { enrichPublicSimulationError } from '@aztec/pxe';
-import { HashedValuesCache, type TypedOracle } from '@aztec/simulator/client';
+import type { ProtocolContract } from '@aztec/protocol-contracts';
+import { enrichPublicSimulationError } from '@aztec/pxe/server';
+import type { TypedOracle } from '@aztec/simulator/client';
 import { type ContractArtifact, FunctionSelector, NoteSelector } from '@aztec/stdlib/abi';
 import { PublicDataWrite } from '@aztec/stdlib/avm';
 import { AztecAddress } from '@aztec/stdlib/aztec-address';
@@ -14,7 +12,6 @@ import { computePartialAddress } from '@aztec/stdlib/contract';
 import { SimulationError } from '@aztec/stdlib/errors';
 import { computePublicDataTreeLeafSlot, siloNullifier } from '@aztec/stdlib/hash';
 import { MerkleTreeId } from '@aztec/stdlib/trees';
-import { NativeWorldStateService } from '@aztec/world-state';
 
 import { TXE } from '../oracle/txe_oracle.js';
 import {
@@ -31,28 +28,14 @@ import {
   toSingle,
 } from '../util/encoding.js';
 import { ExpectedFailureError } from '../util/expected_failure_error.js';
-import { TXEDatabase } from '../util/txe_database.js';
 
 export class TXEService {
   constructor(private logger: Logger, private typedOracle: TypedOracle) {}
 
-  static async init(logger: Logger) {
-    const store = await openTmpStore('test');
-    const executionCache = new HashedValuesCache();
-    const nativeWorldStateService = await NativeWorldStateService.tmp();
-    const baseFork = await nativeWorldStateService.fork();
-
-    const keyStore = new KeyStore(store);
-    const txeDatabase = new TXEDatabase(store);
-    // Register protocol contracts.
-    const provider = new BundledProtocolContractsProvider();
-    for (const name of protocolContractNames) {
-      const { contractClass, instance, artifact } = await provider.getProtocolContractArtifact(name);
-      await txeDatabase.addContractArtifact(contractClass.id, artifact);
-      await txeDatabase.addContractInstance(instance);
-    }
+  static async init(logger: Logger, protocolContracts: ProtocolContract[]) {
     logger.debug(`TXE service initialized`);
-    const txe = await TXE.create(logger, executionCache, keyStore, txeDatabase, nativeWorldStateService, baseFork);
+    const store = await openTmpStore('test');
+    const txe = await TXE.create(logger, store, protocolContracts);
     const service = new TXEService(logger, txe);
     await service.advanceBlocksBy(toSingle(new Fr(1n)));
     return service;
@@ -140,8 +123,10 @@ export class TXEService {
     const secretFr = fromSingle(secret);
     // This is a footgun !
     const completeAddress = await keyStore.addAccount(secretFr, secretFr);
-    const accountStore = (this.typedOracle as TXE).getTXEDatabase();
-    await accountStore.setAccount(completeAddress.address, completeAddress);
+    const accountDataProvider = (this.typedOracle as TXE).getAccountDataProvider();
+    await accountDataProvider.setAccount(completeAddress.address, completeAddress);
+    const addressDataProvider = (this.typedOracle as TXE).getAddressDataProvider();
+    await addressDataProvider.addCompleteAddress(completeAddress);
     this.logger.debug(`Created account ${completeAddress.address}`);
     return toForeignCallResult([
       toSingle(completeAddress.address),
@@ -156,8 +141,10 @@ export class TXEService {
     const keyStore = (this.typedOracle as TXE).getKeyStore();
     const completeAddress = await keyStore.addAccount(fromSingle(secret), await computePartialAddress(instance));
-    const accountStore = (this.typedOracle as TXE).getTXEDatabase();
-    await accountStore.setAccount(completeAddress.address, completeAddress);
+    const accountDataProvider = (this.typedOracle as TXE).getAccountDataProvider();
+    await accountDataProvider.setAccount(completeAddress.address, completeAddress);
+    const addressDataProvider = (this.typedOracle as TXE).getAddressDataProvider();
+    await addressDataProvider.addCompleteAddress(completeAddress);
     this.logger.debug(`Created account ${completeAddress.address}`);
     return toForeignCallResult([
       toSingle(completeAddress.address),
@@ -715,7 +702,6 @@
       await enrichPublicSimulationError(
         result.revertReason,
         (this.typedOracle as TXE).getContractDataProvider(),
-        (this.typedOracle as TXE).getTXEDatabase(),
         this.logger,
       );
       throw new Error(result.revertReason.message);
@@ -745,7 +731,6 @@
       await enrichPublicSimulationError(
         result.revertReason,
         (this.typedOracle as TXE).getContractDataProvider(),
-        (this.typedOracle as TXE).getTXEDatabase(),
         this.logger,
       );
       throw new Error(result.revertReason.message);
diff --git a/yarn-project/txe/src/util/txe_database.ts b/yarn-project/txe/src/util/txe_account_data_provider.ts
similarity index 73%
rename from yarn-project/txe/src/util/txe_database.ts
rename to yarn-project/txe/src/util/txe_account_data_provider.ts
index b4fcf8eb66b..feeb2bb723c 100644
--- a/yarn-project/txe/src/util/txe_database.ts
+++ b/yarn-project/txe/src/util/txe_account_data_provider.ts
@@ -1,14 +1,12 @@
 import type { AztecAsyncKVStore, AztecAsyncMap } from '@aztec/kv-store';
-import { KVPxeDatabase } from '@aztec/pxe';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import { CompleteAddress } from '@aztec/stdlib/contract';
 
-export class TXEDatabase extends KVPxeDatabase {
+export class TXEAccountDataProvider {
   #accounts: AztecAsyncMap<string, Buffer>;
 
-  constructor(db: AztecAsyncKVStore) {
-    super(db);
-    this.#accounts = db.openMap('accounts');
+  constructor(store: AztecAsyncKVStore) {
+    this.#accounts = store.openMap('accounts');
   }
 
   async getAccount(key: AztecAddress) {
@@ -21,6 +19,5 @@
 
   async setAccount(key: AztecAddress, value: CompleteAddress) {
     await this.#accounts.set(key.toString(), value.toBuffer());
-    await this.addCompleteAddress(value);
   }
 }
diff --git a/yarn-project/txe/src/util/txe_public_contract_data_source.ts b/yarn-project/txe/src/util/txe_public_contract_data_source.ts
index 6014dd3b3fa..6446a843862 100644
--- a/yarn-project/txe/src/util/txe_public_contract_data_source.ts
+++ b/yarn-project/txe/src/util/txe_public_contract_data_source.ts
@@ -1,6 +1,6 @@
 import { PUBLIC_DISPATCH_SELECTOR } from '@aztec/constants';
 import { Fr } from '@aztec/foundation/fields';
-import { PrivateFunctionsTree } from '@aztec/pxe';
+import { PrivateFunctionsTree } from '@aztec/pxe/server';
 import { type ContractArtifact, FunctionSelector } from '@aztec/stdlib/abi';
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import {