diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml
index 89fcdd12d12..83d67325775 100644
--- a/.github/workflows/spellcheck.yml
+++ b/.github/workflows/spellcheck.yml
@@ -8,6 +8,32 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  spellcheck:
-    name: Spellcheck
-    uses: noir-lang/.github/.github/workflows/spellcheck.yml@main
+  code:
+    name: Code
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v4
+
+      - name: Check spelling
+        uses: streetsidesoftware/cspell-action@v2
+        with:
+          strict: false # Do not fail if a spelling mistake is found (this can be annoying for contributors)
+          incremental_files_only: true # Only run this action on files which have changed in the PR
+          files: |
+            **/*.{md,rs}
+
+  docs:
+    name: Documentation
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v4
+
+      - name: Check spelling
+        uses: streetsidesoftware/cspell-action@v2
+        with:
+          strict: true # Documentation has higher standards for correctness.
+          incremental_files_only: true # Only run this action on files which have changed in the PR
+          files: |
+            ./docs/**/*.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9cbbeeb677f..d2553b003f8 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -92,6 +92,10 @@ This strategy avoids scenarios where pull requests grow too large/out-of-scope a
 The easiest way to do this is to have multiple Conventional Commits while you work and then you can cherry-pick the smaller changes into separate branches for pull requesting.
 
+### Typos and other small changes
+
+Significant changes, like new features or important bug fixes, have a more pronounced impact on the project’s overall development. For smaller fixes, such as typos, we encourage you to report them instead of opening PRs. This helps us manage our review resources effectively and ensures that every change contributes meaningfully to the project. PRs that only contain such small fixes will likely be closed and incorporated into PRs authored by the core team.
+
 ### Reviews
 
 For any repository in the noir-lang organization, we require code review & approval by __one__ Noir team member before the changes are merged, as enforced by GitHub branch protection. Non-breaking pull requests may be merged at any time. Breaking pull requests should only be merged when the team has general agreement of the changes and is preparing a breaking release.
diff --git a/Cargo.lock b/Cargo.lock index 0c4638f4001..5c507c8c5f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7,7 +7,7 @@ name = "acir" version = "0.38.0" dependencies = [ "acir_field", - "base64", + "base64 0.21.2", "bincode", "brillig", "flate2", @@ -28,7 +28,7 @@ dependencies = [ "ark-bls12-381", "ark-bn254", "ark-ff", - "cfg-if", + "cfg-if 1.0.0", "hex", "num-bigint", "num-traits", @@ -47,7 +47,7 @@ dependencies = [ "num-traits", "paste", "proptest", - "rand", + "rand 0.8.5", "thiserror", "tracing", ] @@ -72,14 +72,13 @@ dependencies = [ "acvm", "bn254_blackbox_solver", "build-data", - "cfg-if", + "cfg-if 1.0.0", "console_error_panic_hook", "const-str", "gloo-utils", "js-sys", "pkg-config", "serde", - "tracing", "tracing-subscriber", "tracing-web", "wasm-bindgen", @@ -108,7 +107,7 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -119,8 +118,8 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if", - "getrandom", + "cfg-if 1.0.0", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -342,7 +341,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", ] [[package]] @@ -387,7 +386,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ - "futures", + "futures 0.3.28", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -443,7 +442,7 @@ checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "miniz_oxide", "object", @@ -456,6 +455,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.2" @@ -561,7 +566,7 @@ dependencies = [ "ark-ec", "ark-ff", "flate2", - "getrandom", + "getrandom 0.2.10", "grumpkin", "js-sys", "num-bigint", @@ -727,6 +732,12 @@ version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -852,7 +863,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url", + "url 2.4.0", ] [[package]] @@ -917,7 +928,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "wasm-bindgen", ] @@ -953,6 +964,12 @@ dependencies = [ "unicode-xid", ] 
+[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -966,7 +983,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80128832c58ea9cbd041d2a759ec449224487b2c1e400453d99d244eead87a8e" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "libc", "scopeguard", "windows-sys 0.33.0", @@ -978,7 +995,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee34052ee3d93d6d8f3e6f81d85c47921f6653a19a7b70e939e3e602d893a674" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1079,7 +1096,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1124,7 +1141,7 @@ version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] @@ -1134,7 +1151,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] @@ -1146,7 +1163,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", "memoffset 0.9.0", "scopeguard", @@ -1158,7 +1175,7 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] @@ -1168,7 +1185,7 @@ version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1178,7 +1195,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1266,11 +1283,11 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.8", ] [[package]] @@ -1312,6 +1329,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + [[package]] name = "difflib" version = "0.4.0" @@ -1344,7 +1374,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", 
"dirs-sys-next", ] @@ -1423,7 +1453,7 @@ dependencies = [ "generic-array", "group", "pkcs8", - "rand_core", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -1447,7 +1477,7 @@ version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1551,7 +1581,7 @@ version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "rustix", "windows-sys 0.48.0", ] @@ -1562,7 +1592,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1581,7 +1611,7 @@ version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "windows-sys 0.48.0", @@ -1640,7 +1670,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding", + "percent-encoding 2.3.0", ] [[package]] @@ -1649,6 +1679,12 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + [[package]] name = "futures" version = "0.3.28" @@ -1657,6 +1693,7 @@ checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1679,6 +1716,18 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + [[package]] name = "futures-io" version = "0.3.28" @@ -1714,6 +1763,7 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ + "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -1741,7 +1791,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1754,16 +1804,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = 
"0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] @@ -1834,7 +1895,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1864,7 +1925,7 @@ dependencies = [ "indexmap 1.9.3", "slab", "tokio", - "tokio-util", + "tokio-util 0.7.8", "tracing", ] @@ -2050,6 +2111,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2084,7 +2156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" dependencies = [ "bitmaps", - "rand_core", + "rand_core 0.6.4", "rand_xoshiro", "serde", "sized-chunks", @@ -2162,6 +2234,15 @@ dependencies = [ "str_stack", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "ipnet" version = "2.8.0" @@ -2207,13 +2288,127 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpc" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" +dependencies = [ + "base64 0.13.1", + "minreq", + "serde", + "serde_json", +] + +[[package]] +name = "jsonrpc-client-transports" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" +dependencies = [ + "derive_more", + "futures 0.3.28", + "jsonrpc-core", + "jsonrpc-pubsub", + "log", + "serde", + "serde_json", + "url 1.7.2", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.28", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpc-core-client" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" +dependencies = [ + "futures 0.3.28", + "jsonrpc-client-transports", +] + +[[package]] +name = "jsonrpc-derive" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "jsonrpc-http-server" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" +dependencies = [ + "futures 0.3.28", + "hyper", + "jsonrpc-core", + "jsonrpc-server-utils", + "log", + "net2", + "parking_lot 0.11.2", + "unicase", +] + +[[package]] +name = "jsonrpc-pubsub" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" +dependencies = [ + "futures 0.3.28", + "jsonrpc-core", + "lazy_static", + "log", + "parking_lot 0.11.2", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "jsonrpc-server-utils" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" +dependencies = [ + "bytes", + "futures 0.3.28", + "globset", + "jsonrpc-core", + "lazy_static", + "log", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "unicase", +] + [[package]] name = "k256" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "ecdsa", "elliptic-curve", "sha2", @@ -2293,7 +2488,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2306,7 +2501,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2327,6 +2522,12 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "memchr" version = "2.5.0" @@ -2360,15 +2561,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -2393,6 +2585,17 @@ dependencies = [ "adler", ] +[[package]] +name = "minreq" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3371dfc7b772c540da1380123674a8e20583aca99907087d990ca58cf44203" +dependencies = [ + "log", + "serde", + "serde_json", +] + [[package]] name = "mio" version = "0.8.8" @@ -2400,7 +2603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2418,6 +2621,11 @@ dependencies = [ "codespan-reporting", "fm", "iter-extended", + "jsonrpc", + "jsonrpc-core", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-http-server", "noirc_abi", "noirc_driver", "noirc_errors", @@ -2427,6 +2635,7 @@ dependencies = [ "rayon", "rustc_version", "serde", + "serial_test", "tempfile", "thiserror", "tracing", @@ -2477,8 +2686,8 @@ dependencies = [ "test-binary", "thiserror", "tokio", - "tokio-util", - "toml", + "tokio-util 0.7.8", + "toml 0.7.6", "tower", "tracing-appender", "tracing-subscriber", @@ -2493,7 +2702,7 @@ dependencies = [ "serde", "similar-asserts", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] @@ -2507,8 +2716,19 @@ dependencies = [ "semver", "serde", "thiserror", - "toml", - "url", + "toml 0.7.6", + "url 2.4.0", +] + +[[package]] +name = "net2" +version = "0.2.39" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi", ] [[package]] @@ -2528,21 +2748,20 @@ checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ "bitflags 1.3.2", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "memoffset 0.6.5", ] [[package]] name = "nix" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "libc", - "static_assertions", ] [[package]] @@ -2560,6 +2779,7 @@ dependencies = [ "noirc_printable_type", "owo-colors", "serde_json", + "tempfile", "thiserror", ] @@ -2595,7 +2815,7 @@ dependencies = [ "build-data", "console_error_panic_hook", "fm", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "js-sys", "nargo", @@ -2622,7 +2842,7 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] @@ -2632,7 +2852,7 @@ dependencies = [ "acvm", "build-data", "console_error_panic_hook", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "iter-extended", "js-sys", @@ -2722,6 +2942,7 @@ version = "0.22.0" dependencies = [ "acvm", "iter-extended", + "jsonrpc", "regex", "serde", "serde_json", @@ -2839,6 +3060,17 @@ dependencies = [ "sha2", ] +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2846,7 +3078,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -2855,7 +3101,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "smallvec", @@ -2868,6 +3114,12 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "percent-encoding" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" + [[package]] name = "percent-encoding" version = "2.3.0" @@ -2892,7 +3144,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] @@ -2981,15 +3233,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" dependencies = [ "backtrace", - "cfg-if", + "cfg-if 1.0.0", "criterion", "findshlibs", "inferno", "libc", "log", - "nix 0.26.2", + "nix 0.26.4", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "smallvec", "symbolic-demangle", "tempfile", @@ -3058,6 +3310,15 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3108,8 +3369,8 @@ dependencies = [ "bitflags 2.3.3", "lazy_static", "num-traits", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_xorshift", "regex-syntax 0.7.4", "rusty-fork", @@ -3177,6 +3438,19 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + [[package]] name = "rand" version = "0.8.5" @@ -3184,8 +3458,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] @@ -3195,7 +3479,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", ] [[package]] @@ -3204,7 +3497,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", ] [[package]] @@ -3213,7 +3515,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3222,7 +3524,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3269,7 +3571,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom", + "getrandom 0.2.10", "redox_syscall 
0.2.16", "thiserror", ] @@ -3357,7 +3659,7 @@ version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64", + "base64 0.21.2", "bytes", "encoding_rs", "futures-core", @@ -3372,7 +3674,7 @@ dependencies = [ "log", "mime", "once_cell", - "percent-encoding", + "percent-encoding 2.3.0", "pin-project-lite", "rustls", "rustls-pemfile", @@ -3382,7 +3684,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "url", + "url 2.4.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3542,7 +3844,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64", + "base64 0.21.2", ] [[package]] @@ -3580,7 +3882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "clipboard-win", "dirs-next", "fd-lock", @@ -3837,7 +4139,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", @@ -3860,13 +4162,38 @@ dependencies = [ "syn 2.0.26", ] +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures 0.3.28", + "lazy_static", + "log", + "parking_lot 0.12.1", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.26", +] + [[package]] name = "sha2" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] @@ -3922,7 +4249,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4044,12 +4371,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "str-buf" version = "1.0.6" @@ -4167,7 +4488,7 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -4260,7 +4581,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "once_cell", ] @@ -4356,6 +4677,31 @@ 
dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.8" @@ -4371,6 +4717,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "toml" version = "0.7.6" @@ -4552,6 +4907,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.13" @@ -4603,6 +4967,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "url" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" +dependencies = [ + "idna 0.1.5", + "matches", + "percent-encoding 1.0.1", +] + [[package]] name = "url" version = "2.4.0" @@ -4610,8 +4985,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", - "idna", - "percent-encoding", + "idna 0.4.0", + "percent-encoding 2.3.0", "serde", ] @@ -4677,6 +5052,12 @@ dependencies = [ "try-lock", ] +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4689,7 +5070,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "serde", "serde_json", "wasm-bindgen-macro", @@ -4710,36 +5091,13 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-downcast" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dac026d43bcca6e7ce1c0956ba68f59edf6403e8e930a5d891be72c31a44340" -dependencies = [ - "js-sys", - "once_cell", - "wasm-bindgen", - "wasm-bindgen-downcast-macros", -] - -[[package]] -name = "wasm-bindgen-downcast-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = 
"wasm-bindgen-futures" version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "wasm-bindgen", "web-sys", @@ -4809,12 +5167,12 @@ dependencies = [ [[package]] name = "wasmer" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cb1ae2956aac1fbbcf334c543c1143cdf7d5b0a5fb6c3d23a17bf37dd1f47b" +checksum = "ce45cc009177ca345a6d041f9062305ad467d15e7d41494f5b81ab46d62d7a58" dependencies = [ "bytes", - "cfg-if", + "cfg-if 1.0.0", "derivative", "indexmap 1.9.3", "js-sys", @@ -4826,7 +5184,6 @@ dependencies = [ "target-lexicon", "thiserror", "wasm-bindgen", - "wasm-bindgen-downcast", "wasmer-compiler", "wasmer-compiler-cranelift", "wasmer-derive", @@ -4840,13 +5197,13 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12fd9aeef339095798d1e04957d5657d97490b1112f145cbf08b98f6393b4a0a" +checksum = "e044f6140c844602b920deb4526aea3cc9c0d7cf23f00730bb9b2034669f522a" dependencies = [ "backtrace", "bytes", - "cfg-if", + "cfg-if 1.0.0", "enum-iterator", "enumset", "lazy_static", @@ -4867,9 +5224,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-cranelift" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "344f5f1186c122756232fe7f156cc8d2e7bf333d5a658e81e25efa3415c26d07" +checksum = "32ce02358eb44a149d791c1d6648fb7f8b2f99cd55e3c4eef0474653ec8cc889" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -4886,9 +5243,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ac8c1f2dc0ed3c7412a5546e468365184a461f8ce7dfe2a707b621724339f91" +checksum = "c782d80401edb08e1eba206733f7859db6c997fc5a7f5fb44edc3ecd801468f6" dependencies = [ "proc-macro-error", "proc-macro2", @@ -4898,9 +5255,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a57ecbf218c0a9348d4dfbdac0f9d42d9201ae276dffb13e61ea4ff939ecce7" +checksum = "fd09e80d4d74bb9fd0ce6c3c106b1ceba1a050f9948db9d9b78ae53c172d6157" dependencies = [ "bytecheck", "enum-iterator", @@ -4914,13 +5271,13 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60c3513477bc0097250f6e34a640e2a903bb0ee57e6bb518c427f72c06ac7728" +checksum = "bdcd8a4fd36414a7b6a003dbfbd32393bce3e155d715dd877c05c1b7a41d224d" dependencies = [ "backtrace", "cc", - "cfg-if", + "cfg-if 1.0.0", "corosensei", "crossbeam-queue", "dashmap", @@ -4931,7 +5288,7 @@ dependencies = [ "lazy_static", "libc", "mach", - "memoffset 0.8.0", + "memoffset 0.9.0", "more-asserts", "region", "scopeguard", @@ -4953,7 +5310,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" dependencies = [ "indexmap 1.9.3", - "url", + "url 2.4.0", ] [[package]] @@ -5223,7 +5580,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if", + "cfg-if 
1.0.0", "windows-sys 0.48.0", ]
diff --git a/Cargo.toml b/Cargo.toml
index 7dda111c1a5..5469a63f3dd 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -122,6 +122,7 @@ num-bigint = "0.4"
 num-traits = "0.2"
 similar-asserts = "1.5.0"
 tempfile = "3.6.0"
+jsonrpc = { version = "0.16.0", features = ["minreq_http"] }
 tracing = "0.1.40"
 tracing-web = "0.1.3"
diff --git a/README.md b/README.md
index 2fc47f16fef..038ce5691cd 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ Noir is a Domain Specific Language for SNARK proving systems. It has been design
 ## Quick Start
 
-Read the installation section [here](https://noir-lang.org/getting_started/nargo_installation).
+Read the installation section [here](https://noir-lang.org/docs/dev/getting_started/installation/).
 
 Once you have read through the documentation, you can visit [Awesome Noir](https://github.com/noir-lang/awesome-noir) to run some of the examples that others have created.
 
@@ -58,7 +58,7 @@ This crate's minimum supported rustc version is 1.71.1.
 ## Working on this project
 
-This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/getting_started/nargo_installation/#option-3-compile-from-source) to setup your environment for working on the project.
+This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/docs/dev/getting_started/installation/other_install_methods#option-3-compile-from-source) to set up your environment for working on the project.
 
 ### Building against a different local/remote version of Barretenberg
diff --git a/acvm-repo/acir/acir_docs.md b/acvm-repo/acir/acir_docs.md
index 1215ea22330..a124bfbc37f 100644
--- a/acvm-repo/acir/acir_docs.md
+++ b/acvm-repo/acir/acir_docs.md
@@ -6,14 +6,14 @@ This document describes the purpose of ACIR, what it is and how ACIR programs ca
 ## Introduction
 The purpose of ACIR is to make the link between a generic proving system, such as Aztec's Barretenberg, and a frontend, such as Noir, which describes user-specific computations.
-More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems uses specific low-level constrain-based languages. Similarly, frontends have their own internal representation in order to represent user programs.
+More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems use specific low-level constraint-based languages. Similarly, frontends have their own internal representation in order to represent user programs.
 
 The goal of ACIR is to provide a generic open-source intermediate representation close to proving system 'languages', but agnostic to a specific proving system, that can be used both by proving systems and as a target for frontends. So, at the end of the day, an ACIR program is just another representation of a program, dedicated to proving systems.
 ## Abstract Circuit Intermediate Representation
 ACIR stands for abstract circuit intermediate representation:
 - **abstract circuit**: circuits are a simple computation model where basic computation units, named gates, are connected with wires. Data flows through the wires while gates compute output wires based on their input. More formally, they are directed acyclic graphs (DAG) where the vertices are the gates and the edges are the wires. Due to the immutable nature of the wires (their value does not change during an execution), they are well suited for describing computations for ZKPs. Furthermore, we do not lose any expressiveness when using a circuit as it is well known that any bounded computation can be translated into an arithmetic circuit (i.e. a circuit with only addition and multiplication gates).
-The term abstract here simply mean that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below).
+The term abstract here simply means that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below).
 - **intermediate representation**: The ACIR representation is intermediate because it lies between a frontend and its proving system. ACIR bytecode makes the link between the Noir compiler output and the proving system backend input.
 
 ## The constraint model
@@ -32,13 +32,13 @@ For instance, if input_wire_1 and input_wire_2 values are supplied as 3 and 8, t
 In summary, the workflow is the following:
 1. user program -> (compilation) ACIR, a list of opcodes which constrain (partial) witnesses
 2. user inputs + ACIR -> (execution/solving) assign values to all the (partial) witnesses
-3. witness assignement + ACIR -> (proving system) proof
+3. witness assignment + ACIR -> (proving system) proof
 
 Although the ordering of opcodes does not matter in theory, since a system of equations is not dependent on its ordering, in practice it matters a lot for the solving (i.e. the performance of the execution). ACIR opcodes **must be ordered** so that each opcode can be resolved one after the other.
 
-The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system need the values of all the partial witnesses and all the constraints in order to generate a proof.
+The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system needs the values of all the partial witnesses and all the constraints in order to generate a proof.
 
 *Remark*: The value of a partial witness is unique and fixed throughout a program execution, although in some rare cases, multiple values are possible for the same execution and witness (when there are several valid solutions to the constraints). Having multiple possible values for a witness may indicate that the circuit is not safe.
 
@@ -51,7 +51,7 @@ We assume here that the proving system is Barretenberg. Some parameters may slig
 Some opcodes have inputs and outputs, which means that the output is constrained to be the result of the opcode computation from the inputs. The solver expects that all inputs are known when solving such opcodes.
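To make the workflow above concrete, here is a minimal, self-contained sketch of the constraint model using simplified stand-in types rather than the real `acir` crate API (the type and field names here are illustrative assumptions): an assert-zero style opcode is a quadratic/linear combination of witnesses, and execution/solving means finding a witness assignment under which every such combination evaluates to zero.

```rust
// A minimal sketch of the constraint model described above, with simplified
// types standing in for the real `acir` crate. An assert-zero opcode is a
// polynomial of the form sum(q * w_a * w_b) + sum(q * w) + q_c, which must
// evaluate to zero for the chosen witness assignment.
use std::collections::HashMap;

type Witness = u32;
type Field = i128; // stand-in for the proving system's scalar field

struct AssertZero {
    mul_terms: Vec<(Field, Witness, Witness)>, // q * w_a * w_b
    linear_terms: Vec<(Field, Witness)>,       // q * w
    constant: Field,
}

impl AssertZero {
    fn is_satisfied(&self, witnesses: &HashMap<Witness, Field>) -> bool {
        let mut acc = self.constant;
        for (q, a, b) in &self.mul_terms {
            acc += q * witnesses[a] * witnesses[b];
        }
        for (q, w) in &self.linear_terms {
            acc += q * witnesses[w];
        }
        acc == 0
    }
}

fn main() {
    // Encodes the constraint w1 * w2 - w3 = 0, i.e. "w3 is the product of w1 and w2".
    let opcode = AssertZero {
        mul_terms: vec![(1, 1, 2)],
        linear_terms: vec![(-1, 3)],
        constant: 0,
    };
    // Executing/solving assigns concrete values to the (partial) witnesses.
    let witnesses: HashMap<Witness, Field> = HashMap::from([(1, 3), (2, 5), (3, 15)]);
    assert!(opcode.is_satisfied(&witnesses));
}
```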
-Some opcodes are not constrained, which mean they will not be used by the proving system and are only used by the solver.
+Some opcodes are not constrained, which means they will not be used by the proving system and are only used by the solver.
 
 Finally, some opcodes will have a predicate, whose value is 0 or 1. Its purpose is to nullify the opcode when the value is 0, so that it has no effect. Note that removing the opcode is not a solution because this modifies the circuit (the circuit being mainly the list of the opcodes).
 
@@ -71,7 +71,7 @@ The solver expects that at most one witness is not known when executing the opco
 ### BlackBoxFuncCall opcode
 These opcodes represent a specific computation. Even if any computation can be done using only assert-zero opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using, for instance, look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself.
-All black box functions takes as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output.
+All black box functions take as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output.
 
 Some more advanced computations assume that the proving system has an 'embedded curve'. It is a curve that cycles with the main curve of the proving system, i.e. the scalar field of the embedded curve is the base field of the main one, and vice-versa. The curves used by the proving system are dependent on the proving system (and/or its configuration). Aztec's Barretenberg uses BN254 as the main curve and Grumpkin as the embedded curve.
The black box functions supported by ACIR are: diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index 5c70d3cda37..542e291982b 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -2,6 +2,7 @@ use ark_ff::PrimeField; use ark_ff::Zero; use num_bigint::BigUint; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; // XXX: Switch out for a trait and proper implementations // This implementation is in-efficient, can definitely remove hex usage and Iterator instances for trivial functionality @@ -125,8 +126,8 @@ impl<'de, T: ark_ff::PrimeField> Deserialize<'de> for FieldElement { where D: serde::Deserializer<'de>, { - let s = <&str>::deserialize(deserializer)?; - match Self::from_hex(s) { + let s: Cow<'de, str> = Deserialize::deserialize(deserializer)?; + match Self::from_hex(&s) { Some(value) => Ok(value), None => Err(serde::de::Error::custom(format!("Invalid hex for FieldElement: {s}",))), } diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml index 987006f8e2b..e8d46b9717e 100644 --- a/acvm-repo/acvm_js/Cargo.toml +++ b/acvm-repo/acvm_js/Cargo.toml @@ -26,7 +26,6 @@ wasm-bindgen-futures.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true js-sys.workspace = true -tracing.workspace = true tracing-subscriber.workspace = true tracing-web.workspace = true diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 9f265dd676a..2d878e961da 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -1,16 +1,21 @@ { "name": "@noir-lang/acvm_js", "version": "0.38.0", - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/acvm.git" - }, "publishConfig": { "access": "public" }, - "collaborators": [ + "contributors": [ "The Noir Team " ], + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "acvm_repo/acvm_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "license": "MIT", "main": "./nodejs/acvm_js.js", "types": "./web/acvm_js.d.ts", diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 42818e8b19d..33ecc794f76 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -106,7 +106,7 @@ pub enum InternalError { #[error("ICE: Undeclared AcirVar")] UndeclaredAcirVar { call_stack: CallStack }, #[error("ICE: Expected {expected:?}, found {found:?}")] - UnExpected { expected: String, found: String, call_stack: CallStack }, + Unexpected { expected: String, found: String, call_stack: CallStack }, } impl RuntimeError { @@ -119,7 +119,7 @@ impl RuntimeError { | InternalError::MissingArg { call_stack, .. } | InternalError::NotAConstant { call_stack, .. } | InternalError::UndeclaredAcirVar { call_stack } - | InternalError::UnExpected { call_stack, .. }, + | InternalError::Unexpected { call_stack, .. }, ) | RuntimeError::FailedConstraint { call_stack, .. } | RuntimeError::IndexOutOfBounds { call_stack, .. 
} diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 05a22ab6aad..3bc99b4d054 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -368,9 +368,34 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; - Ok(outputs[0]) + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x ^ x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } else if lhs_expr.is_zero() { + // 0 ^ x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x ^ 0 == x + return Ok(lhs); + } + + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + // + // a ^ b == a + b - 2*a*b + let sum = self.add_var(lhs, rhs)?; + let prod = self.mul_var(lhs, rhs)?; + self.add_mul_var(sum, -FieldElement::from(2_i128), prod) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the AND result of `lhs` & `rhs`. @@ -380,6 +405,18 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x & x == x + return Ok(lhs); + } else if lhs_expr.is_zero() || rhs_expr.is_zero() { + // x & 0 == 0 and 0 & x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans. 
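The 1-bit branch added above rests on the algebraic identity a ^ b == a + b - 2*a*b for values restricted to {0, 1}. A quick standalone check of that identity with plain integers (this only illustrates the arithmetic, not the `AcirContext` API):

```rust
// Verifies the identity used for 1-bit XOR above: for booleans encoded as 0/1,
// a ^ b == a + b - 2*a*b. Plain integer arithmetic stands in for the field
// arithmetic performed on `AcirVar`s.
fn main() {
    for a in 0i64..=1 {
        for b in 0i64..=1 {
            let xor = a ^ b;
            let arithmetic_xor = a + b - 2 * a * b;
            assert_eq!(xor, arithmetic_xor, "identity fails for a={a}, b={b}");
        }
    }
    println!("a ^ b == a + b - 2ab holds for all boolean inputs");
}
```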
@@ -398,6 +435,16 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + if lhs_expr.is_zero() { + // 0 | x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x | 0 == x + return Ok(lhs); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans @@ -407,15 +454,11 @@ impl AcirContext { self.sub_var(sum, mul) } else { // Implement OR in terms of AND - // max - ((max - a) AND (max -b)) - // Subtracting from max flips the bits, so this is effectively: - // (NOT a) NAND (NOT b) - let max = self.add_constant((1_u128 << bit_size) - 1); - let a = self.sub_var(max, lhs)?; - let b = self.sub_var(max, rhs)?; - let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - self.sub_var(max, outputs[0]) + // (NOT a) NAND (NOT b) => a OR b + let a = self.not_var(lhs, typ.clone())?; + let b = self.not_var(rhs, typ.clone())?; + let a_and_b = self.and_var(a, b, typ.clone())?; + self.not_var(a_and_b, typ) } } @@ -487,8 +530,19 @@ impl AcirContext { let lhs_data = self.vars[&lhs].clone(); let rhs_data = self.vars[&rhs].clone(); let result = match (lhs_data, rhs_data) { + // (x * 1) == (1 * x) == x + (AcirVarData::Const(constant), _) if constant.is_one() => rhs, + (_, AcirVarData::Const(constant)) if constant.is_one() => lhs, + + // (x * 0) == (0 * x) == 0 + (AcirVarData::Const(constant), _) | (_, AcirVarData::Const(constant)) + if constant.is_zero() => + { + self.add_constant(FieldElement::zero()) + } + (AcirVarData::Const(lhs_constant), AcirVarData::Const(rhs_constant)) => { - self.add_data(AcirVarData::Const(lhs_constant * rhs_constant)) + self.add_constant(lhs_constant * rhs_constant) } (AcirVarData::Witness(witness), AcirVarData::Const(constant)) | (AcirVarData::Const(constant), AcirVarData::Witness(witness)) => { @@ -852,9 +906,7 @@ impl AcirContext { // Unsigned to signed: derive q and r from q1,r1 and the signs of lhs and rhs // Quotient sign is lhs sign * rhs sign, whose resulting sign bit is the XOR of the sign bits - let sign_sum = self.add_var(lhs_leading, rhs_leading)?; - let sign_prod = self.mul_var(lhs_leading, rhs_leading)?; - let q_sign = self.add_mul_var(sign_sum, -FieldElement::from(2_i128), sign_prod)?; + let q_sign = self.xor_var(lhs_leading, rhs_leading, AcirType::unsigned(1))?; let quotient = self.two_complement(q1, q_sign, bit_size)?; let remainder = self.two_complement(r1, lhs_leading, bit_size)?; @@ -947,23 +999,25 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, bit_count: u32, - predicate: AcirVar, ) -> Result { let pow_last = self.add_constant(FieldElement::from(1_u128 << (bit_count - 1))); let pow = self.add_constant(FieldElement::from(1_u128 << (bit_count))); // We check whether the inputs have same sign or not by computing the XOR of their bit sign + + // Predicate is always active as `pow_last` is known to be non-zero. 
+ let one = self.add_constant(1_u128); let lhs_sign = self.div_var( lhs, pow_last, AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), - predicate, + one, )?; let rhs_sign = self.div_var( rhs, pow_last, AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), - predicate, + one, )?; let same_sign = self.xor_var( lhs_sign, @@ -976,7 +1030,7 @@ impl AcirContext { let diff = self.sub_var(no_underflow, rhs)?; // We check the 'bit sign' of the difference - let diff_sign = self.less_than_var(diff, pow, bit_count + 1, predicate)?; + let diff_sign = self.less_than_var(diff, pow, bit_count + 1)?; // Then the result is simply diff_sign XOR same_sign (can be checked with a truth table) self.xor_var( @@ -993,7 +1047,6 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, max_bits: u32, - predicate: AcirVar, ) -> Result { // Returns a `Witness` that is constrained to be: // - `1` if lhs >= rhs @@ -1018,6 +1071,7 @@ impl AcirContext { // // TODO: perhaps this should be a user error, instead of an assert assert!(max_bits + 1 < FieldElement::max_num_bits()); + let two_max_bits = self .add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(max_bits as i128))); let diff = self.sub_var(lhs, rhs)?; @@ -1047,13 +1101,11 @@ impl AcirContext { // let k = b - a // - 2^{max_bits} - k == q * 2^{max_bits} + r // - This is only the case when q == 0 and r == 2^{max_bits} - k - // - let (q, _) = self.euclidean_division_var( - comparison_evaluation, - two_max_bits, - max_bits + 1, - predicate, - )?; + + // Predicate is always active as we know `two_max_bits` is always non-zero. + let one = self.add_constant(1_u128); + let (q, _) = + self.euclidean_division_var(comparison_evaluation, two_max_bits, max_bits + 1, one)?; Ok(q) } @@ -1064,11 +1116,10 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, bit_size: u32, - predicate: AcirVar, ) -> Result { // Flip the result of calling more than equal method to // compute less than. - let comparison = self.more_than_eq_var(lhs, rhs, bit_size, predicate)?; + let comparison = self.more_than_eq_var(lhs, rhs, bit_size)?; let one = self.add_constant(FieldElement::one()); self.sub_var(one, comparison) // comparison_negated @@ -1464,7 +1515,7 @@ impl AcirContext { bit_size: u32, predicate: AcirVar, ) -> Result<(), RuntimeError> { - let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size, predicate)?; + let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size)?; self.maybe_eq_predicate(lhs_less_than_rhs, predicate) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 6a48dadaf30..33f00796c9d 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -302,7 +302,7 @@ impl Context { let len = if matches!(typ, Type::Array(_, _)) { typ.flattened_size() } else { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), found: format!("Instead got {:?}", typ), call_stack: self.acir_context.get_call_stack(), @@ -640,7 +640,7 @@ impl Context { Instruction::ArrayGet { array, index } => (array, index, None), Instruction::ArraySet { array, index, value, .. 
} => (array, index, Some(value)), _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArrayGet or ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -697,7 +697,7 @@ impl Context { match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { - return Err(RuntimeError::InternalError(InternalError::UnExpected { + return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), found: format!("{acir_var:?}"), call_stack: self.acir_context.get_call_stack(), @@ -788,7 +788,7 @@ impl Context { let slice_sizes = if store_type.contains_slice_element() { self.compute_slice_sizes(store, None, dfg); self.slice_sizes.get(&store).cloned().ok_or_else(|| { - InternalError::UnExpected { + InternalError::Unexpected { expected: "Store value should have slice sizes computed".to_owned(), found: "Missing key in slice sizes map".to_owned(), call_stack: self.acir_context.get_call_stack(), @@ -1013,7 +1013,7 @@ impl Context { let array = match dfg[instruction] { Instruction::ArraySet { array, .. } => array, _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -1235,7 +1235,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "AcirValue::DynamicArray or AcirValue::Array" .to_owned(), found: format!("{:?}", array_acir_value), @@ -1246,7 +1246,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or instruction".to_owned(), found: format!("{:?}", &dfg[array_id]), call_stack: self.acir_context.get_call_stack(), @@ -1256,7 +1256,7 @@ impl Context { }; } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or slice".to_owned(), found: array_typ.to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1513,12 +1513,12 @@ impl Context { ) -> Result { match self.convert_value(value_id, dfg) { AcirValue::Var(acir_var, _) => Ok(acir_var), - AcirValue::Array(array) => Err(InternalError::UnExpected { + AcirValue::Array(array) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: format!("{array:?}"), call_stack: self.acir_context.get_call_stack(), }), - AcirValue::DynamicArray(_) => Err(InternalError::UnExpected { + AcirValue::DynamicArray(_) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: "an array".to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1571,15 +1571,10 @@ impl Context { // this Eq instruction is being used for a constrain statement BinaryOp::Eq => self.acir_context.eq_var(lhs, rhs), BinaryOp::Lt => match binary_type { - AcirType::NumericType(NumericType::Signed { .. }) => self - .acir_context - .less_than_signed(lhs, rhs, bit_count, self.current_side_effects_enabled_var), - _ => self.acir_context.less_than_var( - lhs, - rhs, - bit_count, - self.current_side_effects_enabled_var, - ), + AcirType::NumericType(NumericType::Signed { .. 
}) => { + self.acir_context.less_than_signed(lhs, rhs, bit_count) + } + _ => self.acir_context.less_than_var(lhs, rhs, bit_count), }, BinaryOp::Xor => self.acir_context.xor_var(lhs, rhs, binary_type), BinaryOp::And => self.acir_context.and_var(lhs, rhs, binary_type), @@ -2141,19 +2136,11 @@ impl Context { let current_index = self.acir_context.add_constant(i); // Check that we are above the lower bound of the insertion index - let greater_eq_than_idx = self.acir_context.more_than_eq_var( - current_index, - flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let greater_eq_than_idx = + self.acir_context.more_than_eq_var(current_index, flat_user_index, 64)?; // Check that we are below the upper bound of the insertion index - let less_than_idx = self.acir_context.less_than_var( - current_index, - max_flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let less_than_idx = + self.acir_context.less_than_var(current_index, max_flat_user_index, 64)?; // Read from the original slice the value we want to insert into our new slice. // We need to make sure that we read the previous element when our current index is greater than insertion index. @@ -2328,7 +2315,6 @@ impl Context { current_index, flat_user_index, 64, - self.current_side_effects_enabled_var, )?; let shifted_value_pred = diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 0e7bfff7b6b..fdd7c66684c 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -641,8 +641,25 @@ impl<'f> Context<'f> { match instruction { Instruction::Constrain(lhs, rhs, message) => { // Replace constraint `lhs == rhs` with `condition * lhs == condition * rhs`. - let lhs = self.handle_constrain_arg_side_effects(lhs, condition, &call_stack); - let rhs = self.handle_constrain_arg_side_effects(rhs, condition, &call_stack); + + // Condition needs to be cast to argument type in order to multiply them together. + let argument_type = self.inserter.function.dfg.type_of_value(lhs); + // Sanity check that we're not constraining non-primitive types + assert!(matches!(argument_type, Type::Numeric(_))); + + let casted_condition = self.insert_instruction( + Instruction::Cast(condition, argument_type), + call_stack.clone(), + ); + + let lhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, lhs, casted_condition), + call_stack.clone(), + ); + let rhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, rhs, casted_condition), + call_stack, + ); Instruction::Constrain(lhs, rhs, message) } @@ -673,90 +690,6 @@ impl<'f> Context<'f> { } } - /// Given the arguments of a constrain instruction, multiplying them by the branch's condition - /// requires special handling in the case of complex types. - fn handle_constrain_arg_side_effects( - &mut self, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let argument_type = self.inserter.function.dfg.type_of_value(argument); - - match &argument_type { - Type::Numeric(_) => { - // Condition needs to be cast to argument type in order to multiply them together. 
- let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); - - self.insert_instruction( - Instruction::binary(BinaryOp::Mul, argument, casted_condition), - call_stack.clone(), - ) - } - Type::Array(_, _) => { - self.handle_array_constrain_arg(argument_type, argument, condition, call_stack) - } - Type::Slice(_) => { - panic!("Cannot use slices directly in a constrain statement") - } - Type::Reference(_) => { - panic!("Cannot use references directly in a constrain statement") - } - Type::Function => { - panic!("Cannot use functions directly in a constrain statement") - } - } - } - - fn handle_array_constrain_arg( - &mut self, - typ: Type, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let mut new_array = im::Vector::new(); - - let (element_types, len) = match &typ { - Type::Array(elements, len) => (elements, *len), - _ => panic!("Expected array type"), - }; - - for i in 0..len { - for (element_index, element_type) in element_types.iter().enumerate() { - let index = ((i * element_types.len() + element_index) as u128).into(); - let index = self.inserter.function.dfg.make_constant(index, Type::field()); - - let typevars = Some(vec![element_type.clone()]); - - let mut get_element = |array, typevars| { - let get = Instruction::ArrayGet { array, index }; - self.inserter - .function - .dfg - .insert_instruction_and_results( - get, - self.inserter.function.entry_block(), - typevars, - CallStack::new(), - ) - .first() - }; - - let element = get_element(argument, typevars); - - new_array.push_back( - self.handle_constrain_arg_side_effects(element, condition, call_stack), - ); - } - } - - self.inserter.function.dfg.make_array(new_array, typ) - } - fn undo_stores_in_then_branch(&mut self, then_branch: &Branch) { for (address, store) in &then_branch.store_values { let address = *address; diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d7e6b8b0a3d..c00fbbbcb40 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -8,8 +8,8 @@ use context::SharedContext; use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use noirc_frontend::{ - monomorphization::ast::{self, Binary, Expression, Program}, - BinaryOpKind, Visibility, + monomorphization::ast::{self, Expression, Program}, + Visibility, }; use crate::{ @@ -653,24 +653,10 @@ impl<'a> FunctionContext<'a> { location: Location, assert_message: Option, ) -> Result { - match expr { - // If we're constraining an equality to be true then constrain the two sides directly. - Expression::Binary(Binary { lhs, operator: BinaryOpKind::Equal, rhs, .. 
}) => { - let lhs = self.codegen_non_tuple_expression(lhs)?; - let rhs = self.codegen_non_tuple_expression(rhs)?; - self.builder.set_location(location).insert_constrain(lhs, rhs, assert_message); - } + let expr = self.codegen_non_tuple_expression(expr)?; + let true_literal = self.builder.numeric_constant(true, Type::bool()); + self.builder.set_location(location).insert_constrain(expr, true_literal, assert_message); - _ => { - let expr = self.codegen_non_tuple_expression(expr)?; - let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain( - expr, - true_literal, - assert_message, - ); - } - } Ok(Self::unit_value()) } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index ae061792125..8ada3faf756 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -4,7 +4,7 @@ use crate::graph::CrateId; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::hir::resolution::import::{resolve_imports, ImportDirective}; +use crate::hir::resolution::import::{resolve_import, ImportDirective}; use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, @@ -259,37 +259,33 @@ impl DefCollector { ); } - // Resolve unresolved imports collected from the crate - let (resolved, unresolved_imports) = - resolve_imports(crate_id, def_collector.collected_imports, &context.def_maps); - - { - let current_def_map = context.def_maps.get(&crate_id).unwrap(); - errors.extend(vecmap(unresolved_imports, |(error, module_id)| { - let file_id = current_def_map.file_id(module_id); - let error = DefCollectorErrorKind::PathResolutionError(error); - (error.into(), file_id) - })); - }; - - // Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - for resolved_import in resolved { - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { - let result = current_def_map.modules[resolved_import.module_scope.0].import( - name.clone(), - ns, - resolved_import.is_prelude, - ); - - if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Import, - first_def, - second_def, - }; - errors.push((err.into(), root_file_id)); + // Resolve unresolved imports collected from the crate, one by one. 
+ for collected_import in def_collector.collected_imports { + match resolve_import(crate_id, collected_import, &context.def_maps) { + Ok(resolved_import) => { + // Populate module namespaces according to the imports used + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + + let name = resolved_import.name; + for ns in resolved_import.resolved_namespace.iter_defs() { + let result = current_def_map.modules[resolved_import.module_scope.0] + .import(name.clone(), ns, resolved_import.is_prelude); + + if let Err((first_def, second_def)) = result { + let err = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Import, + first_def, + second_def, + }; + errors.push((err.into(), root_file_id)); + } + } + } + Err((error, module_id)) => { + let current_def_map = context.def_maps.get(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + let error = DefCollectorErrorKind::PathResolutionError(error); + errors.push((error.into(), file_id)); } } } diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 41fdac746bd..e6ac33053a0 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,4 +1,3 @@ -use iter_extended::partition_results; use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; @@ -51,29 +50,27 @@ impl From for CustomDiagnostic { } } -pub fn resolve_imports( +pub fn resolve_import( crate_id: CrateId, - imports_to_resolve: Vec, + import_directive: ImportDirective, def_maps: &BTreeMap, -) -> (Vec, Vec<(PathResolutionError, LocalModuleId)>) { +) -> Result { let def_map = &def_maps[&crate_id]; - partition_results(imports_to_resolve, |import_directive| { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - - let module_scope = import_directive.module_id; - let resolved_namespace = - resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) - .map_err(|error| (error, module_scope))?; - - let name = resolve_path_name(&import_directive); - Ok(ResolvedImport { - name, - resolved_namespace, - module_scope, - is_prelude: import_directive.is_prelude, - }) + let allow_contracts = + allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); + + let module_scope = import_directive.module_id; + let resolved_namespace = + resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) + .map_err(|error| (error, module_scope))?; + + let name = resolve_path_name(&import_directive); + Ok(ResolvedImport { + name, + resolved_namespace, + module_scope, + is_prelude: import_directive.is_prelude, }) } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 9cd28d80784..91cfa5c6058 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1253,8 +1253,10 @@ impl<'a> Resolver<'a> { if let Some((hir_expr, object_type)) = self.resolve_trait_generic_path(&path) { let expr_id = self.interner.push_expr(hir_expr); self.interner.push_expr_location(expr_id, expr.span, self.file); - self.interner - .select_impl_for_ident(expr_id, TraitImplKind::Assumed { object_type }); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type }, + ); return expr_id; } else { // If the Path is being used as an Expression, then it is referring to a global from a separate 
module @@ -1313,10 +1315,12 @@ impl<'a> Resolver<'a> { ExpressionKind::Infix(infix) => { let lhs = self.resolve_expression(infix.lhs); let rhs = self.resolve_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); HirExpression::Infix(HirInfixExpression { lhs, operator: HirBinaryOp::new(infix.operator, self.file), + trait_method_id: trait_id, rhs, }) } diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs index 54d2630c722..40041b0fd00 100644 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -52,6 +52,13 @@ pub(crate) fn resolve_traits( context.def_interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); }); + + // This check needs to be after the trait's methods are set since + // the interner may set `interner.ordering_type` based on the return type + // of its `cmp` method, if this trait turns out to be the comparison trait. + if crate_id.is_stdlib() { + context.def_interner.try_add_operator_trait(trait_id); + } } res } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index e8f9f23d378..caa77852560 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -136,11 +136,21 @@ impl<'interner> TypeChecker<'interner> { let rhs_span = self.interner.expr_span(&infix_expr.rhs); let span = lhs_span.merge(rhs_span); - self.infix_operand_type_rules(&lhs_type, &infix_expr.operator, &rhs_type, span) - .unwrap_or_else(|error| { + let operator = &infix_expr.operator; + match self.infix_operand_type_rules(&lhs_type, operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + let id = infix_expr.trait_method_id; + self.verify_trait_constraint(&lhs_type, id.trait_id, *expr_id, span); + self.typecheck_operator_method(*expr_id, id, &lhs_type, span); + } + typ + } + Err(error) => { self.errors.push(error); Type::Error - }) + } + } } HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), HirExpression::Call(call_expr) => { @@ -294,7 +304,7 @@ impl<'interner> TypeChecker<'interner> { // We must also remember to apply these substitutions to the object_type // referenced by the selected trait impl, if one has yet to be selected. - let impl_kind = self.interner.get_selected_impl_for_ident(*expr_id); + let impl_kind = self.interner.get_selected_impl_for_expression(*expr_id); if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { let the_trait = self.interner.get_trait(method.trait_id); let object_type = object_type.substitute(&bindings); @@ -302,8 +312,10 @@ impl<'interner> TypeChecker<'interner> { the_trait.self_type_typevar_id, (the_trait.self_type_typevar.clone(), object_type.clone()), ); - self.interner - .select_impl_for_ident(*expr_id, TraitImplKind::Assumed { object_type }); + self.interner.select_impl_for_expression( + *expr_id, + TraitImplKind::Assumed { object_type }, + ); } self.interner.store_instantiation_bindings(*expr_id, bindings); @@ -323,7 +335,7 @@ impl<'interner> TypeChecker<'interner> { span: Span, ) { match self.interner.lookup_trait_implementation(object_type, trait_id) { - Ok(impl_kind) => self.interner.select_impl_for_ident(function_ident_id, impl_kind), + Ok(impl_kind) => self.interner.select_impl_for_expression(function_ident_id, impl_kind), Err(erroring_constraints) => { // Don't show any errors where try_get_trait returns None.
// This can happen if a trait is used that was never declared. @@ -753,19 +765,22 @@ impl<'interner> TypeChecker<'interner> { None } + // Given a binary comparison operator and another type, this method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates that the caller should use a primitive operation for this + // operator, while a value of true indicates that a user-provided trait impl is required. fn comparator_operand_type_rules( &mut self, lhs_type: &Type, rhs_type: &Type, op: &HirBinaryOp, span: Span, - ) -> Result { - use crate::BinaryOpKind::{Equal, NotEqual}; + ) -> Result<(Type, bool), TypeCheckError> { use Type::*; match (lhs_type, rhs_type) { // Avoid reporting errors multiple times - (Error, _) | (_, Error) => Ok(Bool), + (Error, _) | (_, Error) => Ok((Bool, false)), // Matches on TypeVariable must be first to follow any type // bindings. @@ -791,7 +806,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -816,36 +831,23 @@ impl<'interner> TypeChecker<'interner> { span, }); } - Ok(Bool) - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) + Ok((Bool, false)) } (FieldElement, FieldElement) => { if op.kind.is_valid_for_field_type() { - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::FieldComparison { span }) } } // <= and friends are technically valid for booleans, just not very useful - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), // Special-case == and != for arrays (Array(x_size, x_type), Array(y_size, y_type)) - if matches!(op.kind, Equal | NotEqual) => + if matches!(op.kind, BinaryOpKind::Equal | BinaryOpKind::NotEqual) => { - self.unify(x_type, y_type, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::ArrayElements, - span: op.location.span, - }); - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), actual: rhs_type.clone(), @@ -853,19 +855,9 @@ impl<'interner> TypeChecker<'interner> { span: op.location.span, }); - Ok(Bool) - } - (lhs @ NamedGeneric(binding_a, _), rhs @ NamedGeneric(binding_b, _)) => { - if binding_a == binding_b { - return Ok(Bool); - } - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }) + self.comparator_operand_type_rules(x_type, y_type, op, span) } + (String(x_size), String(y_size)) => { self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: *x_size.clone(), @@ -874,14 +866,17 @@ impl<'interner> TypeChecker<'interner> { source: Source::StringLen, }); - Ok(Bool) + Ok((Bool, false)) + } + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) + } - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }), } } @@ -1041,13 +1036,16 @@ impl<'interner> TypeChecker<'interner> { } // Given a binary operator and 
another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates that the caller should use a primitive operation for this + // operator, while a value of true indicates that a user-provided trait impl is required. fn infix_operand_type_rules( &mut self, lhs_type: &Type, op: &HirBinaryOp, rhs_type: &Type, span: Span, - ) -> Result { + ) -> Result<(Type, bool), TypeCheckError> { if op.kind.is_comparator() { return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); } @@ -1055,7 +1053,7 @@ impl<'interner> TypeChecker<'interner> { use Type::*; match (lhs_type, rhs_type) { // An error type on either side will always return an error - (Error, _) | (_, Error) => Ok(Error), + (Error, _) | (_, Error) => Ok((Error, false)), // Matches on TypeVariable must be first so that we follow any type // bindings. @@ -1096,7 +1094,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(other.clone()) + Ok((other.clone(), false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -1121,25 +1119,8 @@ impl<'interner> TypeChecker<'interner> { span, }); } - Ok(Integer(*sign_x, *bit_width_x)) - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) + Ok((Integer(*sign_x, *bit_width_x), false)) } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) - } - // These types are not supported in binary operations - (Array(..), _) | (_, Array(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Arrays", span }) - } - (Struct(..), _) | (_, Struct(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Structs", span }) - } - (Tuple(_), _) | (_, Tuple(_)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Tuples", span }) - } - // The result of two Fields is always a witness (FieldElement, FieldElement) => { if op.is_bitwise() { @@ -1148,17 +1129,20 @@ impl<'interner> TypeChecker<'interner> { if op.is_modulo() { return Err(TypeCheckError::FieldModulo { span }); } - Ok(FieldElement) + Ok((FieldElement, false)) } - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::BinOp(op.kind), - span, - }), + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } } } @@ -1210,6 +1194,57 @@ impl<'interner> TypeChecker<'interner> { } } } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. 
+ fn typecheck_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. + let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner + .select_impl_for_expression(expr_id, TraitImplKind::Assumed { object_type }); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } } /// Taken from: https://stackoverflow.com/a/47127500 diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 95991047091..092e8631f1b 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -241,7 +241,7 @@ mod test { function::{FuncMeta, HirFunction}, stmt::HirStatement, }; - use crate::node_interner::{DefinitionKind, FuncId, NodeInterner}; + use crate::node_interner::{DefinitionKind, FuncId, NodeInterner, TraitId, TraitMethodId}; use crate::{ hir::{ def_map::{CrateDefMap, LocalModuleId, ModuleDefId}, @@ -254,6 +254,7 @@ mod test { #[test] fn basic_let() { let mut interner = NodeInterner::default(); + interner.populate_dummy_operator_traits(); // Safety: The FileId in a location isn't used for tests let file = FileId::default(); @@ -284,7 +285,9 @@ mod test { // Create Infix let operator = HirBinaryOp { location, kind: BinaryOpKind::Add }; - let expr = HirInfixExpression { lhs: x_expr_id, operator, rhs: y_expr_id }; + let trait_id = TraitId(ModuleId::dummy_id()); + let trait_method_id = TraitMethodId { trait_id, method_index: 0 }; + let expr = HirInfixExpression { lhs: x_expr_id, operator, rhs: y_expr_id, trait_method_id }; let expr_id = interner.push_expr(HirExpression::Infix(expr)); interner.push_expr_location(expr_id, Span::single_char(0), file); @@ -469,6 +472,7 @@ mod test { ) { let (program, errors) = parse_program(src); let mut interner = NodeInterner::default(); + interner.populate_dummy_operator_traits(); assert_eq!( errors.len(), diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index ef1c3af7ac0..7c04398ca88 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -101,6 +101,12 @@ pub struct HirInfixExpression { pub lhs: ExprId, pub 
operator: HirBinaryOp, pub rhs: ExprId, + + /// The trait method id for the operator trait method that corresponds to this operator. + /// For derived operators like `!=`, this will lead to the method `Eq::eq`. For these + /// cases, it is up to the monomorphization pass to insert the appropriate `not` operation + /// after the call to `Eq::eq` to get the result of the `!=` operator. + pub trait_method_id: TraitMethodId, } /// This is always a struct field access `my_struct.field` diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index 5a5f07b0a38..42a618e7d77 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -352,7 +352,7 @@ impl std::fmt::Display for Type { }; write!(f, "fn({}) -> {}{}", args.join(", "), ret, closure_env_text) } - Type::Slice(element) => write!(f, "[{element}"), + Type::Slice(element) => write!(f, "[{element}]"), Type::MutableReference(element) => write!(f, "&mut {element}"), } } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 39b05dcd5ac..bb0972987e4 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -26,7 +26,7 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, + ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, Visibility, }; @@ -238,6 +238,7 @@ impl<'interner> Monomorphizer<'interner> { }); let parameters = self.parameters(meta.parameters); + let body = self.expr(body_expr_id); let unconstrained = modifiers.is_unconstrained || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); @@ -356,11 +357,37 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Infix(infix) => { - let lhs = Box::new(self.expr(infix.lhs)); - let rhs = Box::new(self.expr(infix.rhs)); + let lhs = self.expr(infix.lhs); + let rhs = self.expr(infix.rhs); let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); - ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + + if self.interner.get_selected_impl_for_expression(expr).is_some() { + // If an impl was selected for this infix operator, replace it + // with a method call to the appropriate trait impl method. 
+ let lhs_type = self.interner.id_type(infix.lhs); + let args = vec![lhs_type.clone(), lhs_type]; + + // If this is a comparison operator, the result is a boolean but + // the actual method call returns an Ordering + use crate::BinaryOpKind::*; + let ret = if matches!(operator, Less | LessEqual | Greater | GreaterEqual) { + self.interner.ordering_type() + } else { + self.interner.id_type(expr) + }; + + let env = Box::new(Type::Unit); + let function_type = Type::Function(args, Box::new(ret.clone()), env); + + let method = infix.trait_method_id; + let func = self.resolve_trait_method_reference(expr, function_type, method); + self.create_operator_impl_call(func, lhs, infix.operator, rhs, ret, location) + } else { + let lhs = Box::new(lhs); + let rhs = Box::new(rhs); + ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + } } HirExpression::Index(index) => self.index(expr, index), @@ -400,13 +427,8 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Lambda(lambda) => self.lambda(lambda, expr), HirExpression::TraitMethodReference(method) => { - if let Type::Function(_, _, _) = self.interner.id_type(expr) { - self.resolve_trait_method_reference(expr, method) - } else { - unreachable!( - "Calling a non-function, this should've been caught in typechecking" - ); - } + let function_type = self.interner.id_type(expr); + self.resolve_trait_method_reference(expr, function_type, method) } HirExpression::MethodCall(hir_method_call) => { @@ -826,13 +848,12 @@ impl<'interner> Monomorphizer<'interner> { fn resolve_trait_method_reference( &mut self, expr_id: node_interner::ExprId, + function_type: HirType, method: TraitMethodId, ) -> ast::Expression { - let function_type = self.interner.id_type(expr_id); - let trait_impl = self .interner - .get_selected_impl_for_ident(expr_id) + .get_selected_impl_for_expression(expr_id) .expect("ICE: missing trait impl - should be caught during type checking"); let hir_func_id = match trait_impl { @@ -953,7 +974,7 @@ impl<'interner> Monomorphizer<'interner> { } /// Adds a function argument that contains type metadata that is required to tell - /// `println` how to convert values passed to an foreign call back to a human-readable string. + /// `println` how to convert values passed to an foreign call back to a human-readable string. /// The values passed to an foreign call will be a simple list of field elements, /// thus requiring extra metadata to correctly decode this list of elements. /// @@ -1087,7 +1108,7 @@ impl<'interner> Monomorphizer<'interner> { function_type: HirType, ) -> FuncId { let new_id = self.next_function_id(); - self.define_global(id, function_type, new_id); + self.define_global(id, function_type.clone(), new_id); let bindings = self.interner.get_instantiation_bindings(expr_id); let bindings = self.follow_bindings(bindings); @@ -1417,6 +1438,70 @@ impl<'interner> Monomorphizer<'interner> { ), }) } + + /// Call an operator overloading method for the given operator. + /// This function handles the special cases some operators have which don't map + /// 1 to 1 onto their operator function. For example: != requires a negation on + /// the result of its `eq` method, and the comparison operators each require a + /// conversion from the `Ordering` result to a boolean. 
+ fn create_operator_impl_call( + &self, + func: ast::Expression, + lhs: ast::Expression, + operator: HirBinaryOp, + rhs: ast::Expression, + ret: Type, + location: Location, + ) -> ast::Expression { + let arguments = vec![lhs, rhs]; + let func = Box::new(func); + let return_type = self.convert_type(&ret); + + let mut result = + ast::Expression::Call(ast::Call { func, arguments, return_type, location }); + + use crate::BinaryOpKind::*; + match operator.kind { + // Negate the result of the == operation + NotEqual => { + result = ast::Expression::Unary(ast::Unary { + operator: UnaryOp::Not, + rhs: Box::new(result), + result_type: ast::Type::Bool, + location, + }); + } + // All the comparison operators require special handling since their `cmp` method + // returns an `Ordering` rather than a boolean value. + // + // (a < b) => a.cmp(b) == Ordering::Less + // (a <= b) => a.cmp(b) != Ordering::Greater + // (a > b) => a.cmp(b) == Ordering::Greater + // (a >= b) => a.cmp(b) != Ordering::Less + Less | LessEqual | Greater | GreaterEqual => { + // Comparing an Ordering directly to a field value in this way takes advantage + // of the fact the Ordering struct contains a single Field type, and our SSA + // pass will automatically unpack tuple values. + let ordering_value = if matches!(operator.kind, Less | GreaterEqual) { + FieldElement::zero() // Ordering::Less + } else { + 2u128.into() // Ordering::Greater + }; + + let operator = + if matches!(operator.kind, Less | Greater) { Equal } else { NotEqual }; + + let int_value = ast::Literal::Integer(ordering_value, ast::Type::Field, location); + let rhs = Box::new(ast::Expression::Literal(int_value)); + let lhs = Box::new(ast::Expression::ExtractTupleField(Box::new(result), 0)); + + result = ast::Expression::Binary(ast::Binary { lhs, operator, rhs, location }); + } + _ => (), + } + + result + } } fn unwrap_tuple_type(typ: &HirType) -> Vec { diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 236f1e0b513..7c42e279916 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -21,8 +21,8 @@ use crate::hir_def::{ }; use crate::token::{Attributes, SecondaryAttribute}; use crate::{ - ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, TypeAliasType, - TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, + TypeAliasType, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. @@ -110,6 +110,12 @@ pub struct NodeInterner { /// the context to get the concrete type of the object and select the correct impl itself. selected_trait_implementations: HashMap, + /// Holds the trait ids of the traits used for operator overloading + operator_traits: HashMap, + + /// The `Ordering` type is a semi-builtin type that is the result of the comparison traits. + ordering_type: Option, + /// Map from ExprId (referring to a Function/Method call) to its corresponding TypeBindings, /// filled out during type checking from instantiated variables. Used during monomorphization /// to map call site types back onto function parameter types, and undo this binding as needed. 
@@ -423,6 +429,8 @@ impl Default for NodeInterner { trait_implementations: Vec::new(), trait_implementation_map: HashMap::new(), selected_trait_implementations: HashMap::new(), + operator_traits: HashMap::new(), + ordering_type: None, instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), @@ -1254,13 +1262,12 @@ impl NodeInterner { /// Tags the given identifier with the selected trait_impl so that monomorphization /// can later recover which impl was selected, or alternatively see if it needs to /// decide which impl to select (because the impl was Assumed). - pub fn select_impl_for_ident(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { + pub fn select_impl_for_expression(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { self.selected_trait_implementations.insert(ident_id, trait_impl); } - /// Retrieves the impl selected for a given IdentId during name resolution. - /// From type checking and on, the "ident" referred to is changed to a TraitMethodReference node. - pub fn get_selected_impl_for_ident(&self, ident_id: ExprId) -> Option { + /// Retrieves the impl selected for a given ExprId during name resolution. + pub fn get_selected_impl_for_expression(&self, ident_id: ExprId) -> Option { self.selected_trait_implementations.get(&ident_id).cloned() } @@ -1294,9 +1301,6 @@ impl NodeInterner { } HirExpression::Constructor(expr) => { let struct_type = &expr.r#type.borrow(); - - eprintln!("\n -> Resolve Constructor {struct_type:?}\n"); - Some(struct_type.location) } HirExpression::MemberAccess(expr_member_access) => { @@ -1322,35 +1326,103 @@ impl NodeInterner { let expr_lhs = &expr_member_access.lhs; let expr_rhs = &expr_member_access.rhs; - let found_ident = self.nodes.get(expr_lhs.into())?; - - let ident = match found_ident { - Node::Expression(HirExpression::Ident(ident)) => ident, + let lhs_self_struct = match self.id_type(expr_lhs) { + Type::Struct(struct_type, _) => struct_type, _ => return None, }; - let definition_info = self.definition(ident.id); + let struct_type = lhs_self_struct.borrow(); + let field_names = struct_type.field_names(); - let local_id = match definition_info.kind { - DefinitionKind::Local(Some(local_id)) => local_id, - _ => return None, + field_names.iter().find(|field_name| field_name.0 == expr_rhs.0).map(|found_field_name| { + Location::new(found_field_name.span(), struct_type.location.file) + }) + } + + /// Retrieves the trait id for a given binary operator. + /// All binary operators correspond to a trait - although multiple may correspond + /// to the same trait (such as `==` and `!=`). + /// `self.operator_traits` is expected to be filled before name resolution, + /// during definition collection. + pub fn get_operator_trait_method(&self, operator: BinaryOpKind) -> TraitMethodId { + let trait_id = self.operator_traits[&operator]; + + // Assume that the operator's method to be overloaded is the first method of the trait. + TraitMethodId { trait_id, method_index: 0 } + } + + /// Add the given trait as an operator trait if its name matches one of the + /// operator trait names (Add, Sub, ...). 
+ pub fn try_add_operator_trait(&mut self, trait_id: TraitId) { + let the_trait = self.get_trait(trait_id); + + let operator = match the_trait.name.0.contents.as_str() { + "Add" => BinaryOpKind::Add, + "Sub" => BinaryOpKind::Subtract, + "Mul" => BinaryOpKind::Multiply, + "Div" => BinaryOpKind::Divide, + "Rem" => BinaryOpKind::Modulo, + "Eq" => BinaryOpKind::Equal, + "Ord" => BinaryOpKind::Less, + "BitAnd" => BinaryOpKind::And, + "BitOr" => BinaryOpKind::Or, + "BitXor" => BinaryOpKind::Xor, + "Shl" => BinaryOpKind::ShiftLeft, + "Shr" => BinaryOpKind::ShiftRight, + _ => return, }; - let constructor_expression = match self.nodes.get(local_id.into()) { - Some(Node::Expression(HirExpression::Constructor(constructor_expression))) => { - constructor_expression - } - _ => return None, - }; - - let struct_type = constructor_expression.r#type.borrow(); - let field_names = struct_type.field_names(); + self.operator_traits.insert(operator, trait_id); - match field_names.iter().find(|field_name| field_name.0 == expr_rhs.0) { - Some(found) => Some(Location::new(found.span(), struct_type.location.file)), - None => None, + // Some operators also require we insert a matching entry for related operators + match operator { + BinaryOpKind::Equal => { + self.operator_traits.insert(BinaryOpKind::NotEqual, trait_id); + } + BinaryOpKind::Less => { + self.operator_traits.insert(BinaryOpKind::LessEqual, trait_id); + self.operator_traits.insert(BinaryOpKind::Greater, trait_id); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); + + let the_trait = self.get_trait(trait_id); + self.ordering_type = match &the_trait.methods[0].typ { + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(_, return_type, _) => Some(return_type.as_ref().clone()), + other => unreachable!("Expected function type for `cmp`, found {}", other), + }, + other => unreachable!("Expected Forall type for `cmp`, found {}", other), + }; + } + _ => (), } } + + /// This function is needed when creating a NodeInterner for testing so that calls + /// to `get_operator_trait` do not panic when the stdlib isn't present. 
+ #[cfg(test)] + pub fn populate_dummy_operator_traits(&mut self) { + let dummy_trait = TraitId(ModuleId::dummy_id()); + self.operator_traits.insert(BinaryOpKind::Add, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Divide, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Equal, dummy_trait); + self.operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Less, dummy_trait); + self.operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Greater, dummy_trait); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::And, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Or, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Xor, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + } + + pub(crate) fn ordering_type(&self) -> Type { + self.ordering_type.clone().expect("Expected ordering_type to be set in the NodeInterner") + } } impl Methods { diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index cd0c34f7e09..063e0215a30 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -53,6 +53,7 @@ mod test { let root = std::path::Path::new("/"); let fm = FileManager::new(root); let mut context = Context::new(fm); + context.def_interner.populate_dummy_operator_traits(); let root_file_id = FileId::dummy(); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); let (program, parser_errors) = parse_program(src); diff --git a/compiler/noirc_printable_type/Cargo.toml b/compiler/noirc_printable_type/Cargo.toml index 5f2eea92257..fbbe778e561 100644 --- a/compiler/noirc_printable_type/Cargo.toml +++ b/compiler/noirc_printable_type/Cargo.toml @@ -14,5 +14,6 @@ regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true +jsonrpc.workspace = true [dev-dependencies] diff --git a/compiler/noirc_printable_type/src/lib.rs b/compiler/noirc_printable_type/src/lib.rs index e10e400b0db..273e2d512ea 100644 --- a/compiler/noirc_printable_type/src/lib.rs +++ b/compiler/noirc_printable_type/src/lib.rs @@ -73,6 +73,9 @@ pub enum ForeignCallError { #[error("Could not parse PrintableType argument. {0}")] ParsingError(#[from] serde_json::Error), + + #[error("Failed calling external resolver. 
{0}")] + ExternalResolverError(#[from] jsonrpc::Error), } impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index 352142eb3e3..38fd118f189 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/noir_wasm", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", @@ -14,9 +14,14 @@ "package.json" ], "sideEffects": false, + "homepage": "https://noir-lang.org/", "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" }, "scripts": { "build": "bash ./build.sh", diff --git a/cspell.json b/cspell.json index d8f04ed6069..94449a68a77 100644 --- a/cspell.json +++ b/cspell.json @@ -15,6 +15,8 @@ "bincode", "bindgen", "bitand", + "bitxor", + "bitor", "blackbox", "bridgekeeper", "brillig", @@ -36,6 +38,7 @@ "cpus", "cranelift", "curvegroup", + "databus", "deflatten", "deflattened", "deflattening", @@ -63,6 +66,7 @@ "gloo", "grumpkin", "Guillaume", + "gzipped", "hasher", "hexdigit", "higher-kinded", @@ -124,6 +128,7 @@ "srem", "stdlib", "struct", + "structs", "subexpression", "subshell", "subtyping", @@ -148,5 +153,8 @@ "wasi", "Weierstraß", "zshell" + ], + "ignorePaths": [ + "./**/node_modules/**" ] } diff --git a/deny.toml b/deny.toml index d9ffd4d37f0..5edce08fb70 100644 --- a/deny.toml +++ b/deny.toml @@ -67,6 +67,7 @@ exceptions = [ # so we prefer to not have dependencies using it # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal { allow = ["CC0-1.0"], name = "more-asserts" }, + { allow = ["CC0-1.0"], name = "jsonrpc" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, diff --git a/docs/docs/explainers/explainer-recursion.md b/docs/docs/explainers/explainer-recursion.md index 9357d3c7341..8f992ec29fd 100644 --- a/docs/docs/explainers/explainer-recursion.md +++ b/docs/docs/explainers/explainer-recursion.md @@ -64,7 +64,9 @@ So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob w This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. -So, Alice started thinking: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". She can then generate a proof that she verified his proof, and so on. +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. ```md Did you fail? <-------------------------- @@ -86,7 +88,7 @@ Generate proof of that / Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! 
-So, the tallier puts all the votes in a merkle tree, and everyone can also prove the verification of two proofs within one proof, as such: +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: ```md abcd @@ -104,7 +106,7 @@ Doing this recursively allows us to arrive on a final proof `abcd` which if true Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. -He could find it more efficient to generate a proof for that setup phase separately, and verifying it in his actual business logic section of the circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. ## What params do I need diff --git a/docs/docs/explanations/standard_library/traits.md b/docs/docs/explanations/standard_library/traits.md deleted file mode 100644 index 63b4f3d6f0b..00000000000 --- a/docs/docs/explanations/standard_library/traits.md +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: Traits -description: Noir's stdlib provides a few commonly used traits. -keywords: [traits, trait, interface, protocol, default, add, eq] ---- - -## `std::default` - -### `std::default::Default` - -```rust -trait Default { - fn default() -> Self; -} -``` - -Constructs a default value of a type. - -Implementations: -```rust -impl Default for Field { .. } - -impl Default for i8 { .. } -impl Default for i16 { .. } -impl Default for i32 { .. } -impl Default for i64 { .. } - -impl Default for u8 { .. } -impl Default for u16 { .. } -impl Default for u32 { .. } -impl Default for u64 { .. } - -impl Default for () { .. } -impl Default for bool { .. } - -impl Default for [T; N] - where T: Default { .. } - -impl Default for (A, B) - where A: Default, B: Default { .. } - -impl Default for (A, B, C) - where A: Default, B: Default, C: Default { .. } - -impl Default for (A, B, C, D) - where A: Default, B: Default, C: Default, D: Default { .. } - -impl Default for (A, B, C, D, E) - where A: Default, B: Default, C: Default, D: Default, E: Default { .. } -``` - -For primitive integer types, the return value of `default` is `0`. Container -types such as arrays are filled with default values of their element type. - -## `std::ops` - -### `std::ops::Eq` - -```rust -trait Eq { - fn eq(self, other: Self) -> bool; -} -``` -Returns `true` if `self` is equal to `other`. - -Implementations: -```rust -impl Eq for Field { .. } - -impl Eq for i8 { .. } -impl Eq for i16 { .. } -impl Eq for i32 { .. } -impl Eq for i64 { .. } - -impl Eq for u8 { .. } -impl Eq for u16 { .. } -impl Eq for u32 { .. } -impl Eq for u64 { .. } - -impl Eq for () { .. } -impl Eq for bool { .. } - -impl Eq for [T; N] - where T: Eq { .. } - -impl Eq for (A, B) - where A: Eq, B: Eq { .. } - -impl Eq for (A, B, C) - where A: Eq, B: Eq, C: Eq { .. } - -impl Eq for (A, B, C, D) - where A: Eq, B: Eq, C: Eq, D: Eq { .. } - -impl Eq for (A, B, C, D, E) - where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. 
} -``` - -### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` - -These traits abstract over addition, subtraction, multiplication, and division respectively. -Although Noir does not currently have operator overloading, in the future implementing these -traits for a given type will also allow that type to be used with the corresponding operator -for that trait (`+` for Add, etc) in addition to the normal method names. - -```rust -trait Add { - fn add(self, other: Self) -> Self; -} - -trait Sub { - fn sub(self, other: Self) -> Self; -} - -trait Mul { - fn mul(self, other: Self) -> Self; -} - -trait Div { - fn div(self, other: Self) -> Self; -} -``` - -The implementations block below is given for the `Add` trait, but the same types that implement -`Add` also implement `Sub`, `Mul`, and `Div`. - -Implementations: -```rust -impl Add for Field { .. } - -impl Add for i8 { .. } -impl Add for i16 { .. } -impl Add for i32 { .. } -impl Add for i64 { .. } - -impl Add for u8 { .. } -impl Add for u16 { .. } -impl Add for u32 { .. } -impl Add for u64 { .. } -``` diff --git a/docs/docs/getting_started/create_a_project.md b/docs/docs/getting_started/create_a_project.md index f10916c39c5..26ff265c389 100644 --- a/docs/docs/getting_started/create_a_project.md +++ b/docs/docs/getting_started/create_a_project.md @@ -69,7 +69,7 @@ x : Field, y : pub Field Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](../noir/syntax/data_types/index.md) section. +[Data Types](../noir/concepts/data_types/index.md) section. The next line of the program specifies its body: @@ -79,7 +79,7 @@ assert(x != y); The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. -For more Noir syntax, check the [Language Concepts](../noir/syntax/comments.md) chapter. +For more Noir syntax, check the [Language Concepts](../noir/concepts/comments.md) chapter. ## Build In/Output Files diff --git a/docs/docs/getting_started/installation/index.md b/docs/docs/getting_started/installation/index.md index ddb8a250eb4..27eeeca88ed 100644 --- a/docs/docs/getting_started/installation/index.md +++ b/docs/docs/getting_started/installation/index.md @@ -1,7 +1,7 @@ --- title: Nargo Installation description: - nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo though the most common and easy method, noirup + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup keywords: [ Nargo Noir @@ -41,5 +41,5 @@ noirup Done. That's it. You should have the latest version working. You can check with `nargo --version`. You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more information. 
diff --git a/docs/docs/getting_started/tooling/testing.md b/docs/docs/getting_started/tooling/testing.md index 5febd44e96b..d3e0c522473 100644 --- a/docs/docs/getting_started/tooling/testing.md +++ b/docs/docs/getting_started/tooling/testing.md @@ -24,7 +24,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/docs/how_to/how-to-recursion.md b/docs/docs/how_to/how-to-recursion.md index db9ad0e99f8..f60aa3ff2d9 100644 --- a/docs/docs/how_to/how-to-recursion.md +++ b/docs/docs/how_to/how-to-recursion.md @@ -31,9 +31,9 @@ It is also assumed that you're not using `noir_wasm` for compilation, and instea :::info -As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. Meaning it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. -While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. Which means these proofs need to be created by using the backend directly. +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly. In short: @@ -82,11 +82,11 @@ const { proof, publicInputs } = await backend.generateIntermediateProof(witness) :::warning -Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit are we actually running and why! +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. -With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*. So it is Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. ::: diff --git a/docs/docs/index.md b/docs/docs/index.md index eaf8c59f935..ab8c2f8acd2 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -24,12 +24,13 @@ sidebar_position: 0 Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, -Noir takes a two-pronged path. 
It first compiles to an adaptable intermediate language known as ACIR. From there, -depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's -barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin -backend, among others. +Noir takes a two-pronged path. First, Noir compiles to an adaptable intermediate language known as ACIR. -This innovative design introduces unique challenges, yet it strategically separates the programming language from the +From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend, or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend (among others). + +This innovative design introduces unique challenges; however, this approach also strategically separates the programming language from the backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic programming. @@ -61,7 +62,7 @@ within your projects. ## Libraries -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the +Noir does not currently have an official package manager. You can find a list of some of the available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). Some libraries that are available today include: diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index d5d0682cf0c..9f27230a1a0 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -16,7 +16,7 @@ To update, please make sure this field in `Nargo.toml` matches the output of `na ## ≥0.14 -The index of the [for loops](noir/syntax/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: ```rust for i in 0..10 { @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/docs/noir/syntax/_category_.json b/docs/docs/noir/concepts/_category_.json similarity index 72% rename from docs/docs/noir/syntax/_category_.json rename to docs/docs/noir/concepts/_category_.json index 666b691ae91..7da08f8a8c5 100644 --- a/docs/docs/noir/syntax/_category_.json +++ b/docs/docs/noir/concepts/_category_.json @@ -1,6 +1,6 @@ { - "label": "Syntax", + "label": "Concepts", "position": 0, "collapsible": true, "collapsed": true -} +} \ No newline at end of file diff --git a/docs/docs/noir/syntax/assert.md b/docs/docs/noir/concepts/assert.md similarity index 100% rename from docs/docs/noir/syntax/assert.md rename to docs/docs/noir/concepts/assert.md diff --git a/docs/docs/noir/syntax/comments.md b/docs/docs/noir/concepts/comments.md similarity index 100% rename from docs/docs/noir/syntax/comments.md rename to docs/docs/noir/concepts/comments.md diff --git a/docs/docs/noir/syntax/control_flow.md b/docs/docs/noir/concepts/control_flow.md similarity index 100% rename from docs/docs/noir/syntax/control_flow.md rename to docs/docs/noir/concepts/control_flow.md diff --git a/docs/docs/noir/syntax/data_bus.md b/docs/docs/noir/concepts/data_bus.md similarity index 100% rename from docs/docs/noir/syntax/data_bus.md rename to docs/docs/noir/concepts/data_bus.md diff --git a/docs/docs/noir/syntax/data_types/_category_.json b/docs/docs/noir/concepts/data_types/_category_.json similarity index 100% rename from docs/docs/noir/syntax/data_types/_category_.json rename to docs/docs/noir/concepts/data_types/_category_.json diff --git a/docs/docs/noir/syntax/data_types/arrays.md b/docs/docs/noir/concepts/data_types/arrays.md similarity index 100% rename from docs/docs/noir/syntax/data_types/arrays.md rename to docs/docs/noir/concepts/data_types/arrays.md diff --git a/docs/docs/noir/syntax/data_types/booleans.md b/docs/docs/noir/concepts/data_types/booleans.md similarity index 100% rename from docs/docs/noir/syntax/data_types/booleans.md rename to docs/docs/noir/concepts/data_types/booleans.md diff --git a/docs/docs/noir/syntax/data_types/fields.md b/docs/docs/noir/concepts/data_types/fields.md similarity index 100% rename from docs/docs/noir/syntax/data_types/fields.md rename to docs/docs/noir/concepts/data_types/fields.md diff --git a/docs/docs/noir/syntax/data_types/function_types.md b/docs/docs/noir/concepts/data_types/function_types.md similarity index 88% rename from docs/docs/noir/syntax/data_types/function_types.md rename to docs/docs/noir/concepts/data_types/function_types.md index 61e4076adaf..e224e860d59 100644 --- a/docs/docs/noir/syntax/data_types/function_types.md +++ b/docs/docs/noir/concepts/data_types/function_types.md @@ -23,4 +23,4 @@ fn main() { ``` A function type also has an optional capture environment - this is necessary to support closures. -See [Lambdas](@site/docs/noir/syntax/lambdas.md) for more details. +See [Lambdas](@site/docs/noir/concepts/lambdas.md) for more details. 
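To illustrate the function types described in that file, here is a short sketch (not taken from the docs themselves) of a plain function value being passed where a `fn` type is expected, alongside a capturing lambda:

```rust
fn add_one(x: Field) -> Field {
    x + 1
}

// `f` has the function type `fn(Field) -> Field`.
fn twice(f: fn(Field) -> Field, x: Field) -> Field {
    f(f(x))
}

fn main() {
    assert(twice(add_one, 1) == 3);

    // A lambda that captures `offset` carries a capture environment in its type,
    // so it is not the same type as a plain `fn(Field) -> Field`.
    let offset = 5;
    let g = |x: Field| x + offset;
    assert(g(2) == 7);
}
```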
diff --git a/docs/docs/noir/syntax/data_types/index.md b/docs/docs/noir/concepts/data_types/index.md similarity index 97% rename from docs/docs/noir/syntax/data_types/index.md rename to docs/docs/noir/concepts/data_types/index.md index 01cd0431a68..3c9cd4c2437 100644 --- a/docs/docs/noir/syntax/data_types/index.md +++ b/docs/docs/noir/concepts/data_types/index.md @@ -79,7 +79,7 @@ fn main() { } ``` -Type aliases can also be used with [generics](@site/docs/noir/syntax/generics.md): +Type aliases can also be used with [generics](@site/docs/noir/concepts/generics.md): ```rust type Id = Size; diff --git a/docs/docs/noir/syntax/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md similarity index 100% rename from docs/docs/noir/syntax/data_types/integers.md rename to docs/docs/noir/concepts/data_types/integers.md diff --git a/docs/docs/noir/syntax/data_types/references.md b/docs/docs/noir/concepts/data_types/references.md similarity index 100% rename from docs/docs/noir/syntax/data_types/references.md rename to docs/docs/noir/concepts/data_types/references.md diff --git a/docs/docs/noir/syntax/data_types/slices.mdx b/docs/docs/noir/concepts/data_types/slices.mdx similarity index 100% rename from docs/docs/noir/syntax/data_types/slices.mdx rename to docs/docs/noir/concepts/data_types/slices.mdx diff --git a/docs/docs/noir/syntax/data_types/strings.md b/docs/docs/noir/concepts/data_types/strings.md similarity index 100% rename from docs/docs/noir/syntax/data_types/strings.md rename to docs/docs/noir/concepts/data_types/strings.md diff --git a/docs/docs/noir/syntax/data_types/structs.md b/docs/docs/noir/concepts/data_types/structs.md similarity index 100% rename from docs/docs/noir/syntax/data_types/structs.md rename to docs/docs/noir/concepts/data_types/structs.md diff --git a/docs/docs/noir/syntax/data_types/tuples.md b/docs/docs/noir/concepts/data_types/tuples.md similarity index 100% rename from docs/docs/noir/syntax/data_types/tuples.md rename to docs/docs/noir/concepts/data_types/tuples.md diff --git a/docs/docs/noir/syntax/data_types/vectors.mdx b/docs/docs/noir/concepts/data_types/vectors.mdx similarity index 100% rename from docs/docs/noir/syntax/data_types/vectors.mdx rename to docs/docs/noir/concepts/data_types/vectors.mdx diff --git a/docs/docs/noir/syntax/distinct.md b/docs/docs/noir/concepts/distinct.md similarity index 100% rename from docs/docs/noir/syntax/distinct.md rename to docs/docs/noir/concepts/distinct.md diff --git a/docs/docs/noir/syntax/functions.md b/docs/docs/noir/concepts/functions.md similarity index 100% rename from docs/docs/noir/syntax/functions.md rename to docs/docs/noir/concepts/functions.md diff --git a/docs/docs/noir/syntax/generics.md b/docs/docs/noir/concepts/generics.md similarity index 100% rename from docs/docs/noir/syntax/generics.md rename to docs/docs/noir/concepts/generics.md diff --git a/docs/docs/noir/syntax/lambdas.md b/docs/docs/noir/concepts/lambdas.md similarity index 100% rename from docs/docs/noir/syntax/lambdas.md rename to docs/docs/noir/concepts/lambdas.md diff --git a/docs/docs/noir/syntax/mutability.md b/docs/docs/noir/concepts/mutability.md similarity index 100% rename from docs/docs/noir/syntax/mutability.md rename to docs/docs/noir/concepts/mutability.md diff --git a/docs/docs/noir/syntax/ops.md b/docs/docs/noir/concepts/ops.md similarity index 100% rename from docs/docs/noir/syntax/ops.md rename to docs/docs/noir/concepts/ops.md diff --git a/docs/docs/noir/syntax/shadowing.md b/docs/docs/noir/concepts/shadowing.md 
similarity index 100% rename from docs/docs/noir/syntax/shadowing.md rename to docs/docs/noir/concepts/shadowing.md diff --git a/docs/docs/explanations/noir/traits.md b/docs/docs/noir/concepts/traits.md similarity index 100% rename from docs/docs/explanations/noir/traits.md rename to docs/docs/noir/concepts/traits.md diff --git a/docs/docs/noir/syntax/unconstrained.md b/docs/docs/noir/concepts/unconstrained.md similarity index 100% rename from docs/docs/noir/syntax/unconstrained.md rename to docs/docs/noir/concepts/unconstrained.md diff --git a/docs/docs/noir/standard_library/logging.md b/docs/docs/noir/standard_library/logging.md index 16daf922e15..2e163b52ab3 100644 --- a/docs/docs/noir/standard_library/logging.md +++ b/docs/docs/noir/standard_library/logging.md @@ -22,7 +22,7 @@ The standard library provides two familiar statements you can use: `println` and You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: diff --git a/docs/docs/noir/standard_library/traits.md b/docs/docs/noir/standard_library/traits.md new file mode 100644 index 00000000000..50b0e6816ab --- /dev/null +++ b/docs/docs/noir/standard_library/traits.md @@ -0,0 +1,284 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust +trait Default { + fn default() -> Self; +} +``` + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. 
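+
+As an illustrative sketch (the `Point` type is hypothetical, not part of the standard
+library), a user-defined struct can implement `Default` by defaulting each field:
+
+```rust
+struct Point {
+    x: Field,
+    y: Field,
+}
+
+// A zero-valued Point is a sensible default, matching Field's default of 0.
+impl Default for Point {
+    fn default() -> Self {
+        Point { x: 0, y: 0 }
+    }
+}
+```
+
+With such an implementation, `Point` also satisfies `T: Default` bounds, for example
+as the element type of a defaulted array.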
+
+## `std::cmp`
+
+### `std::cmp::Eq`
+
+```rust
+trait Eq {
+    fn eq(self, other: Self) -> bool;
+}
+```
+Returns `true` if `self` is equal to `other`. Implementing this trait on a type
+allows the type to be used with `==` and `!=`.
+
+Implementations:
+```rust
+impl Eq for Field { .. }
+
+impl Eq for i8 { .. }
+impl Eq for i16 { .. }
+impl Eq for i32 { .. }
+impl Eq for i64 { .. }
+
+impl Eq for u8 { .. }
+impl Eq for u16 { .. }
+impl Eq for u32 { .. }
+impl Eq for u64 { .. }
+
+impl Eq for () { .. }
+impl Eq for bool { .. }
+
+impl Eq for [T; N]
+    where T: Eq { .. }
+
+impl Eq for (A, B)
+    where A: Eq, B: Eq { .. }
+
+impl Eq for (A, B, C)
+    where A: Eq, B: Eq, C: Eq { .. }
+
+impl Eq for (A, B, C, D)
+    where A: Eq, B: Eq, C: Eq, D: Eq { .. }
+
+impl Eq for (A, B, C, D, E)
+    where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. }
+```
+
+### `std::cmp::Ord`
+
+```rust
+trait Ord {
+    fn cmp(self, other: Self) -> Ordering;
+}
+```
+
+`a.cmp(b)` compares two values, returning `Ordering::less()` if `a < b`,
+`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`.
+Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be
+used on values of the type.
+
+Implementations:
+
+```rust
+impl Ord for u8 { .. }
+impl Ord for u16 { .. }
+impl Ord for u32 { .. }
+impl Ord for u64 { .. }
+
+impl Ord for i8 { .. }
+impl Ord for i16 { .. }
+impl Ord for i32 { .. }
+impl Ord for i64 { .. }
+
+impl Ord for () { .. }
+impl Ord for bool { .. }
+
+impl Ord for [T; N]
+    where T: Ord { .. }
+
+impl Ord for (A, B)
+    where A: Ord, B: Ord { .. }
+
+impl Ord for (A, B, C)
+    where A: Ord, B: Ord, C: Ord { .. }
+
+impl Ord for (A, B, C, D)
+    where A: Ord, B: Ord, C: Ord, D: Ord { .. }
+
+impl Ord for (A, B, C, D, E)
+    where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. }
+```
+
+## `std::ops`
+
+### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div`
+
+These traits abstract over addition, subtraction, multiplication, and division respectively.
+Implementing these traits for a given type will also allow that type to be used with the corresponding operator
+for that trait (`+` for Add, etc.) in addition to the normal method names.
+
+```rust
+trait Add {
+    fn add(self, other: Self) -> Self;
+}
+
+trait Sub {
+    fn sub(self, other: Self) -> Self;
+}
+
+trait Mul {
+    fn mul(self, other: Self) -> Self;
+}
+
+trait Div {
+    fn div(self, other: Self) -> Self;
+}
+```
+
+The implementations block below is given for the `Add` trait, but the same types that implement
+`Add` also implement `Sub`, `Mul`, and `Div`.
+
+Implementations:
+```rust
+impl Add for Field { .. }
+
+impl Add for i8 { .. }
+impl Add for i16 { .. }
+impl Add for i32 { .. }
+impl Add for i64 { .. }
+
+impl Add for u8 { .. }
+impl Add for u16 { .. }
+impl Add for u32 { .. }
+impl Add for u64 { .. }
+```
+
+### `std::ops::Rem`
+
+```rust
+trait Rem {
+    fn rem(self, other: Self) -> Self;
+}
+```
+
+`Rem::rem(a, b)` is the remainder function, returning what is left over after
+dividing `a` by `b`. Implementing `Rem` allows the `%` operator to be used with
+the implementing type.
+
+Unlike other numeric traits, `Rem` is not implemented for `Field`.
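+
+For example (an illustrative sketch, not part of the implementations listed below),
+`%` on integer values resolves to `Rem::rem`, while `%` on `Field` values does not compile:
+
+```rust
+fn main() {
+    let r: u32 = 7 % 3;
+    assert(r == 1);
+    // let f: Field = 7 % 3; // error: no `Rem` implementation for `Field`
+}
+```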
+ +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust +trait BitOr { + fn bitor(self, other: Self) -> Self; +} + +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} + +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. + +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust +trait Shl { + fn shl(self, other: Self) -> Self; +} + +trait Shr { + fn shr(self, other: Self) -> Self; +} +``` + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. + +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.17.0/index.md b/docs/versioned_docs/version-v0.17.0/index.md index 4eeeb6f1346..2d5e6f4454f 100644 --- a/docs/versioned_docs/version-v0.17.0/index.md +++ b/docs/versioned_docs/version-v0.17.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? 
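As a brief usage sketch for the bitwise and shift traits documented in `traits.md` above (illustrative only; the `u32` type and the specific values are assumptions, not taken from the docs):

```rust
fn main() {
    let a: u32 = 12; // 0b1100
    let b: u32 = 10; // 0b1010

    assert((a & b) == 8);  // BitAnd
    assert((a | b) == 14); // BitOr
    assert((a ^ b) == 6);  // BitXor

    let one: u32 = 1;
    assert((one << 3) == 8);        // Shl
    assert(((one << 3) >> 2) == 2); // Shr
}
```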
diff --git a/docs/versioned_docs/version-v0.17.0/migration_notes.md b/docs/versioned_docs/version-v0.17.0/migration_notes.md index 69782cba388..1a81af04b3a 100644 --- a/docs/versioned_docs/version-v0.17.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.17.0/migration_notes.md @@ -42,7 +42,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -70,7 +70,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.0/index.md b/docs/versioned_docs/version-v0.19.0/index.md index 93944f92bf5..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.0/index.md +++ b/docs/versioned_docs/version-v0.19.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? diff --git a/docs/versioned_docs/version-v0.19.0/migration_notes.md b/docs/versioned_docs/version-v0.19.0/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.0/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/logging.md b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.0/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. 
+It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/versioned_docs/version-v0.19.1/index.md b/docs/versioned_docs/version-v0.19.1/index.md index 93944f92bf5..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.1/index.md +++ b/docs/versioned_docs/version-v0.19.1/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? diff --git a/docs/versioned_docs/version-v0.19.1/migration_notes.md b/docs/versioned_docs/version-v0.19.1/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.1/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.1/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
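For reference, a minimal test function of the kind described above looks like this (an illustrative sketch; the full example lives in the testing guide itself):

```rust
#[test]
fn test_add() {
    // `nargo test` executes this function and checks that every
    // constraint introduced by the assert can be satisfied.
    assert(1 + 2 == 3);
}
```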
diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/versioned_docs/version-v0.19.2/index.md b/docs/versioned_docs/version-v0.19.2/index.md index 93944f92bf5..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.2/index.md +++ b/docs/versioned_docs/version-v0.19.2/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? diff --git a/docs/versioned_docs/version-v0.19.2/migration_notes.md b/docs/versioned_docs/version-v0.19.2/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.2/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.2/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/versioned_docs/version-v0.19.3/index.md b/docs/versioned_docs/version-v0.19.3/index.md index 93944f92bf5..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.3/index.md +++ b/docs/versioned_docs/version-v0.19.3/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? diff --git a/docs/versioned_docs/version-v0.19.3/migration_notes.md b/docs/versioned_docs/version-v0.19.3/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.3/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.3/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/logging.md b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.3/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/versioned_docs/version-v0.19.4/index.md b/docs/versioned_docs/version-v0.19.4/index.md index 93944f92bf5..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.4/index.md +++ b/docs/versioned_docs/version-v0.19.4/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? 
diff --git a/docs/versioned_docs/version-v0.19.4/migration_notes.md b/docs/versioned_docs/version-v0.19.4/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.4/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.4/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md index 917c9415126..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md @@ -23,7 +23,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md index 5febd44e96b..d3e0c522473 100644 --- a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md @@ -24,7 +24,7 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all +Running `nargo test` will test that the `test_add` function can be executed while satisfying all the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. diff --git a/docs/versioned_docs/version-v0.22.0/migration_notes.md b/docs/versioned_docs/version-v0.22.0/migration_notes.md index d5d0682cf0c..184ca283539 100644 --- a/docs/versioned_docs/version-v0.22.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.22.0/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md index 16daf922e15..2e163b52ab3 100644 --- a/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md @@ -22,7 +22,7 @@ The standard library provides two familiar statements you can use: `println` and You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. 
For example: diff --git a/noir_stdlib/src/cmp.nr b/noir_stdlib/src/cmp.nr new file mode 100644 index 00000000000..888866eebd0 --- /dev/null +++ b/noir_stdlib/src/cmp.nr @@ -0,0 +1,312 @@ +trait Eq { + fn eq(self, other: Self) -> bool; + + fn ne(self, other: Self) -> bool { + !Eq::eq(self, other) + } +} + +impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } + +impl Eq for u1 { fn eq(self, other: u1) -> bool { self == other } } +impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } +impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } +impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } +impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } + +impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } +impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } +impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } +impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } + +impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } +impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } + +impl Eq for [T; N] where T: Eq { + fn eq(self, other: [T; N]) -> bool { + let mut result = true; + for i in 0 .. self.len() { + result &= self[i].eq(other[i]); + } + result + } +} + +impl Eq for str { + fn eq(self, other: str) -> bool { + let self_bytes = self.as_bytes(); + let other_bytes = other.as_bytes(); + self_bytes == other_bytes + } +} + +impl Eq for (A, B) where A: Eq, B: Eq { + fn eq(self, other: (A, B)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) + } +} + +impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { + fn eq(self, other: (A, B, C)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) + } +} + +impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { + fn eq(self, other: (A, B, C, D)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) + } +} + +impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { + fn eq(self, other: (A, B, C, D, E)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) + } +} + +impl Eq for Ordering { + fn eq(self, other: Ordering) -> bool { + self.result == other.result + } +} + +// Noir doesn't have enums yet so we emulate (Lt | Eq | Gt) with a struct +// that has 3 public functions for constructing the struct. +struct Ordering { + result: Field, +} + +impl Ordering { + // Implementation note: 0, 1, and 2 for Lt, Eq, and Gt are built + // into the compiler, do not change these without also updating + // the compiler itself! 
+ pub fn less() -> Ordering { + Ordering { result: 0 } + } + + pub fn equal() -> Ordering { + Ordering { result: 1 } + } + + pub fn greater() -> Ordering { + Ordering { result: 2 } + } +} + +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} + +// Note: Field deliberately does not implement Ord + +impl Ord for u8 { + fn cmp(self, other: u8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u16 { + fn cmp(self, other: u16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u32 { + fn cmp(self, other: u32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u64 { + fn cmp(self, other: u64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i8 { + fn cmp(self, other: i8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i16 { + fn cmp(self, other: i16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i32 { + fn cmp(self, other: i32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i64 { + fn cmp(self, other: i64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for () { + fn cmp(_self: Self, _other: ()) -> Ordering { + Ordering::equal() + } +} + +impl Ord for bool { + fn cmp(self, other: bool) -> Ordering { + if self { + if other { + Ordering::equal() + } else { + Ordering::greater() + } + } else { + if other { + Ordering::less() + } else { + Ordering::equal() + } + } + } +} + +impl Ord for [T; N] where T: Ord { + // The first non-equal element of both arrays determines + // the ordering for the whole array. + fn cmp(self, other: [T; N]) -> Ordering { + let mut result = Ordering::equal(); + for i in 0 .. 
self.len() { + if result == Ordering::equal() { + let result_i = self[i].cmp(other[i]); + + if result_i == Ordering::less() { + result = result_i; + } else if result_i == Ordering::greater() { + result = result_i; + } + } + } + result + } +} + +impl Ord for (A, B) where A: Ord, B: Ord { + fn cmp(self, other: (A, B)) -> Ordering { + let result = self.0.cmp(other.0); + + if result != Ordering::equal() { + result + } else { + self.1.cmp(other.1) + } + } +} + +impl Ord for (A, B, C) where A: Ord, B: Ord, C: Ord { + fn cmp(self, other: (A, B, C)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + result + } +} + +impl Ord for (A, B, C, D) where A: Ord, B: Ord, C: Ord, D: Ord { + fn cmp(self, other: (A, B, C, D)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + result + } +} + +impl Ord for (A, B, C, D, E) where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { + fn cmp(self, other: (A, B, C, D, E)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + if result == Ordering::equal() { + result = self.4.cmp(other.4); + } + + result + } +} diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index 70b4681b54d..9b166f6ae94 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -19,6 +19,7 @@ mod compat; mod option; mod string; mod test; +mod cmp; mod ops; mod default; mod prelude; diff --git a/noir_stdlib/src/ops.nr b/noir_stdlib/src/ops.nr index 9ba35d0a027..fbdbc651ff7 100644 --- a/noir_stdlib/src/ops.nr +++ b/noir_stdlib/src/ops.nr @@ -62,59 +62,94 @@ impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } -trait Eq { - fn eq(self, other: Self) -> bool; - - fn ne(self, other: Self) -> bool { - !Eq::eq(self, other) - } +trait Rem { + fn rem(self, other: Self) -> Self; } -impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } - -impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } -impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } -impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } -impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } - -impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } -impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } -impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } -impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } - -impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } -impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } - -impl Eq for [T; N] where T: Eq { - fn eq(self, other: [T; N]) -> bool { - let mut result = true; - for i in 0 .. 
self.len() { - result &= self[i].eq(other[i]); - } - result - } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +trait BitOr { + fn bitor(self, other: Self) -> Self; } -impl Eq for (A, B) where A: Eq, B: Eq { - fn eq(self, other: (A, B)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) - } +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +trait BitAnd { + fn bitand(self, other: Self) -> Self; } -impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { - fn eq(self, other: (A, B, C)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) - } +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +trait BitXor { + fn bitxor(self, other: Self) -> Self; } -impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { - fn eq(self, other: (A, B, C, D)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) - } +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +trait Shl { + fn shl(self, other: Self) -> Self; } -impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { - fn eq(self, other: (A, B, C, D, E)) -> bool { - self.0.eq(other.0) & 
self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) - } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shl for i8 { fn shl(self, other: i8) -> i8 { self << other } } +// impl Shl for i16 { fn shl(self, other: i16) -> i16 { self << other } } +// impl Shl for i32 { fn shl(self, other: i32) -> i32 { self << other } } +// impl Shl for i64 { fn shl(self, other: i64) -> i64 { self << other } } + +trait Shr { + fn shr(self, other: Self) -> Self; } + +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u16 { fn shr(self, other: u16) -> u16 { self >> other } } +impl Shr for u32 { fn shr(self, other: u32) -> u32 { self >> other } } +impl Shr for u64 { fn shr(self, other: u64) -> u64 { self >> other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shr for i8 { fn shr(self, other: i8) -> i8 { self >> other } } +// impl Shr for i16 { fn shr(self, other: i16) -> i16 { self >> other } } +// impl Shr for i32 { fn shr(self, other: i32) -> i32 { self >> other } } +// impl Shr for i64 { fn shr(self, other: i64) -> i64 { self >> other } } diff --git a/noir_stdlib/src/prelude.nr b/noir_stdlib/src/prelude.nr index f33a1f7e7f1..56020509122 100644 --- a/noir_stdlib/src/prelude.nr +++ b/noir_stdlib/src/prelude.nr @@ -1,3 +1,5 @@ use crate::collections::vec::Vec; use crate::option::Option; use crate::{print, println, assert_constant}; +use crate::cmp::{Eq, Ord}; +use crate::default::Default; diff --git a/test_programs/compile_failure/cyclic_dep/Nargo.toml b/test_programs/compile_failure/cyclic_dep/Nargo.toml new file mode 100644 index 00000000000..6a5a9b7db73 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "cyclic_dep" +type = "bin" +authors = [""] + +[dependencies] +dep1 = { path= "./dep1"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/Prover.toml b/test_programs/compile_failure/cyclic_dep/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml new file mode 100644 index 00000000000..4782bbd5cda --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep1" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep2"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr new file mode 100644 index 00000000000..02b68c56bd2 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr @@ -0,0 +1,3 @@ +fn bar() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml new file mode 100644 index 00000000000..5e2a0f304b0 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep2" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep1"} \ No newline at end of file diff --git 
a/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr new file mode 100644 index 00000000000..298a0f3c7ca --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr @@ -0,0 +1,3 @@ +fn foo() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/src/main.nr b/test_programs/compile_failure/cyclic_dep/src/main.nr new file mode 100644 index 00000000000..c55ca748334 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/src/main.nr @@ -0,0 +1,7 @@ +use dep1::foo; +use dep2::bar; + +fn main() { + dep1::foo(); + dep2::bar(); +} diff --git a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr index 2144e6b31a4..8721dd6e58b 100644 --- a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr +++ b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr @@ -1,34 +1,34 @@ fn main() { let array: [Field; 3] = [1, 2, 3]; - assert(array.eq(array)); + assert(array.my_eq(array)); // Ensure this still works if we have to infer the type of the integer literals let array = [1, 2, 3]; - assert(array.eq(array)); + assert(array.my_eq(array)); let other_array = [3, 2, 1]; - assert(array.ne(other_array)); + assert(array.my_ne(other_array)); } -trait Eq { - fn eq(self, other: Self) -> bool; +trait MyEq { + fn my_eq(self, other: Self) -> bool; - fn ne(self, other: Self) -> bool{ - !Eq::eq(self, other) + fn my_ne(self, other: Self) -> bool{ + !MyEq::my_eq(self, other) } } -impl Eq for [T; 3] where T: Eq { - fn eq(self, other: Self) -> bool { +impl MyEq for [T; 3] where T: MyEq { + fn my_eq(self, other: Self) -> bool { let mut ret = true; for i in 0 .. 
self.len() { - ret &= self[i].eq(other[i]); + ret &= self[i].my_eq(other[i]); } ret } } -impl Eq for Field { - fn eq(self, other: Field) -> bool { +impl MyEq for Field { + fn my_eq(self, other: Field) -> bool { self == other } } diff --git a/test_programs/compile_success_empty/trait_default_implementation/src/main.nr b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr index e1f29ce3f48..2f5bff8c40c 100644 --- a/test_programs/compile_success_empty/trait_default_implementation/src/main.nr +++ b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr @@ -1,12 +1,11 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { - x + x } - } struct Foo { @@ -14,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } diff --git a/test_programs/compile_success_empty/trait_override_implementation/src/main.nr b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr index a385efc63fd..85528291870 100644 --- a/test_programs/compile_success_empty/trait_override_implementation/src/main.nr +++ b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { x @@ -13,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } @@ -25,18 +25,18 @@ impl Default for Foo { trait F { fn f1(self) -> Field; - fn f2(self) -> Field { 2 } - fn f3(self) -> Field { 3 } - fn f4(self) -> Field { 4 } - fn f5(self) -> Field { 5 } + fn f2(_self: Self) -> Field { 2 } + fn f3(_self: Self) -> Field { 3 } + fn f4(_self: Self) -> Field { 4 } + fn f5(_self: Self) -> Field { 5 } } struct Bar {} impl F for Bar { - fn f5(self) -> Field { 50 } - fn f1(self) -> Field { 10 } - fn f3(self) -> Field { 30 } + fn f5(_self: Self) -> Field { 50 } + fn f1(_self: Self) -> Field { 10 } + fn f3(_self: Self) -> Field { 30 } } // Impls on mutable references are temporarily disabled // impl F for &mut Bar { diff --git a/test_programs/compile_success_empty/traits/src/main.nr b/test_programs/compile_success_empty/traits/src/main.nr index 784ff01a883..ed804559fed 100644 --- a/test_programs/compile_success_empty/traits/src/main.nr +++ b/test_programs/compile_success_empty/traits/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; } struct Foo { @@ -9,13 +9,13 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } fn main(x: Field, y: Field) { - let first = Foo::default(x, y); + let first = Foo::my_default(x, y); assert(first.bar == x); } diff --git a/test_programs/execution_success/brillig_array_eq/Nargo.toml b/test_programs/execution_success/brillig_array_eq/Nargo.toml new file mode 100644 index 00000000000..62ce392f96b --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/Nargo.toml @@ -0,0 
+1,6 @@ +[package] +name = "brillig_array_eq" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_array_eq/Prover.toml b/test_programs/execution_success/brillig_array_eq/Prover.toml new file mode 100644 index 00000000000..ecfed7de213 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/Prover.toml @@ -0,0 +1,2 @@ +a = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] +b = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] diff --git a/test_programs/execution_success/brillig_array_eq/src/main.nr b/test_programs/execution_success/brillig_array_eq/src/main.nr new file mode 100644 index 00000000000..90f631dbed8 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/src/main.nr @@ -0,0 +1,4 @@ +// Simple example of checking where two arrays are equal +unconstrained fn main(a: [Field; 32], b: [Field; 32]) { + assert(a == b); +} diff --git a/test_programs/execution_success/operator_overloading/Nargo.toml b/test_programs/execution_success/operator_overloading/Nargo.toml new file mode 100644 index 00000000000..7f9f18ff567 --- /dev/null +++ b/test_programs/execution_success/operator_overloading/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "operator_overloading" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/test_programs/execution_success/operator_overloading/Prover.toml b/test_programs/execution_success/operator_overloading/Prover.toml new file mode 100644 index 00000000000..516b7b4074c --- /dev/null +++ b/test_programs/execution_success/operator_overloading/Prover.toml @@ -0,0 +1,2 @@ +x = 3 +y = 9 diff --git a/test_programs/execution_success/operator_overloading/src/main.nr b/test_programs/execution_success/operator_overloading/src/main.nr new file mode 100644 index 00000000000..3867531abca --- /dev/null +++ b/test_programs/execution_success/operator_overloading/src/main.nr @@ -0,0 +1,154 @@ +use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::cmp::Ordering; + +// x = 3, y = 9 +fn main(x: u32, y: u32) { + let wx = Wrapper::new(x); + let wy = Wrapper::new(y); + + // expected x and expected y values + let ex: u32 = 3; + let ey: u32 = 9; + + assert((wx + wy).inner == ex + ey); + assert((wy - wx).inner == ey - ex); + assert((wx * wy).inner == ex * ey); + assert((wx / wy).inner == ex / ey); + assert((wx % wy).inner == ex % ey); + + assert((wx & wy).inner == (ex & ey)); + assert((wx | wy).inner == (ex | ey)); + assert((wx ^ wy).inner == (ex ^ ey)); + + assert((wy << wx).inner == (ey << ex)); + assert((wy >> wx).inner == (ey >> ex)); + + assert((wx == wy) == (ex == ey)); + assert((wx < wy) == (ex < ey)); + assert((wx <= wy) == (ex <= ey)); + assert((wx > wy) == (ex > ey)); + assert((wx >= wy) == (ex >= ey)); + assert(wx.cmp(wy) == ex.cmp(ey)); + + // Ensure operator overloading still works with more complex types + let pair_ascending = Pair { x: wx, y: wy }; + let pair_descending = Pair { x: wy, y: wx }; + + assert(pair_ascending != pair_descending); + + assert(pair_ascending < pair_descending); + assert(pair_ascending <= pair_descending); + assert(pair_descending > pair_ascending); + assert(pair_descending >= pair_ascending); + + assert(pair_ascending.cmp(pair_descending) == Ordering::less()); +} + +struct Wrapper { + inner: u32 +} + +impl Wrapper { + fn new(inner: u32) -> Self { + Wrapper { inner } + } +} + +impl 
Add for Wrapper { + fn add(self, other: Self) -> Self { + Wrapper::new(self.inner + other.inner) + } +} + +impl Sub for Wrapper { + fn sub(self, other: Self) -> Self { + Wrapper::new(self.inner - other.inner) + } +} + +impl Mul for Wrapper { + fn mul(self, other: Self) -> Self { + Wrapper::new(self.inner * other.inner) + } +} + +impl Div for Wrapper { + fn div(self, other: Self) -> Self { + Wrapper::new(self.inner / other.inner) + } +} + +impl Rem for Wrapper { + fn rem(self, other: Self) -> Self { + Wrapper::new(self.inner % other.inner) + } +} + +impl BitAnd for Wrapper { + fn bitand(self, other: Self) -> Self { + Wrapper::new(self.inner & other.inner) + } +} + +impl BitOr for Wrapper { + fn bitor(self, other: Self) -> Self { + Wrapper::new(self.inner | other.inner) + } +} + +impl BitXor for Wrapper { + fn bitxor(self, other: Self) -> Self { + Wrapper::new(self.inner ^ other.inner) + } +} + +impl Shl for Wrapper { + fn shl(self, other: Self) -> Self { + Wrapper::new(self.inner << other.inner) + } +} + +impl Shr for Wrapper { + fn shr(self, other: Self) -> Self { + Wrapper::new(self.inner >> other.inner) + } +} + +impl Eq for Wrapper { + fn eq(self, other: Self) -> bool { + self.inner == other.inner + } +} + +impl Ord for Wrapper { + fn cmp(self, other: Self) -> Ordering { + self.inner.cmp(other.inner) + } +} + + + + + +struct Pair { + x: Wrapper, + y: Wrapper, +} + +impl Eq for Pair { + fn eq(self, o: Self) -> bool { + (self.x == o.x) & (self.y == o.y) + } +} + +impl Ord for Pair { + fn cmp(self, o: Self) -> Ordering { + let mut result = self.x.cmp(o.x); + + if result == Ordering::equal() { + result = self.y.cmp(o.y); + } + + result + } +} diff --git a/test_programs/execution_success/regression_3889/Nargo.toml b/test_programs/execution_success/regression_3889/Nargo.toml new file mode 100644 index 00000000000..d212d24473f --- /dev/null +++ b/test_programs/execution_success/regression_3889/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3889" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/regression_3889/Prover.toml b/test_programs/execution_success/regression_3889/Prover.toml new file mode 100644 index 00000000000..a81ab67fe3e --- /dev/null +++ b/test_programs/execution_success/regression_3889/Prover.toml @@ -0,0 +1,10 @@ +[works] +a = "5" + +[fails] +a = "6" + + +[also_fails] +a = "7" + diff --git a/test_programs/execution_success/regression_3889/src/main.nr b/test_programs/execution_success/regression_3889/src/main.nr new file mode 100644 index 00000000000..10b8ecabee3 --- /dev/null +++ b/test_programs/execution_success/regression_3889/src/main.nr @@ -0,0 +1,23 @@ +mod Foo { + struct NewType{ + a: Field, + } +} + +mod Bar { + use crate::Foo::NewType as BarStruct; + use crate::Foo::NewType; +} + +mod Baz { + struct Works { + a: Field, + } + use crate::Bar::BarStruct; + use crate::Bar::NewType; +} + + +fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { + works.a + fails.a + also_fails.a +} diff --git a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr index a677b10b0cd..253e999ce07 100644 --- a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr +++ b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -10,5 +10,6 @@ fn test_with_extra_space() { // The assert message has a space #[test(should_fail_with = "Not equal")] fn test_runtime_mismatch() { - 
assert_eq(dep::std::hash::pedersen_commitment([27])[0], 0, "Not equal "); + // We use a pedersen commitment here so that the assertion failure is only known at runtime. + assert_eq(dep::std::hash::pedersen_commitment([27]).x, 0, "Not equal "); } diff --git a/tooling/debugger/Cargo.toml b/tooling/debugger/Cargo.toml index fba4d028d05..0afe28727d1 100644 --- a/tooling/debugger/Cargo.toml +++ b/tooling/debugger/Cargo.toml @@ -20,4 +20,7 @@ codespan-reporting.workspace = true dap.workspace = true easy-repl = "0.2.1" owo-colors = "3" -serde_json.workspace = true \ No newline at end of file +serde_json.workspace = true + +[dev_dependencies] +tempfile.workspace = true \ No newline at end of file diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index 06855e75c97..74224ce3795 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -529,7 +529,7 @@ mod tests { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(0))); @@ -623,7 +623,7 @@ mod tests { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); // set breakpoint @@ -673,7 +673,7 @@ mod tests { &circuit, &debug_artifact, WitnessMap::new(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); assert_eq!(context.offset_opcode_location(&None, 0), (None, 0)); diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 1cc05e28a6b..803f9f108db 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -57,7 +57,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); Self { server, diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index 7e0c1605e0a..21834e44f93 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -1,6 +1,7 @@ mod context; mod dap; mod repl; +mod source_code_printer; use std::io::{Read, Write}; diff --git a/tooling/debugger/src/repl.rs b/tooling/debugger/src/repl.rs index cb6539cbdb1..8fbd10d8db6 100644 --- a/tooling/debugger/src/repl.rs +++ b/tooling/debugger/src/repl.rs @@ -9,12 +9,7 @@ use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor, Na use easy_repl::{command, CommandStatus, Repl}; use std::cell::RefCell; -use codespan_reporting::files::Files; -use noirc_errors::Location; - -use owo_colors::OwoColorize; - -use std::ops::Range; +use crate::source_code_printer::print_source_code_location; pub struct ReplDebugger<'a, B: BlackBoxFunctionSolver> { context: DebugContext<'a, B>, @@ -37,7 +32,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { circuit, debug_artifact, initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); Self { context, @@ -70,73 +65,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } - self.show_source_code_location(&location); - } - } - } - - fn print_location_path(&self, loc: Location) { - let line_number = self.debug_artifact.location_line_number(loc).unwrap(); - let column_number = 
self.debug_artifact.location_column_number(loc).unwrap(); - - println!( - "At {}:{line_number}:{column_number}", - self.debug_artifact.name(loc.file).unwrap() - ); - } - - fn show_source_code_location(&self, location: &OpcodeLocation) { - let locations = self.debug_artifact.debug_symbols[0].opcode_location(location); - let Some(locations) = locations else { return }; - for loc in locations { - self.print_location_path(loc); - - let loc_line_index = self.debug_artifact.location_line_index(loc).unwrap(); - - // How many lines before or after the location's line we - // print - let context_lines = 5; - - let first_line_to_print = - if loc_line_index < context_lines { 0 } else { loc_line_index - context_lines }; - - let last_line_index = self.debug_artifact.last_line_index(loc).unwrap(); - let last_line_to_print = std::cmp::min(loc_line_index + context_lines, last_line_index); - - let source = self.debug_artifact.location_source_code(loc).unwrap(); - for (current_line_index, line) in source.lines().enumerate() { - let current_line_number = current_line_index + 1; - - if current_line_index < first_line_to_print { - // Ignore lines before range starts - continue; - } else if current_line_index == first_line_to_print && current_line_index > 0 { - // Denote that there's more lines before but we're not showing them - print_line_of_ellipsis(current_line_index); - } - - if current_line_index > last_line_to_print { - // Denote that there's more lines after but we're not showing them, - // and stop printing - print_line_of_ellipsis(current_line_number); - break; - } - - if current_line_index == loc_line_index { - // Highlight current location - let Range { start: loc_start, end: loc_end } = - self.debug_artifact.location_in_line(loc).unwrap(); - println!( - "{:>3} {:2} {}{}{}", - current_line_number, - "->", - &line[0..loc_start].to_string().dimmed(), - &line[loc_start..loc_end], - &line[loc_end..].to_string().dimmed() - ); - } else { - print_dimmed_line(current_line_number, line); - } + print_source_code_location(self.debug_artifact, &location); } } } @@ -278,7 +207,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.circuit, self.debug_artifact, self.initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); for opcode_location in breakpoints { self.context.add_breakpoint(opcode_location); @@ -384,14 +313,6 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } -fn print_line_of_ellipsis(line_number: usize) { - println!("{}", format!("{:>3} {}", line_number, "...").dimmed()); -} - -fn print_dimmed_line(line_number: usize, line: &str) { - println!("{}", format!("{:>3} {:2} {}", line_number, "", line).dimmed()); -} - pub fn run( blackbox_solver: &B, circuit: &Circuit, diff --git a/tooling/debugger/src/source_code_printer.rs b/tooling/debugger/src/source_code_printer.rs new file mode 100644 index 00000000000..1707f9066b7 --- /dev/null +++ b/tooling/debugger/src/source_code_printer.rs @@ -0,0 +1,317 @@ +use acvm::acir::circuit::OpcodeLocation; +use codespan_reporting::files::Files; +use nargo::artifacts::debug::DebugArtifact; +use noirc_errors::Location; +use owo_colors::OwoColorize; +use std::ops::Range; + +#[derive(Debug, PartialEq)] +enum PrintedLine<'a> { + Skip, + Ellipsis { + line_number: usize, + }, + Content { + line_number: usize, + cursor: &'a str, + content: &'a str, + highlight: Option>, + }, +} + +#[derive(Clone, Debug)] +struct LocationPrintContext { + file_lines: Range, + printed_lines: 
Range, + location_lines: Range, + location_offset_in_first_line: Range, + location_offset_in_last_line: Range, +} + +// Given a DebugArtifact and an OpcodeLocation, prints all the source code +// locations the OpcodeLocation maps to, with some surrounding context and +// visual aids to highlight the location itself. +pub(crate) fn print_source_code_location( + debug_artifact: &DebugArtifact, + location: &OpcodeLocation, +) { + let locations = debug_artifact.debug_symbols[0].opcode_location(location); + let Some(locations) = locations else { return; }; + + let locations = locations.iter(); + + for loc in locations { + print_location_path(debug_artifact, *loc); + + let lines = render_location(debug_artifact, loc); + + for line in lines { + match line { + PrintedLine::Skip => {} + PrintedLine::Ellipsis { line_number } => print_ellipsis(line_number), + PrintedLine::Content { line_number, cursor, content, highlight } => { + print_content(line_number, cursor, content, highlight) + } + } + } + } +} + +fn print_location_path(debug_artifact: &DebugArtifact, loc: Location) { + let line_number = debug_artifact.location_line_number(loc).unwrap(); + let column_number = debug_artifact.location_column_number(loc).unwrap(); + + println!("At {}:{line_number}:{column_number}", debug_artifact.name(loc.file).unwrap()); +} + +fn print_ellipsis(line_number: usize) { + println!("{:>3} {:2} {}", line_number.dimmed(), "", "...".dimmed()); +} + +fn print_content(line_number: usize, cursor: &str, content: &str, highlight: Option>) { + match highlight { + Some(highlight) => { + println!( + "{:>3} {:2} {}{}{}", + line_number, + cursor, + content[0..highlight.start].to_string().dimmed(), + &content[highlight.start..highlight.end], + content[highlight.end..].to_string().dimmed(), + ); + } + None => { + println!( + "{:>3} {:2} {}", + line_number.dimmed(), + cursor.dimmed(), + content.to_string().dimmed(), + ); + } + } +} + +fn render_line( + current: usize, + content: &str, + loc_context: LocationPrintContext, +) -> PrintedLine<'_> { + let file_lines = loc_context.file_lines; + let printed_lines = loc_context.printed_lines; + let location_lines = loc_context.location_lines; + let line_number = current + 1; + + if current < printed_lines.start { + // Ignore lines before the context window we choose to show + PrintedLine::Skip + } else if 0 < current && current == printed_lines.start && current < location_lines.start { + // Denote that there's more lines before but we're not showing them + PrintedLine::Ellipsis { line_number } + } else if current < location_lines.start { + // Print lines before the location start without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == location_lines.start { + // Highlight current location from where it starts to the end of the current line + PrintedLine::Content { + line_number, + cursor: "->", + content, + highlight: Some(loc_context.location_offset_in_first_line), + } + } else if current < location_lines.end { + // Highlight current line if it's contained by the current location + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(Range { start: 0, end: content.len() }), + } + } else if current == location_lines.end { + // Highlight current location from the beginning of the line until the location's own end + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(loc_context.location_offset_in_last_line), + } + } else if current < printed_lines.end || 
printed_lines.end == file_lines.end { + // Print lines after the location end without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == printed_lines.end && printed_lines.end < file_lines.end { + // Denote that there's more lines after but we're not showing them + PrintedLine::Ellipsis { line_number } + } else { + PrintedLine::Skip + } +} + +// Given a Location in a DebugArtifact, returns a line iterator that specifies how to +// print the location's file. +// +// Consider for example the file (line numbers added to facilitate this doc): +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 +// ``` +// +// If the location to render is `poseidon::bn254::hash_2(x1)`, we'll render the file as: +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 -> assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 ... +// ``` +// +// This is the result of: +// 1. Limiting the amount of printed lines to 5 before and 5 after the location. +// 2. Using ellipsis (...) to denote when some file lines have been left out of the render. +// 3. Using an arrow cursor (->) to denote where the rendered location starts. +// 4. Highlighting the location (here expressed as a block for the sake of the explanation). +// +// Note that locations may span multiple lines, so this function deals with that too. 
+fn render_location<'a>( + debug_artifact: &'a DebugArtifact, + loc: &'a Location, +) -> impl Iterator> { + let loc = *loc; + + let file_lines = Range { start: 0, end: debug_artifact.last_line_index(loc).unwrap() }; + + // Sub-range of file lines that this location spans + let location_lines = Range { + start: debug_artifact.location_line_index(loc).unwrap(), + end: debug_artifact.location_end_line_index(loc).unwrap(), + }; + + // How many lines before or after the location's lines we print + let context_lines = 5; + + // Sub-range of lines that we'll print, which includes location + context lines + let first_line_to_print = + if location_lines.start < context_lines { 0 } else { location_lines.start - context_lines }; + let last_line_to_print = std::cmp::min(location_lines.end + context_lines, file_lines.end); + let printed_lines = Range { start: first_line_to_print, end: last_line_to_print }; + + // Range of the location relative to its starting and ending lines + let location_offset_in_first_line = debug_artifact.location_in_line(loc).unwrap(); + let location_offset_in_last_line = debug_artifact.location_in_end_line(loc).unwrap(); + + let context = LocationPrintContext { + file_lines, + printed_lines, + location_lines, + location_offset_in_first_line, + location_offset_in_last_line, + }; + + let source = debug_artifact.location_source_code(loc).unwrap(); + source + .lines() + .enumerate() + .map(move |(index, content)| render_line(index, content, context.clone())) +} + +#[cfg(test)] +mod tests { + use crate::source_code_printer::render_location; + use crate::source_code_printer::PrintedLine::Content; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use nargo::artifacts::debug::DebugArtifact; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn render_multiple_line_location() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); + + assert_eq!( + location_rendered, + vec![ + Content { + line_number: 1, + cursor: "", + content: "pub fn main(mut state: [Field; 2]) -> [Field; 2] {", + highlight: None, + }, + Content { + line_number: 2, + cursor: "->", + content: " state = permute(", + highlight: Some(Range { start: 12, end: 20 }), + }, + Content { + line_number: 3, + cursor: 
"", + content: " consts::x5_2_config(),", + highlight: Some(Range { start: 0, end: 30 }), + }, + Content { + line_number: 4, + cursor: "", + content: " state);", + highlight: Some(Range { start: 0, end: 14 }), + }, + Content { line_number: 5, cursor: "", content: "", highlight: None }, + Content { line_number: 6, cursor: "", content: " state", highlight: None }, + Content { line_number: 7, cursor: "", content: "}", highlight: None }, + ] + ); + } +} diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index 9887e5b8e96..90e8e563cd3 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -21,14 +21,12 @@ use fm::codespan_files as files; use lsp_types::CodeLens; use nargo::workspace::Workspace; use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{ graph::{CrateId, CrateName}, hir::{Context, FunctionNameMatch}, }; -use fm::FileManager; - use notifications::{ on_did_change_configuration, on_did_change_text_document, on_did_close_text_document, on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, @@ -223,14 +221,14 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result (Context<'static>, CrateId) { let root = Path::new(""); - let mut file_manager = FileManager::new(root); - let root_file_id = file_manager.add_file_with_source(Path::new("main.nr"), source).expect( + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source).expect( "Adding source buffer to file manager should never fail when file manager is empty", ); let mut context = Context::new(file_manager); - - let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + let root_crate_id = prepare_crate(&mut context, file_name); (context, root_crate_id) } diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs index a23bde2e210..d2b66682d89 100644 --- a/tooling/lsp/src/requests/goto_definition.rs +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -1,13 +1,13 @@ use std::future::{self, Future}; +use crate::resolve_workspace_for_source_path; use crate::{types::GotoDefinitionResult, LspState}; -use async_lsp::{ErrorCode, LanguageClient, ResponseError}; +use async_lsp::{ErrorCode, ResponseError}; use fm::codespan_files::Error; use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location}; use lsp_types::{Position, Url}; use nargo::insert_all_files_for_workspace_into_file_manager; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::file_manager_with_stdlib; pub(crate) fn on_goto_definition_request( state: &mut LspState, @@ -18,81 +18,55 @@ pub(crate) fn on_goto_definition_request( } fn on_goto_definition_inner( - state: &mut LspState, + _state: &mut LspState, params: GotoDefinitionParams, ) -> Result { - let root_path = state.root_path.as_deref().ok_or_else(|| { - ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find project root") - })?; - let file_path = params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let toml_path = match find_package_manifest(root_path, &file_path) 
{ - Ok(toml_path) => toml_path, - Err(err) => { - let _ = state.client.log_message(lsp_types::LogMessageParams { - typ: lsp_types::MessageType::WARNING, - message: err.to_string(), - }); - return Ok(None); - } - }; - let workspace = resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| { - // If we found a manifest, but the workspace is invalid, we raise an error about it - ResponseError::new(ErrorCode::REQUEST_FAILED, err) - })?; - - let mut definition_position = None; + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - for package in &workspace { - let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); - - // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); - - let files = context.file_manager.as_file_map(); - let file_id = context.file_manager.name_to_id(file_path.clone()); - - if let Some(file_id) = file_id { - let byte_index = position_to_byte_index( - files, - file_id, - ¶ms.text_document_position_params.position, - ); - - if let Ok(byte_index) = byte_index { - let search_for_location = noirc_errors::Location { - file: file_id, - span: noirc_errors::Span::single_char(byte_index as u32), - }; - let found_location = context.get_definition_location_from(search_for_location); - - if let Some(found_location) = found_location { - let file_id = found_location.file; - definition_position = to_lsp_location(files, file_id, found_location.span); - } - } - } - } + let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + + // We ignore the warnings and errors produced by compilation while resolving the definition + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file manager. File path: {:?}", file_path), + ))?; + + let byte_index = + position_to_byte_index(files, file_id, ¶ms.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. 
Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; - if let Some(definition_position) = definition_position { - let response: GotoDefinitionResponse = - GotoDefinitionResponse::from(definition_position).to_owned(); - Ok(Some(response)) - } else { - Ok(None) - } + let goto_definition_response = + context.get_definition_location_from(search_for_location).and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDefinitionResponse = + GotoDefinitionResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_definition_response) } fn to_lsp_location<'a, F>( diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index 2a040a2e3e9..c2181d7839d 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -78,8 +78,14 @@ fn on_test_run_request_inner( ) })?; - let test_result = - run_test(&state.solver, &context, test_function, false, &CompileOptions::default()); + let test_result = run_test( + &state.solver, + &context, + test_function, + false, + None, + &CompileOptions::default(), + ); let result = match test_result { TestStatus::Pass => NargoTestRunResult { id: params.id.clone(), diff --git a/tooling/nargo/Cargo.toml b/tooling/nargo/Cargo.toml index d60f9d3ea28..cd97980b9e0 100644 --- a/tooling/nargo/Cargo.toml +++ b/tooling/nargo/Cargo.toml @@ -26,8 +26,14 @@ thiserror.workspace = true codespan-reporting.workspace = true tracing.workspace = true rayon = "1.8.0" +jsonrpc.workspace = true [dev-dependencies] # TODO: This dependency is used to generate unit tests for `get_all_paths_in_dir` # TODO: once that method is moved to nargo_cli, we can move this dependency to nargo_cli -tempfile.workspace = true \ No newline at end of file +tempfile.workspace = true +jsonrpc-http-server = "18.0" +jsonrpc-core-client = "18.0" +jsonrpc-derive = "18.0" +jsonrpc-core = "18.0" +serial_test = "2.0" diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index 324c476d13d..633fc7a8ded 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -59,6 +59,12 @@ impl DebugArtifact { self.line_index(location.file, location_start) } + /// Given a location, returns the index of the line it ends at + pub fn location_end_line_index(&self, location: Location) -> Result { + let location_end = location.span.end() as usize; + self.line_index(location.file, location_end) + } + /// Given a location, returns the line number it starts at pub fn location_line_number(&self, location: Location) -> Result { let location_start = location.span.start() as usize; @@ -82,12 +88,28 @@ impl DebugArtifact { let line_index = self.line_index(location.file, location_start)?; let line_span = self.line_range(location.file, line_index)?; + let line_length = line_span.end - (line_span.start + 1); let start_in_line = location_start - line_span.start; + + // The location might continue beyond the line, + // so we need a bounds check let end_in_line = location_end - line_span.start; + let end_in_line = std::cmp::min(end_in_line, line_length); Ok(Range { start: start_in_line, end: end_in_line }) } + /// Given a location, returns a Span relative to its last line's + /// position in the file. This is useful when processing a file's + /// contents on a per-line-basis. 
+ pub fn location_in_end_line(&self, location: Location) -> Result<Range<usize>, Error> { + let end_line_index = self.location_end_line_index(location)?; + let line_span = self.line_range(location.file, end_line_index)?; + let location_end = location.span.end() as usize; + let end_in_line = location_end - line_span.start; + Ok(Range { start: 0, end: end_in_line }) + } + /// Given a location, returns the last line index /// of its file pub fn last_line_index(&self, location: Location) -> Result<usize, Error> { @@ -149,3 +171,70 @@ impl<'a> Files<'a> for DebugArtifact { }) } } + +#[cfg(test)] +mod tests { + use crate::artifacts::debug::DebugArtifact; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + // Tests that location_in_line correctly handles + // locations spanning multiple lines. + // For example, given the snippet: + // ``` + // permute( + // consts::x5_2_config(), + // state); + // ``` + // We want location_in_line to return the range + // containing `permute(` + #[test] + fn location_in_line_stops_at_end_of_line() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::<OpcodeLocation, Vec<Location>>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); + assert_eq!(location_in_line, Range { start: 12, end: 20 }); + } +} diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index bc1e19cdcf4..cbe40c92b4e 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -2,6 +2,7 @@ use acvm::{ acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, pwg::ForeignCallWaitInfo, }; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; pub trait ForeignCallExecutor { @@ -94,11 +95,22 @@ pub struct DefaultForeignCallExecutor { mocked_responses: Vec<MockedCall>, /// Whether to print [`ForeignCall::Print`] output.
show_output: bool, + /// JSON RPC client to resolve foreign calls + external_resolver: Option, } impl DefaultForeignCallExecutor { - pub fn new(show_output: bool) -> Self { - DefaultForeignCallExecutor { show_output, ..DefaultForeignCallExecutor::default() } + pub fn new(show_output: bool, resolver_url: Option<&str>) -> Self { + let oracle_resolver = resolver_url.map(|resolver_url| { + let transport_builder = + Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + Client::with_transport(transport_builder.build()) + }); + DefaultForeignCallExecutor { + show_output, + external_resolver: oracle_resolver, + ..DefaultForeignCallExecutor::default() + } } } @@ -190,27 +202,136 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { Ok(ForeignCallResult { values: vec![] }) } None => { - let response_position = self + let mock_response_position = self .mocked_responses .iter() - .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)) - .unwrap_or_else(|| panic!("Unknown foreign call {}", foreign_call_name)); + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); + match (mock_response_position, &self.external_resolver) { + (Some(response_position), _) => { + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(ForeignCallResult { values: result }) + } + (None, Some(external_resolver)) => { + let encoded_params: Vec<_> = + foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); + + let req = + external_resolver.build_request(foreign_call_name, &encoded_params); + + let response = external_resolver.send_request(req)?; + + let parsed_response: ForeignCallResult = response.result()?; + + Ok(parsed_response) } + (None, None) => panic!("Unknown foreign call {}", foreign_call_name), } + } + } + } +} + +#[cfg(test)] +mod tests { + use acvm::{ + acir::brillig::ForeignCallParam, + brillig_vm::brillig::{ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, + FieldElement, + }; + use jsonrpc_core::Result as RpcResult; + use jsonrpc_derive::rpc; + use jsonrpc_http_server::{Server, ServerBuilder}; + use serial_test::serial; + + use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; - Ok(ForeignCallResult { values: result }) + #[allow(unreachable_pub)] + #[rpc] + pub trait OracleResolver { + #[rpc(name = "echo")] + fn echo(&self, param: ForeignCallParam) -> RpcResult; + + #[rpc(name = "sum")] + fn sum(&self, array: ForeignCallParam) -> RpcResult; + } + + struct OracleResolverImpl; + + impl OracleResolver for OracleResolverImpl { + fn echo(&self, param: ForeignCallParam) -> RpcResult { + Ok(vec![param].into()) + } + + fn sum(&self, array: ForeignCallParam) -> RpcResult { + let mut res: FieldElement = 0_usize.into(); + + for value in array.values() { + res += value.to_field(); } + + Ok(Value::from(res).into()) } } + + fn build_oracle_server() -> (Server, String) { + let mut io = jsonrpc_core::IoHandler::new(); + 
io.extend_with(OracleResolverImpl.to_delegate()); + + let server = ServerBuilder::new(io) + .start_http(&"127.0.0.1:5555".parse().expect("Invalid address")) + .expect("Could not start server"); + + let url = format!("http://{}", server.address()); + (server, url) + } + + #[serial] + #[test] + fn test_oracle_resolver_echo() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "echo".to_string(), + inputs: vec![ForeignCallParam::Single(1_u128.into())], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + + server.close(); + } + + #[serial] + #[test] + fn test_oracle_resolver_sum() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "sum".to_string(), + inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), Value::from(3_usize).into()); + + server.close(); + } } diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index 5bfdd6d15d0..f38dcad0c2f 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -19,6 +19,7 @@ pub fn run_test( context: &Context, test_function: TestFunction, show_output: bool, + foreign_call_resolver_url: Option<&str>, config: &CompileOptions, ) -> TestStatus { let program = compile_no_check(context, config, test_function.get_id(), None, false); @@ -30,7 +31,7 @@ pub fn run_test( &program.circuit, WitnessMap::new(), blackbox_solver, - &mut DefaultForeignCallExecutor::new(show_output), + &mut DefaultForeignCallExecutor::new(show_output, foreign_call_resolver_url), ); test_status_program_compile_pass(test_function, program.debug, circuit_execution) } diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index f2d5bce5524..7f695c42fa4 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -42,6 +42,10 @@ pub(crate) struct ExecuteCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -73,8 +77,12 @@ pub(crate) fn run( expression_width, )?; - let (return_value, solved_witness) = - execute_program_and_decode(compiled_program, package, &args.prover_name)?; + let (return_value, solved_witness) = execute_program_and_decode( + compiled_program, + package, + &args.prover_name, + args.oracle_resolver.as_deref(), + )?; println!("[{}] Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -93,11 +101,12 @@ fn execute_program_and_decode( program: CompiledProgram, package: &Package, prover_name: &str, + foreign_call_resolver_url: Option<&str>, ) -> Result<(Option, WitnessMap), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; - let solved_witness = execute_program(&program, &inputs_map)?; + let solved_witness = execute_program(&program, &inputs_map, foreign_call_resolver_url)?; let public_abi = program.abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -107,6 +116,7 @@ fn execute_program_and_decode( 
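Because the resolver transport above is plain JSON-RPC over HTTP, the `--oracle-resolver` flag added to `nargo execute`, `nargo test` and `nargo prove` in this change can point at any process that answers methods named after the program's foreign calls. The following is an illustrative sketch of such a standalone resolver, assuming a separate binary crate with the same `acvm`, `jsonrpc-core`, `jsonrpc-derive` and `jsonrpc-http-server` dependencies used by the tests above; it mirrors the test server's `sum` method and is not part of this change.

    // Illustrative sketch only: a standalone oracle resolver compatible with the
    // JSON-RPC client wired into DefaultForeignCallExecutor above. The crate layout
    // and listen address are assumptions, mirroring the test server in this change.
    use acvm::acir::brillig::ForeignCallParam;
    use acvm::brillig_vm::brillig::{ForeignCallResult, Value};
    use acvm::FieldElement;
    use jsonrpc_core::{IoHandler, Result as RpcResult};
    use jsonrpc_derive::rpc;
    use jsonrpc_http_server::ServerBuilder;

    #[rpc]
    pub trait OracleResolver {
        // The RPC method name must match the foreign call name used by the program.
        #[rpc(name = "sum")]
        fn sum(&self, array: ForeignCallParam) -> RpcResult<ForeignCallResult>;
    }

    struct OracleResolverImpl;

    impl OracleResolver for OracleResolverImpl {
        fn sum(&self, array: ForeignCallParam) -> RpcResult<ForeignCallResult> {
            // Sum every field element passed to the oracle and return a single value.
            let mut res: FieldElement = 0_usize.into();
            for value in array.values() {
                res += value.to_field();
            }
            Ok(Value::from(res).into())
        }
    }

    fn main() {
        let mut io = IoHandler::new();
        io.extend_with(OracleResolverImpl.to_delegate());

        // `nargo execute --oracle-resolver http://127.0.0.1:5555` (or the equivalent
        // flag on `nargo test` / `nargo prove`) forwards any unmocked foreign call
        // named `sum` to this endpoint.
        let server = ServerBuilder::new(io)
            .start_http(&"127.0.0.1:5555".parse().expect("Invalid address"))
            .expect("Could not start server");

        server.wait();
    }

On the Noir side, such calls originate from foreign calls (for example `#[oracle(...)]`-annotated unconstrained functions); any call whose name has no registered mock is forwarded to the resolver by `DefaultForeignCallExecutor`.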
pub(crate) fn execute_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, + foreign_call_resolver_url: Option<&str>, ) -> Result { let blackbox_solver = Bn254BlackBoxSolver::new(); @@ -116,7 +126,7 @@ pub(crate) fn execute_program( &compiled_program.circuit, initial_witness, &blackbox_solver, - &mut DefaultForeignCallExecutor::new(true), + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), ); match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index 2b1dd8c65f3..167ab541bc5 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -43,6 +43,10 @@ pub(crate) struct ProveCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -81,12 +85,14 @@ pub(crate) fn run( &args.prover_name, &args.verifier_name, args.verify, + args.oracle_resolver.as_deref(), )?; } Ok(()) } +#[allow(clippy::too_many_arguments)] pub(crate) fn prove_package( backend: &Backend, workspace: &Workspace, @@ -95,12 +101,14 @@ pub(crate) fn prove_package( prover_name: &str, verifier_name: &str, check_proof: bool, + foreign_call_resolver_url: Option<&str>, ) -> Result<(), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = + execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; // Write public inputs into Verifier.toml let public_abi = compiled_program.abi.public_abi(); diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index 575af9938b6..32893baa157 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -43,6 +43,10 @@ pub(crate) struct TestCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -84,6 +88,7 @@ pub(crate) fn run( package, pattern, args.show_output, + args.oracle_resolver.as_deref(), &args.compile_options, )?; } @@ -97,6 +102,7 @@ fn run_tests( package: &Package, fn_name: FunctionNameMatch, show_output: bool, + foreign_call_resolver_url: Option<&str>, compile_options: &CompileOptions, ) -> Result<(), CliError> { let (mut context, crate_id) = prepare_package(file_manager, package); @@ -141,7 +147,14 @@ fn run_tests( .expect("Failed to write to stdout"); writer.flush().expect("Failed to flush writer"); - match run_test(blackbox_solver, &context, test_function, show_output, compile_options) { + match run_test( + blackbox_solver, + &context, + test_function, + show_output, + foreign_call_resolver_url, + compile_options, + ) { TestStatus::Pass { .. 
} => { writer .set_color(ColorSpec::new().set_fg(Some(Color::Green))) diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index da976e1b185..440895056c3 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -69,6 +69,9 @@ pub enum ManifestError { #[error(transparent)] SemverError(SemverError), + + #[error("Cyclic package dependency found when processing {cycle}")] + CyclicDependency { cycle: String }, } #[allow(clippy::enum_variant_names)] diff --git a/tooling/nargo_toml/src/lib.rs b/tooling/nargo_toml/src/lib.rs index 6c77fe85f2b..cecc3f7e26a 100644 --- a/tooling/nargo_toml/src/lib.rs +++ b/tooling/nargo_toml/src/lib.rs @@ -120,7 +120,11 @@ struct PackageConfig { } impl PackageConfig { - fn resolve_to_package(&self, root_dir: &Path) -> Result { + fn resolve_to_package( + &self, + root_dir: &Path, + processed: &mut Vec, + ) -> Result { let name: CrateName = if let Some(name) = &self.package.name { name.parse().map_err(|_| ManifestError::InvalidPackageName { toml: root_dir.join("Nargo.toml"), @@ -136,7 +140,7 @@ impl PackageConfig { toml: root_dir.join("Nargo.toml"), name: name.into(), })?; - let resolved_dep = dep_config.resolve_to_dependency(root_dir)?; + let resolved_dep = dep_config.resolve_to_dependency(root_dir, processed)?; dependencies.insert(name, resolved_dep); } @@ -283,7 +287,11 @@ enum DependencyConfig { } impl DependencyConfig { - fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { + fn resolve_to_dependency( + &self, + pkg_root: &Path, + processed: &mut Vec, + ) -> Result { let dep = match self { Self::Github { git, tag, directory } => { let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; @@ -300,13 +308,13 @@ impl DependencyConfig { dir_path }; let toml_path = project_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Remote { package } } Self::Path { path } => { let dir_path = pkg_root.join(path); let toml_path = dir_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Local { package } } }; @@ -325,9 +333,10 @@ fn toml_to_workspace( nargo_toml: NargoToml, package_selection: PackageSelection, ) -> Result { + let mut resolved = Vec::new(); let workspace = match nargo_toml.config { Config::Package { package_config } => { - let member = package_config.resolve_to_package(&nargo_toml.root_dir)?; + let member = package_config.resolve_to_package(&nargo_toml.root_dir, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) if selected_name != &member.name => { return Err(ManifestError::MissingSelectedPackage(member.name)) @@ -345,7 +354,7 @@ fn toml_to_workspace( for (index, member_path) in workspace_config.members.into_iter().enumerate() { let package_root_dir = nargo_toml.root_dir.join(&member_path); let package_toml_path = package_root_dir.join("Nargo.toml"); - let member = resolve_package_from_toml(&package_toml_path)?; + let member = resolve_package_from_toml(&package_toml_path, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) => { @@ -402,17 +411,43 @@ fn read_toml(toml_path: &Path) -> Result { } /// Resolves a Nargo.toml file into a `Package` struct as defined by our `nargo` core. 
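For illustration, the message carried by the new `ManifestError::CyclicDependency` variant is assembled by the check added to `resolve_package_from_toml` below, walking the chain of manifests currently being processed. A self-contained sketch of that loop for the `cyclic_dep` test's dep1 -> dep2 -> dep1 cycle follows; the manifest paths are hypothetical placeholders and the sketch is not part of this change.

    // Illustrative sketch only: reproduces the message-building loop from
    // `resolve_package_from_toml` for a dep1 -> dep2 -> dep1 cycle.
    fn main() {
        // Manifests already being resolved when dep2 asks for dep1 again.
        let processed = vec![
            "cyclic_dep/dep1/Nargo.toml".to_string(),
            "cyclic_dep/dep2/Nargo.toml".to_string(),
        ];
        let str_path = "cyclic_dep/dep1/Nargo.toml";

        let mut cycle = false;
        let mut message = String::new();
        for toml in &processed {
            cycle = cycle || toml == str_path;
            if cycle {
                message += &format!("{} referencing ", toml);
            }
        }
        message += str_path;

        // ManifestError::CyclicDependency prefixes this chain with
        // "Cyclic package dependency found when processing ".
        assert_eq!(
            message,
            "cyclic_dep/dep1/Nargo.toml referencing cyclic_dep/dep2/Nargo.toml referencing cyclic_dep/dep1/Nargo.toml"
        );
    }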
-fn resolve_package_from_toml(toml_path: &Path) -> Result<Package, ManifestError> { +fn resolve_package_from_toml( + toml_path: &Path, + processed: &mut Vec<String>, +) -> Result<Package, ManifestError> { + // Checks for cyclic dependencies + let str_path = toml_path.to_str().expect("ICE - path is empty"); + if processed.contains(&str_path.to_string()) { + let mut cycle = false; + let mut message = String::new(); + for toml in processed { + cycle = cycle || toml == str_path; + if cycle { + message += &format!("{} referencing ", toml); + } + } + message += str_path; + return Err(ManifestError::CyclicDependency { cycle: message }); + } + // Adds the package to the set of resolved packages + if let Some(str) = toml_path.to_str() { + processed.push(str.to_string()); + } + let nargo_toml = read_toml(toml_path)?; - match nargo_toml.config { + let result = match nargo_toml.config { Config::Package { package_config } => { - package_config.resolve_to_package(&nargo_toml.root_dir) + package_config.resolve_to_package(&nargo_toml.root_dir, processed) } Config::Workspace { .. } => { Err(ManifestError::UnexpectedWorkspace(toml_path.to_path_buf())) } - } + }; + let pos = + processed.iter().position(|toml| toml == str_path).expect("added package must be here"); + processed.remove(pos); + result } #[derive(Debug, PartialEq, Eq)] diff --git a/tooling/noir_codegen/package.json b/tooling/noir_codegen/package.json index 3ae31d9834f..a10855ed63d 100644 --- a/tooling/noir_codegen/package.json +++ b/tooling/noir_codegen/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_codegen", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/types": "workspace:*", "glob": "^10.3.10", diff --git a/tooling/noir_codegen/src/index.ts b/tooling/noir_codegen/src/index.ts index 7bef216097a..19829cd06ff 100644 --- a/tooling/noir_codegen/src/index.ts +++ b/tooling/noir_codegen/src/index.ts @@ -9,7 +9,9 @@ const codegenPrelude = `/* Autogenerated file, do not edit!
*/ /* eslint-disable */ -import { Noir, InputMap, CompiledCircuit } from "@noir-lang/noir_js" +import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" + +export { ForeignCallHandler } from "@noir-lang/noir_js" `; const codegenFunction = ( @@ -22,10 +24,12 @@ const codegenFunction = ( return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; -export async function ${name}(${args_with_types}): Promise<${function_signature.returnValue}> { +export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ + function_signature.returnValue + }> { const program = new Noir(${name}_circuit); const args: InputMap = { ${args} }; - const { returnValue } = await program.execute(args); + const { returnValue } = await program.execute(args, foreignCallHandler); return returnValue as ${function_signature.returnValue}; } `; diff --git a/tooling/noir_codegen/test/assert_lt/src/main.nr b/tooling/noir_codegen/test/assert_lt/src/main.nr index 32d5ff84722..ed370bd87c0 100644 --- a/tooling/noir_codegen/test/assert_lt/src/main.nr +++ b/tooling/noir_codegen/test/assert_lt/src/main.nr @@ -20,6 +20,7 @@ fn main( assert(my_struct.foo.foo); assert(string == "12345"); + print(x); assert(x < y); (x + y, 3, my_struct.foo) } diff --git a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json b/tooling/noir_codegen/test/assert_lt/target/assert_lt.json index b1865ca5f86..be1b134d642 100644 --- a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json +++ b/tooling/noir_codegen/test/assert_lt/target/assert_lt.json @@ -1 +1 @@ -{"noir_version":"0.22.0+528d7c9fa244611cd54636493c730e6ce0734ece","hash":9387426530776910287,"abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},{"name":"array","type":{"kind":"array","length":5,"type":{"kind":"integer","sign":"unsigned","width":8}},"visibility":"private"},{"name":"my_struct","type":{"kind":"struct","path":"NestedStruct","fields":[{"name":"foo","type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}}},{"name":"baz","type":{"kind":"integer","sign":"unsigned","width":64}}]},"visibility":"private"},{"name":"string","type":{"kind":"string","length":5},"visibility":"private"}],"param_witnesses":{"array":[{"start":3,"end":8}],"my_struct":[{"start":8,"end":73}],"string":[{"start":73,"end":78}],"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"tuple","fields":[{"kind":"integer","sign":"unsigned","width":64},{"kind":"integer","sign":"unsigned","width":64},{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}]},"visibility":"public"},"return_witnesses":[80,81,82,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]},"bytecode":"H4sIAAAAAAAA/82Y6W/TQBDFNy5HylGgXAEKmHKF2xvbic3VUM62OaDwCYkPSeMIJFAQiui/j59iw+YFIoFnJVaKnF+OyXhmN7vvvVNKfVCTUUofTnZtGuwQL6SPssF7iPcS7yPen3H+myqLj+EVG7ps/JYZ1/fqQZA0aon2dc+rxf0o9IKwX490pMMoHNQi30+iIGrE/bjhxTrwEz0MY3+YBS7L5ejl
dVikuhwgPkh8iPgw8RLxEeKjxMeIl4mPE58gPkl8ivi0xT5X0hgVC32uKPk+n6G6nCU+R7xCfJ74AvFFYpf4EvEq8WXiK8RXia8RX7fY52oao2qhz1Ul3+cbVJebxLeIbxPfIb5LfI/YI9bENWKfOCAOievEDYt9jtIYkYU+R0q+zzHV5T7xA+KHxI+IHxOvETeJnxCvEz8lfkb8nPgF8UviV2p6/9+g9zeJt4hbxG31ax7lw8Y5oCk0h2zmuSGQZzLEGFjNc1Msz52hzTy35PJMbObZkstzYDPPtlyeO9ANjpodjnDOJSW39p1/z0vzC7+5dbHYZl072bWrJlosnxf5Z6DX1tXsnCkZz/N9xZnzmdIf4iwar+XfXzLeL3rzM8Uwf1wqZicrpPSBpCOX488DSdeImY8F4XrYWlRFY70VrOe8+v1lnh7lqTuC99wV7GuB+s39g/uf1828PnvFxtQ68YoNLblOXiv5/x0zpq2+v5HL27eZ57Zg31tGjpif2LCxkcNIzc1TbLIwDGESwhiEGYhNFqYfjD6YezD0YOLBuINZB4MOphxMLphSMKJgPsFwgskEYwlmkqsmptGqmphDMIRgAsH4gdkD8wRmBwwOmBowMmBewLCASYEFhk0ZBgSKDqMB5gIMBZgIEOUQ0RDOEMsQyBDFEMJrWR0hcnG4gJiFgIVohVCFOIUghXCCKMGBH/Vqq+nDy3L2vEidML8x/7bV9OHlfXZdya698Tj58nXsjkdubzBwdz+NP7qj78m34efR7g+ltqXnYRcAAA=="} \ No newline at end of file +{"noir_version":"0.22.0+6f69b3f511c8b4c51404ad4c18131bdf6b7f6a94","hash":3763979860977920209,"abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},{"name":"array","type":{"kind":"array","length":5,"type":{"kind":"integer","sign":"unsigned","width":8}},"visibility":"private"},{"name":"my_struct","type":{"kind":"struct","path":"NestedStruct","fields":[{"name":"foo","type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}}},{"name":"baz","type":{"kind":"integer","sign":"unsigned","width":64}}]},"visibility":"private"},{"name":"string","type":{"kind":"string","length":5},"visibility":"private"}],"param_witnesses":{"array":[{"start":3,"end":8}],"my_struct":[{"start":8,"end":73}],"string":[{"start":73,"end":78}],"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"tuple","fields":[{"kind":"integer","sign":"unsigned","width":64},{"kind":"integer","sign":"unsigned","width":64},{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}]},"visibility":"public"},"return_witnesses":[98,99,100,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]},"bytecode":"H4sIAAAAAAAA/+1cC3PTRhBex8SJ40cezsNxaOtSoGnpQ7Jsy+6LkFJaSFIoKVCgQGzsTJlpCWU85Zf1/9Ur6drz9pAF3jtZM9FMRvliZff7bqXV6bTeYwD4E/wtNfyZCfY7Ep4hOD38mZfwGYJnCc4QPBdg4RMC+7hZk232vORLtutYzXq979b6tmN3rFq722pY9Ua32bJbdqPV6NVajtNv1Vtuu9t2rbZdd/r2caPtHAeG5/k4WmIcsmRcFgjOEZwnuEBwkeBFgpcIXiZ4heASwasErxG8TvCGxjiXhzbKGuJcBv44b5JxqRC8RfBZgt8h+F2C3yO4SvD7BJ8j+AOCzxN8geCLBH+oMc7bQxvbGuK8Dfxx/oiMy8cEXyL4E4I/Jfgzgj8n2CLYJrhGsENwneAGwU2CXY1xbg1ttDTEuQX8cW6TcfmC4C8J/orgrwn+huDLBO8QfIXgXYK/Jfgqwd8RfI3g7wn+AUbv/9fJ5zcI3iN4n+AD+O88EpuOecAO0zkk85whPCe1fZ3R1o9s4+fYaSkmOuPEx9m/PsV2E/x5qRwz3NLBPqNBCxA/dPyKir+xOtcRnJsa7N4C3otTh+5b/DHSmkhuMNr6iU2zUzOVSPg4jyaS23CaSFiCc1uD3UOY7kSCug/5Y6Q1kewx2vqZTbPjmEokfJxHE8kdOE0kLMG5o8HuXZjuRIK67/LHSAtXnDkdAv+j/D1IRgLdZ7T1C5tmp24qgfJxHk2g9+E0gbIE574Guw9guhMo6n7AHyMtXDHR3wP+BPoQkpFADxht/cqm2WmYSqB8nEcT6CM4TaAswXmkwe5jmO4Eirof88dIC1dM9A+BP4E+mXLdGJ8nihhNqttU4ksB/w3q72CPxRYzhH9GMVbWhJuGJGrJ3MV+VuJfDH5Pa9CSYbVZa6CWM6A+j2gs5BhlAs2M+my0Ocdr0xLFTnw23S7ayEpjIsZPcM9Jn89L45VlHq+U5FPYFTirzW8N6xi84qgw/QsKHgsG9S9o89vsCo1h+
nMKHjmD+nPa/Dax3sIrfgvTn1fwyBvUn9fmt9lHG4Ux+gsKHgWD+gva/DaxTsm7x4XpLyp4FA3ql/lF5ZqNmSu/X6eDNhbH6F9U8Fg0qF/mF5VrNmau/H5drPfzioLD9C8peCwZ1C/zi8o1HzNXfr8u1m56Bdth+pcVPJYN6pf5ReWai5krv98m1sl6xfRh+lcUPFYM6pf5ReWaj5krv9+mlwNLY/SXFDxKBvXL/KJyLSSIay5BXPMxc+X363rz69Ux+lcVPFYN6pf5ReW6kiCuywniGve48vt1vTWmtTH61xQ81gzql/lF5ZqNmSu/39pTtLE+Rv+6gse6Qf0yv6hcszFz5ffrenPhjTH6NxQ8Ngzql/lF5ZpLENdCgriWYubK77eJ30H0vqwcpr+s4FE2qF/mF5VrNkFcizFz5ffr4PdzvS+Xh+nfVPDYNKh/U5tfx3tuqYzRX1HwqBjUX9Hm1+2hja0x+rcUPLYM6he+8L20eH//4uWz5wNKGre09LsodEhJx1xQHDsHGivI0vD/qhGZHJcfufJl0mqdmbfnZdM/KKSz2ZbH9SjY4+ujWSnw4hg8AXal41PSPiXFZBdGTxx6TOo1duTqCvH/iStNPAoGkrvs7YiP4789FDqSTbExly9pu6gmtdVnHM+w8XtDnhbhaR8xau4wxnWC8QtNcNN83YTF2Zps4yy7tDmvky7w5x3Zpq64P+Xj7ejk2WOM+77EEc9PvGGLElXR7w1vslieJsq1cFkCb7JYWoCvrHEJHJeWcQkAl1hx6Q6XhPCRGB9fcAqP01icUmKPLOyLhb2wsP9VFfw+V+fA72d1HvzZ4kXw+1Nhvyfsz4Q9mS6B33sJ+y1hjyXsq4QXGN6UcR0YBx2fLfDdID5j4RoWPsNjHRr2+sH+PtjTB/v4YO+ey8E4XgF/coH9d66C32fnGvj9dLCHDvZ6wTYNe+D3xDmA0cmLWFafZJzw/Mbzrwejk5fnwf5ssO8MBv0/Xgyqg5Nqp9ervno2+K168lf/5fHvJ6/+ASeieLQUUAAA"} \ No newline at end of file diff --git a/tooling/noir_codegen/test/index.test.ts b/tooling/noir_codegen/test/index.test.ts index 70f3d5bbf89..822993b2f1e 100644 --- a/tooling/noir_codegen/test/index.test.ts +++ b/tooling/noir_codegen/test/index.test.ts @@ -1,5 +1,5 @@ import { expect } from 'chai'; -import { assert_lt, MyStruct, u64 } from './codegen/index.js'; +import { assert_lt, MyStruct, u64, ForeignCallHandler } from './codegen/index.js'; it('codegens a callable function', async () => { const my_struct = { foo: true, bar: ['12345', '12345', '12345'] }; @@ -20,3 +20,85 @@ it('codegens a callable function', async () => { expect(constant).to.be.eq('0x03'); expect(struct).to.be.deep.eq({ foo: true, bar: ['12345', '12345', '12345'] }); }); + +it('allows passing a custom foreign call handler', async () => { + let observedName = ''; + let observedInputs: string[][] = []; + const foreignCallHandler: ForeignCallHandler = async (name: string, inputs: string[][]) => { + // Throwing inside the oracle callback causes a timeout so we log the observed values + // and defer the check against expected values until after the execution is complete. + observedName = name; + observedInputs = inputs; + + return []; + }; + + const my_struct = { foo: true, bar: ['12345', '12345', '12345'] }; + + const [sum, constant, struct]: [u64, u64, MyStruct] = await assert_lt( + '2', + '3', + [0, 0, 0, 0, 0], + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, + '12345', + foreignCallHandler, + ); + + expect(observedName).to.be.eq('print'); + expect(observedInputs).to.be.deep.eq([ + // add newline? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + // x + ['0x0000000000000000000000000000000000000000000000000000000000000002'], + // Type metadata + [ + '0x000000000000000000000000000000000000000000000000000000000000007b', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000006b', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000075', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000073', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000072', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000002c', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000077', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000068', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000036', + '0x0000000000000000000000000000000000000000000000000000000000000034', + '0x000000000000000000000000000000000000000000000000000000000000007d', + ], + // format string? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + ]); + + expect(sum).to.be.eq('0x05'); + expect(constant).to.be.eq('0x03'); + expect(struct).to.be.deep.eq({ foo: true, bar: ['12345', '12345', '12345'] }); +}); diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json index d5ea4b4ad5d..ed2fd225810 100644 --- a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_js", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/acvm_js": "workspace:*", "@noir-lang/noirc_abi": "workspace:*", diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index c34b8dfc825..93fdc856338 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/backend_barretenberg", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_backend_barretenberg", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "source": "src/index.ts", "main": "lib/cjs/index.js", "module": "lib/esm/index.js", diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json index 51856cfe465..0276b8d087c 100644 --- a/tooling/noir_js_types/package.json +++ b/tooling/noir_js_types/package.json @@ -1,11 +1,20 @@ { "name": "@noir-lang/types", - "collaborators": [ + "contributors": [ "The Noir Team " ], "packageManager": "yarn@3.5.1", "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_types", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "lib", "package.json" diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index 5b2cd344eab..d023e1e4391 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -1,10 +1,19 @@ { "name": "@noir-lang/noirc_abi", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noirc_abi_wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "nodejs", "web", @@ -17,10 +26,6 @@ "types": "./web/noirc_abi_wasm.d.ts", "module": "./web/noirc_abi_wasm.js", "sideEffects": false, - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" - }, "scripts": { "build": "bash ./build.sh", "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index 281ec712c39..5557cc917bf 100644 --- 
a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -27,7 +27,7 @@ use js_witness_map::JsWitnessMap; #[wasm_bindgen(typescript_custom_section)] const INPUT_MAP: &'static str = r#" export type Field = string | number | boolean; -export type InputValue = Field | Field[] | InputMap | InputMap[]; +export type InputValue = Field | InputMap | (Field | InputMap)[]; export type InputMap = { [key: string]: InputValue }; "#;
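The optional `foreignCallHandler` parameter threaded through the generated bindings above can be exercised roughly as follows. This is a minimal sketch based on the codegen test in this diff: `assert_lt` and `ForeignCallHandler` come from the generated `./codegen/index.js`, while the handler body and log message are illustrative only; returning an empty array is what the test does to satisfy the `print` oracle call without producing output.

```typescript
import { assert_lt, ForeignCallHandler } from './codegen/index.js';

// Illustrative handler: log every oracle call and return no output values.
// Returning [] satisfies the `print` foreign call emitted by assert_lt's `print(x)`.
const logOracleCalls: ForeignCallHandler = async (name: string, inputs: string[][]) => {
  console.log(`foreign call "${name}" with ${inputs.length} input vector(s)`);
  return [];
};

const my_struct = { foo: true, bar: ['12345', '12345', '12345'] };

// The handler is the new optional trailing argument added by the codegen change above.
const [sum, constant, struct] = await assert_lt(
  '2',
  '3',
  [0, 0, 0, 0, 0],
  { foo: my_struct, bar: [my_struct, my_struct, my_struct], baz: '64' },
  '12345',
  logOracleCalls,
);
```

On the CLI side, the same capability surfaces as the `--oracle-resolver` flag added to `nargo prove` and `nargo test`, which forwards unresolved oracle calls to the given JSON-RPC endpoint via `DefaultForeignCallExecutor`.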