From a64394fa9dad50e41c120fe630d86ee2f695eaad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Thu, 27 Feb 2025 19:51:16 +0000 Subject: [PATCH 01/22] Redo partial notes --- .../aztec-nr/aztec/src/macros/events/mod.nr | 10 +- .../aztec-nr/aztec/src/macros/mod.nr | 2 +- .../aztec-nr/aztec/src/macros/notes/mod.nr | 659 +----------------- .../aztec-nr/aztec/src/macros/utils.nr | 4 +- noir-projects/aztec-nr/aztec/src/note/mod.nr | 1 + .../aztec-nr/aztec/src/note/note_hash.nr | 11 + .../aztec-nr/uint-note/src/uint_note.nr | 308 +++++++- .../contracts/amm_contract/Nargo.toml | 3 +- .../contracts/amm_contract/src/main.nr | 78 ++- .../contracts/fpc_contract/Nargo.toml | 1 + .../contracts/fpc_contract/src/main.nr | 22 +- .../contracts/nft_contract/src/main.nr | 111 +-- .../src/test/transfer_to_private.nr | 23 +- .../contracts/nft_contract/src/types.nr | 2 +- .../nft_contract/src/types/nft_note.nr | 311 ++++++++- .../contracts/token_contract/src/main.nr | 156 ++--- .../src/test/transfer_to_private.nr | 17 +- .../src/archiver/archiver_store_test_suite.ts | 18 +- .../archiver/kv_archiver_store/log_store.ts | 48 +- .../memory_archiver_store.ts | 46 +- yarn-project/end-to-end/src/e2e_nft.test.ts | 30 - .../end-to-end/src/e2e_partial_notes.test.ts | 43 ++ .../private_transfer_recursion.test.ts | 3 +- .../add_public_values_to_payload.ts | 64 -- .../pxe/src/pxe_data_provider/index.ts | 36 +- .../pxe_data_provider.test.ts | 3 +- .../src/logs/l1_payload/l1_note_payload.ts | 73 +- yarn-project/txe/src/node/txe_node.ts | 41 +- yarn-project/txe/src/oracle/txe_oracle.ts | 2 +- 29 files changed, 900 insertions(+), 1226 deletions(-) create mode 100644 noir-projects/aztec-nr/aztec/src/note/note_hash.nr create mode 100644 yarn-project/end-to-end/src/e2e_partial_notes.test.ts delete mode 100644 yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts diff --git a/noir-projects/aztec-nr/aztec/src/macros/events/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/events/mod.nr index 7477da40943..268c8fda541 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/events/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/events/mod.nr @@ -1,4 +1,4 @@ -use super::utils::compute_event_selector; +use super::utils::{compute_event_selector, get_trait_impl_method}; use protocol_types::meta::generate_serialize_to_fields; comptime fn generate_event_interface(s: StructDefinition) -> Quoted { @@ -9,10 +9,16 @@ comptime fn generate_event_interface(s: StructDefinition) -> Quoted { let event_type_id = compute_event_selector(s); + let from_field = get_trait_impl_method( + quote { crate::protocol_types::abis::event_selector::EventSelector }.as_type(), + quote { crate::protocol_types::traits::FromField }, + quote { from_field }, + ); + quote { impl aztec::event::event_interface::EventInterface<$content_len> for $name { fn get_event_type_id() -> aztec::protocol_types::abis::event_selector::EventSelector { - aztec::protocol_types::abis::event_selector::EventSelector::from_field($event_type_id) + $from_field($event_type_id) } fn emit(self, _emit: fn[Env](Self) -> ()) { diff --git a/noir-projects/aztec-nr/aztec/src/macros/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/mod.nr index 062d82d5e38..716e9d10aac 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/mod.nr @@ -234,7 +234,7 @@ comptime fn generate_contract_library_method_compute_note_hash_and_nullifier() - note_type_id: Field, contract_address: aztec::protocol_types::address::AztecAddress, 
nonce: Field, - ) -> pub Option { + ) -> Option { $if_note_type_id_match_statements else { Option::none() diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr index 8fd18fd7c8e..35ea0a3e483 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -1,8 +1,8 @@ -use crate::{note::note_getter_options::PropertySelector, prelude::Point}; +use crate::{macros::utils::AsStrQuote, note::note_getter_options::PropertySelector, prelude::Point}; use protocol_types::meta::{derive_packable_and_get_packed_len, generate_serialize_to_fields}; use std::{ collections::umap::UHashMap, - hash::{BuildHasherDefault, derive_generators, poseidon2::Poseidon2Hasher}, + hash::{BuildHasherDefault, derive_generators, Hash, Hasher, poseidon2::Poseidon2Hasher}, meta::{type_of, unquote}, }; @@ -125,109 +125,6 @@ comptime fn generate_note_hash_trait_impl(s: StructDefinition) -> Quoted { } } -/// Generates default `NoteHash` implementation for a given partial note struct `s` and returns it as a quote. -/// -/// impl NoteHash for NoteStruct { -/// fn compute_note_hash(self, storage_slot: Field) -> Field { -/// ... -/// } -/// -/// fn compute_nullifier(self, context: &mut PrivateContext, note_hash_for_nullify: Field) -> Field { -/// ... -/// } -/// -/// unconstrained fn compute_nullifier_unconstrained(note_hash_for_nullify: Field) -> Field { ... } -/// } -/// -/// # On differences from `generate_note_hash_trait_impl` -/// We use multi-scalar multiplication (MSM) instead of Poseidon2 here since this is a partial note and therefore -/// does require MSM's additive homomorphism property (the property is used to add to the commitment in public). -/// We don't use this implementation for standard notes as well because Poseidon2 is significantly cheaper -/// constraints-wise. -/// -/// # On including length in note hash preimage -/// For a given commitment C = a*G1 + b*G2 + c*G3 we take an x-coordinate of C.x and use it as the hash. -/// However, due to elliptic curve symmetry about the x-axis, for any x-coordinate, -/// there are two points with that x-coordinate. This means -C has the same hash (x-coord) as C, -/// and the tuple [-a, -b, -c] produces the same hash as [a, b, c]. -/// -/// This property makes the hash trivially not collision resistant without including the length. -/// By including the length l, the commitment becomes: -/// C = a*G1 + b*G2 + c*G3 + l*G_len -/// -/// Since -l would be -3 (an extraordinarily large number that cannot be a valid preimage length), -/// including the length protects against these collisions. -comptime fn generate_note_hash_trait_impl_for_partial_note( - s: StructDefinition, - indexed_fixed_fields: [(Quoted, Type, u32)], - indexed_nullable_fields: [(Quoted, Type, u32)], -) -> Quoted { - let name = s.name(); - - // First we compute quotes for MSM - // `compute_note_hash()` is computed over all the fields so we need to merge fixed and nullable. - let merged_fields = indexed_fixed_fields.append(indexed_nullable_fields); - // Now we prefix each of the merged fields with `self.` since they refer to the struct members here. 
- let prefixed_merged_fields = merged_fields.map(|(name, typ, index): (Quoted, Type, u32)| { - (quote { self.$name }, typ, index) - }); - let (new_generators_list, new_scalars_list, _, new_aux_vars) = - generate_multi_scalar_mul(prefixed_merged_fields); - - let (g_slot, g_len) = generate_fixed_generators(); - let new_generators = new_generators_list.push_back(g_slot).push_back(g_len).join(quote {,}); - - let merged_fields_len = merged_fields.len() + 1; // +1 for the storage slot appended below - let new_scalars = new_scalars_list - .push_back(quote { std::hash::from_field_unsafe(storage_slot) }) - .push_back(quote { std::hash::from_field_unsafe($merged_fields_len) }) - .join(quote {,}); - - quote { - impl aztec::note::note_interface::NoteHash for $name { - fn compute_note_hash(self, storage_slot: Field) -> Field { - $new_aux_vars - let point = std::embedded_curve_ops::multi_scalar_mul( - [$new_generators], - [$new_scalars] - ); - point.x - } - - fn compute_nullifier( - self, - context: &mut aztec::prelude::PrivateContext, - note_hash_for_nullify: Field, - ) -> Field { - let owner_npk_m = aztec::keys::getters::get_public_keys(self.owner).npk_m; - // We invoke hash as a static trait function rather than calling owner_npk_m.hash() directly in - // the quote to avoid "trait not in scope" compiler warnings. - let owner_npk_m_hash = aztec::protocol_types::traits::Hash::hash(owner_npk_m); - let secret = context.request_nsk_app(owner_npk_m_hash); - aztec::protocol_types::hash::poseidon2_hash_with_separator( - [note_hash_for_nullify, secret], - aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER as Field, - ) - } - - unconstrained fn compute_nullifier_unconstrained( - self, - note_hash_for_nullify: Field, - ) -> Field { - let owner_npk_m = aztec::keys::getters::get_public_keys(self.owner).npk_m; - // We invoke hash as a static trait function rather than calling owner_npk_m.hash() directly in - // the quote to avoid "trait not in scope" compiler warnings. - let owner_npk_m_hash = aztec::protocol_types::traits::Hash::hash(owner_npk_m); - let secret = aztec::keys::getters::get_nsk_app(owner_npk_m_hash); - aztec::protocol_types::hash::poseidon2_hash_with_separator( - [note_hash_for_nullify, secret], - aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_NULLIFIER as Field, - ) - } - } - } -} - /// Generates note properties struct for a given note struct `s`. /// /// Example: @@ -434,498 +331,6 @@ comptime fn generate_multi_scalar_mul( (generators_list, scalars_list, args_list, aux_vars) } -/// TODO: The macros shouldn't have a hard-coded opinion of a log layout; not even for partial notes. -/// Since partial notes are about to be refactored, I won't tackle it yet. -// -/// Generates setup payload for a given note struct `s`. The setup payload contains log plaintext and hiding point. -/// -/// # On including length in note hash preimage -/// The hiding point is computed as a multi-scalar multiplication that includes the length of the preimage -/// to protect against collisions due to elliptic curve symmetry. -/// -/// When computing a note hash in the partial notes flow, we take the hiding point, add the nullable fields to it -/// in public and then we take the x-coordinate of the point and use it as the note hash. E.g. for a given commitment -/// C = a*G1 + b*G2 + c*G3 we take an x-coordinate of C.x. However, due to elliptic curve symmetry about the x-axis, -/// for any x-coordinate, there are two points with that x-coordinate. 
This means -C has the same hash (x-coord) as C, -/// and the tuple [-a, -b, -c] produces the same hash as [a, b, c]. -/// -/// This property makes the hash trivially not collision resistant without including the length. -/// By including the length l, the commitment becomes: -/// C = a*G1 + b*G2 + c*G3 + l*G_len -/// -/// Since -l would be -3 (an extraordinarily large number that cannot be a valid preimage length), -/// including the length protects against these collisions. -/// -/// # Example function output -/// ``` -/// struct TokenNoteSetupPayload { -/// log_plaintext: [u8; 160], -/// hiding_point: aztec::protocol_types::point::Point -/// } -/// -/// impl TokenNoteSetupPayload { -/// fn new(mut self, npk_m_hash: Field, randomness: Field, storage_slot: Field) -> TokenNoteSetupPayload { -/// let hiding_point = std::embedded_curve_ops::multi_scalar_mul( -/// [ -/// Point { x: 0x..., y: 0x... }, -/// Point { x: 0x..., y: 0x... }, -/// Point { x: 0x..., y: 0x... }, -/// Point { x: 0x..., y: 0x... } -/// ], -/// [ -/// std::hash::from_field_unsafe(npk_m_hash), -/// std::hash::from_field_unsafe(randomness), -/// std::hash::from_field_unsafe(storage_slot), -/// std::hash::from_field_unsafe(3) -/// ] -/// ); -/// -/// let let storage_slot_bytes = storage_slot.to_be_bytes(); -/// let let note_type_id_bytes = TokenNote::get_id().to_be_bytes(); -/// -/// for i in 0..32 { -/// log_plaintext[i] = storage_slot_bytes[i]; -/// log_plaintext[32 + i] = note_type_id_bytes[i]; -/// } -/// -/// let packed_note = [npk_m_hash as Field, randomness as Field]; -/// -/// for i in 0..packed_note.len() { -/// let bytes: [u8; 32] = packed_note[i].to_be_bytes(); -/// for j in 0..32 { -/// log_plaintext[64 + i * 32 + j] = bytes[j]; -/// } -/// } -/// -/// TokenNoteSetupPayload { -/// log_plaintext, -/// hiding_point -/// } -/// } -/// -/// fn encrypt_log(self, context: &mut PrivateContext, recipient_keys: aztec::protocol_types::public_keys::PublicKeys, recipient: aztec::protocol_types::address::AztecAddress) -> [Field; 17] { -/// -/// let encrypted_log_bytes: [u8; 513] = aztec::encrypted_logs::log_assembly_strategies::default_aes128::partial_note::::compute_partial_public_log_payload( -/// context.this_address(), -/// self.log_plaintext, -/// recipient, -/// sender -/// ); -/// -/// aztec::utils::bytes::be_bytes_31_to_fields(encrypted_log_bytes) -/// } -/// -/// impl aztec::protocol_types::traits::Empty for TokenNoteSetupPayload { -/// fn empty() -> Self { -/// Self { log_plaintext: [0; 160], hiding_point: aztec::protocol_types::point::Point::empty() } -/// } -/// } -/// ``` -comptime fn generate_setup_payload( - s: StructDefinition, - indexed_fixed_fields: [(Quoted, Type, u32)], - indexed_nullable_fields: [(Quoted, Type, u32)], -) -> (Quoted, Quoted) { - let name = s.name(); - let setup_payload_name = f"{name}SetupPayload".quoted_contents(); - - // First we get the MSM related quotes - let (new_generators_list, new_scalars_list, new_args_list, new_aux_vars) = - generate_multi_scalar_mul(indexed_fixed_fields); - let new_args = &[quote {mut self}] - .append(new_args_list) - .push_back(quote { storage_slot: Field }) - .join(quote {,}); - - let (g_slot, g_len) = generate_fixed_generators(); - let new_generators = new_generators_list.push_back(g_slot).push_back(g_len).join(quote {,}); - let merged_fields_len = indexed_fixed_fields.len() + indexed_nullable_fields.len() + 1; // +1 for storage_slot - let new_scalars = new_scalars_list - .push_back(quote { std::hash::from_field_unsafe(storage_slot) }) - .push_back(quote { 
std::hash::from_field_unsafe($merged_fields_len) }) - .join(quote {,}); - - // Then the log plaintext ones - let log_plaintext_length = indexed_fixed_fields.len() * 32 + 64; - let setup_log_plaintext: Quoted = - get_setup_log_plaintext_body(s, log_plaintext_length, indexed_nullable_fields); - - // Then we compute values for `encrypt_log(...)` function. - // First, the length of the items that are broken into bytes: - let encrypted_log_bytes_length = 1 /* eph_pk_sign */ - + 48 /* header_ciphertext */ - + log_plaintext_length /* log_plaintext */ - + 16 - - (log_plaintext_length % 16); /* pkcs#7 aes padding */ - - // Each field contains 31 bytes so the length in fields is computed as ceil(encrypted_log_byte_length / 31) - // Recall: ceil(x / y) = (x + y - 1) // y (integer division). - let encrypted_log_fields_length = 1 /* tag */ - + 1 /* eph_pk.x */ - + (encrypted_log_bytes_length + 30) / 31; - - ( - quote { - pub struct $setup_payload_name { - pub log_plaintext: [u8; $log_plaintext_length], - pub hiding_point: aztec::protocol_types::point::Point - } - - impl $setup_payload_name { - pub fn new($new_args) -> $setup_payload_name { - $new_aux_vars - let hiding_point = std::embedded_curve_ops::multi_scalar_mul( - [$new_generators], - [$new_scalars] - ); - $setup_log_plaintext - - $setup_payload_name { - log_plaintext, - hiding_point - } - } - - pub fn encrypt_log(self, context: &mut aztec::prelude::PrivateContext, recipient: aztec::protocol_types::address::AztecAddress, sender: aztec::protocol_types::address::AztecAddress) -> [Field; $encrypted_log_fields_length] { - aztec::encrypted_logs::log_assembly_strategies::default_aes128::partial_note::compute_partial_public_log_payload( - context.this_address(), - self.log_plaintext, - recipient, - sender, - ) - } - } - - impl aztec::protocol_types::traits::Empty for $setup_payload_name { - fn empty() -> Self { - Self { log_plaintext: [0; $log_plaintext_length], hiding_point: aztec::protocol_types::point::Point::empty() } - } - } - }, - setup_payload_name, - ) -} - -/// Generates setup log plaintext for a given note struct `s`. The setup log plaintext is computed by serializing -/// storage slot from target function arguments, note type id from the note struct `s` and the fixed fields. The fixed -/// fields are obtained by passing the whole note struct to the `generate_serialize_to_fields(...)` function but omitting the -/// nullable fields. -comptime fn get_setup_log_plaintext_body( - s: StructDefinition, - log_plaintext_length: u32, - indexed_nullable_fields: [(Quoted, Type, u32)], -) -> Quoted { - let name = s.name(); - - // Now we compute serialization of the fixed fields. We do that by passing the whole note struct - // to the generate_serialize_to_fields function but we omit the nullable fields. - let to_omit = indexed_nullable_fields.map(|(name, _, _): (Quoted, Type, u32)| name); - let (fields_list, aux_vars) = - generate_serialize_to_fields(quote { }, s.as_type(), to_omit, true); - - // If there are `aux_vars` we need to join them with `;` and add a trailing `;` to the joined string. 
- let aux_vars_for_serialization = if aux_vars.len() > 0 { - let joint = aux_vars.join(quote {;}); - quote { $joint; } - } else { - quote {} - }; - let fields = fields_list.join(quote {,}); - - quote { - let mut log_plaintext: [u8; $log_plaintext_length] = [0; $log_plaintext_length]; - - let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); - let note_type_id_bytes: [u8; 32] = $name::get_id().to_be_bytes(); - - for i in 0..32 { - log_plaintext[i] = storage_slot_bytes[i]; - log_plaintext[32 + i] = note_type_id_bytes[i]; - } - - $aux_vars_for_serialization - let packed_note = [$fields]; - - for i in 0..packed_note.len() { - let bytes: [u8; 32] = packed_note[i].to_be_bytes(); - for j in 0..32 { - log_plaintext[64 + i * 32 + j] = bytes[j]; - } - } - } -} - -/// Generates finalization payload for a given note struct `s`. The finalization payload contains log and note hash. -/// -/// Example: -/// ``` -/// struct TokenNoteFinalizationPayload { -/// context: &mut aztec::prelude::PublicContext, -/// hiding_point_slot: Field, -/// setup_log_slot: Field, -/// public_values: [Field; 2] -/// } -/// -/// impl TokenNoteFinalizationPayload { -/// fn new(mut self, context: &mut aztec::prelude::PublicContext, slot: Field, amount: u128) -> TokenNoteFinalizationPayload { -/// self.context = context; -/// self.hiding_point_slot = slot; -/// self.setup_log_slot = slot + aztec::protocol_types::point::POINT_LENGTH as Field; -/// self.public_values = [amount.lo as Field, amount.hi as Field]; -/// self -/// } -/// -/// fn emit(self) { -/// self.emit_note_hash(); -/// self.emit_log(); -/// } -/// -/// fn emit_note_hash(self) { -/// let hiding_point: aztec::prelude::Point = self.context.storage_read(self.hiding_point_slot); -/// assert(!aztec::protocol_types::traits::is_empty(hiding_point), "transfer not prepared"); -/// -/// let finalization_hiding_point = std::embedded_curve_ops::multi_scalar_mul([aztec::generators::Ga3, aztec::generators::Ga4], [std::hash::from_field_unsafe(self.public_values[0]), std::hash::from_field_unsafe(self.public_values[1])]) + hiding_point; -/// -/// let note_hash = finalization_hiding_point.x; -/// -/// self.context.push_note_hash(note_hash); -/// -/// // We reset public storage to zero to achieve the effect of transient storage - kernels will squash -/// // the writes -/// // self.context.storage_write(self.hiding_point_slot, [0; aztec::protocol_types::point::POINT_LENGTH]); -/// } -/// -/// fn emit_log(self) { -/// let setup_log_fields: [Field; 8] = self.context.storage_read(self.setup_log_slot); -/// -/// let mut finalization_log = [0; 11]; -/// -/// for i in 0..setup_log_fields.len() { -/// finalization_log[i + 1] = setup_log_fields[i]; -/// } -/// -/// for i in 0..self.public_values.len() { -/// finalization_log[i + 1 + 8] = self.public_values[j]; -/// } -/// -/// finalization_log[0] = aztec::protocol_types::utils::field::field_from_bytes([ -/// (2 >> 8) as u8, 2 as u8, 0, -/// (8 >> 8) as u8, 8 as u8, 0, -/// (91 >> 8) as u8, 91 as u8, -/// ], true); -/// -/// self.context.emit_public_log(finalization_log); -/// -/// // We reset public storage to zero to achieve the effect of transient storage - kernels will squash -/// // the writes -/// // self.context.storage_write(self.setup_log_slot, [0; 8]); -/// } -/// } -/// -/// impl aztec::protocol_types::traits::Empty for TokenNoteFinalizationPayload { -/// fn empty() -> Self { -/// Self { context: &mut aztec::prelude::PublicContext::empty(), hiding_point_slot: 0, setup_log_slot: 0, public_values: [0, 0] } -/// } -/// } -/// 
``` -comptime fn generate_finalization_payload( - s: StructDefinition, - indexed_fixed_fields: [(Quoted, Type, u32)], - indexed_nullable_fields: [(Quoted, Type, u32)], -) -> (Quoted, Quoted) { - let name = s.name(); - let finalization_payload_name = f"{name}FinalizationPayload".quoted_contents(); - - // We compute serialization of the nullable fields which are to be emitted as a public log. We do that by - // passing the whole note struct to the `generate_serialize_to_fields(...)` function but we omit the fixed fields. - let to_omit = indexed_fixed_fields.map(|(name, _, _): (Quoted, Type, u32)| name); - let (nullable_fields_list, aux_vars) = - generate_serialize_to_fields(quote { }, s.as_type(), to_omit, true); - - // If there are `aux_vars` we need to join them with `;` and add a trailing `;` to the joined string. - let aux_vars_for_serialization = if aux_vars.len() > 0 { - let joint = aux_vars.join(quote {;}); - quote { $joint; } - } else { - quote {} - }; - - // We compute the log length and we concatenate the fields into a single quote. - let public_values_length = nullable_fields_list.len(); - let nullable_fields = nullable_fields_list.join(quote {,}); - - // Now we compute quotes relevant to the multi-scalar multiplication. - // Note 1: We ignore the `scalars_list` and `aux_vars` return values because it's not used by the `emit_note_hash` - // function. Instead, we use `public_values` (defined on the finalization payload struct) and the scalar list - // is computed in the for-loop below. - // Note 2: The `args_list` is not used for note hash MSM but instead for the `new` function. - let (generators_list, _, args_list, _) = generate_multi_scalar_mul(indexed_nullable_fields); - - // We generate scalars_list manually as we need it to refer self.public_values - let mut scalars_list: [Quoted] = &[]; - for i in 0..public_values_length { - scalars_list = - scalars_list.push_back(quote { std::hash::from_field_unsafe(self.public_values[$i]) }); - } - - let generators = generators_list.join(quote {,}); - let scalars = scalars_list.join(quote {,}); - let args = args_list.join(quote {,}); - - // Then we compute values for `encrypt_log(...)` function - let setup_log_plaintext_length = indexed_fixed_fields.len() * 32 + 64; - - let setup_log_bytes_length = 1 /* eph_pk_sign */ - + 48 /* header_ciphertext */ - + setup_log_plaintext_length /* log_plaintext */ - + 16 - - (setup_log_plaintext_length % 16); /* pkcs#7 aes padding */ - - // Each field contains 31 bytes so the length in fields is computed as ceil(encrypted_log_byte_length / 31) - // Recall: ceil(x / y) = (x + y - 1) // y (integer division). 
- let setup_log_fields_length = 1 /* tag */ - + 1 /* eph_pk.x */ - + (setup_log_bytes_length + 30) / 31; - - let finalization_log_fields_length = - 1 /* some length encodings (see below) */ + setup_log_fields_length + public_values_length; - - ( - quote { - pub struct $finalization_payload_name { - pub context: &mut aztec::prelude::PublicContext, - pub hiding_point_slot: Field, - pub setup_log_slot: Field, - pub public_values: [Field; $public_values_length], - } - - impl $finalization_payload_name { - pub fn new(mut self, context: &mut aztec::prelude::PublicContext, slot: Field, $args) -> $finalization_payload_name { - self.context = context; - - self.hiding_point_slot = slot; - self.setup_log_slot = slot + aztec::protocol_types::point::POINT_LENGTH as Field; - - $aux_vars_for_serialization - self.public_values = [$nullable_fields]; - - self - } - - pub fn emit(self) { - self.emit_note_hash(); - self.emit_log(); - } - - pub fn emit_note_hash(self) { - // Read the hiding point from "transient" storage and check it's not empty to ensure the transfer was prepared - let hiding_point: aztec::prelude::Point = self.context.storage_read(self.hiding_point_slot); - assert(!aztec::protocol_types::traits::is_empty(hiding_point), "transfer not prepared"); - - let finalization_hiding_point = std::embedded_curve_ops::multi_scalar_mul( - [$generators], - [$scalars] - ) + hiding_point; - - let note_hash = finalization_hiding_point.x; - - self.context.push_note_hash(note_hash); - - // We reset public storage to zero to achieve the effect of transient storage - kernels will squash - // the writes - // TODO(#9376): Uncomment the following line. - // self.context.storage_write(self.hiding_point_slot, [0; aztec::protocol_types::point::POINT_LENGTH]); - } - - pub fn emit_log(self) { - let max_log_len = aztec::protocol_types::constants::PUBLIC_LOG_DATA_SIZE_IN_FIELDS; - // Make sure we aren't overflowing the public log maximum - assert( - $finalization_log_fields_length <= max_log_len, - f"finalization public log must not exceed {max_log_len} fields", - ); - - // We load the setup log from storage - let setup_log_fields: [Field; $setup_log_fields_length] = self.context.storage_read(self.setup_log_slot); - - // We append the public value to the log and emit it as unencrypted log - let mut finalization_log = [0; $finalization_log_fields_length]; - - // Populate the first field with number of public values and private values: - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. - finalization_log[0] = aztec::protocol_types::utils::field::field_from_bytes([ - ($public_values_length >> 8) as u8, - $public_values_length as u8, - 0, - ($setup_log_fields_length >> 8) as u8, - $setup_log_fields_length as u8, - ], true); - let mut offset = 1; - - // Iterate over the partial log and copy it to the final log - for i in 0..setup_log_fields.len() { - finalization_log[offset + i] = setup_log_fields[i]; - } - offset += setup_log_fields.len(); - - // Iterate over the public values and append them to the log - for i in 0..self.public_values.len() { - finalization_log[offset + i] = self.public_values[i]; - } - - // We emit the finalization log via the public logs stream - self.context.emit_public_log(finalization_log); - - // We reset public storage to zero to achieve the effect of transient storage - kernels will squash - // the writes - // TODO(#9376): Uncomment the following line. 
- // self.context.storage_write(self.setup_log_slot, [0; $setup_log_field_length]); - } - } - - impl aztec::protocol_types::traits::Empty for $finalization_payload_name { - fn empty() -> Self { - Self { context: &mut aztec::prelude::PublicContext::empty(), public_values: [0; $public_values_length], hiding_point_slot: 0, setup_log_slot: 0 } - } - } - }, - finalization_payload_name, - ) -} - -/// Generates `PartialNote` implementation for a given note struct `s`. -/// -/// Example: -/// ``` -/// impl PartialNote for TokenNote { -/// fn setup_payload() -> TokenNoteSetupPayload { -/// TokenNoteSetupPayload::empty() -/// } -/// -/// fn finalization_payload() -> TokenNoteFinalizationPayload { -/// TokenNoteFinalizationPayload::empty() -/// } -/// } -/// ``` -comptime fn generate_partial_note_impl( - s: StructDefinition, - setup_payload_name: Quoted, - finalization_payload_name: Quoted, -) -> Quoted { - let name = s.name(); - quote { - impl aztec::note::note_interface::PartialNote<$setup_payload_name, $finalization_payload_name> for $name { - fn setup_payload() -> $setup_payload_name { - $setup_payload_name::empty() - } - - fn finalization_payload() -> $finalization_payload_name { - $finalization_payload_name::empty() - } - } - } -} - /// Registers a note struct `note` with the given `note_packed_len`, `note_type_id`, `fixed_fields` and /// `nullable_fields` in the global `NOTES` map. comptime fn register_note( @@ -971,64 +376,6 @@ comptime fn index_note_fields( (indexed_fixed_fields, indexed_nullable_fields) } -/// Generates the following: -/// - NoteTypeProperties -/// - SetupPayload -/// - FinalizationPayload -/// - PartialNote trait implementation -/// - NoteType trait implementation -/// - NoteHash trait implementation -/// - Packable implementation -/// -/// Registers the note in the global `NOTES` map. -/// -/// For more details on the generated code, see the individual functions. -/// -/// `nullable_fields` are a list of quotes passed in as varargs which are used to identify which fields/struct members -/// in the partial note are nullable. -#[varargs] -pub comptime fn partial_note(s: StructDefinition, nullable_fields: [Quoted]) -> Quoted { - assert_has_owner(s); - - // We separate struct members into fixed ones and nullable ones and we store info about the start index of each - // member in the packed note array. 
-    let (indexed_fixed_fields, indexed_nullable_fields) = index_note_fields(s, nullable_fields);
-
-    let note_properties = generate_note_properties(s);
-    let note_type_id = get_next_note_type_id();
-    let (setup_payload_impl, setup_payload_name) =
-        generate_setup_payload(s, indexed_fixed_fields, indexed_nullable_fields);
-    let (finalization_payload_impl, finalization_payload_name) =
-        generate_finalization_payload(s, indexed_fixed_fields, indexed_nullable_fields);
-    let note_interface_impl = generate_note_interface(s, note_type_id);
-    let note_hash_impl = generate_note_hash_trait_impl_for_partial_note(
-        s,
-        indexed_fixed_fields,
-        indexed_nullable_fields,
-    );
-    let partial_note_impl =
-        generate_partial_note_impl(s, setup_payload_name, finalization_payload_name);
-    let (packable_impl, note_packed_len) = derive_packable_if_not_implemented_and_get_len(s);
-
-    register_note(
-        s,
-        note_packed_len,
-        note_type_id,
-        indexed_fixed_fields,
-        indexed_nullable_fields,
-    );
-
-    quote {
-        $note_properties
-        $setup_payload_impl
-        $finalization_payload_impl
-        $note_interface_impl
-        $note_hash_impl
-        $partial_note_impl
-        $packable_impl
-    }
-}
-
 /// Generates the following:
 /// - NoteTypeProperties
 /// - NoteType trait implementation
@@ -1127,7 +474,7 @@ pub comptime fn custom_note(s: StructDefinition) -> Quoted {
 
 /// Asserts that the note has an 'owner' field.
 ///
-/// We require notes implemented with #[note] macro and #[partial_note] macro to have an 'owner' field because our
+/// We require notes implemented with the #[note] macro to have an 'owner' field because our
 /// auto-generated nullifier functions expect it. This requirement is most likely only temporary.
 comptime fn assert_has_owner(note: StructDefinition) {
     let fields = note.fields_as_written();
diff --git a/noir-projects/aztec-nr/aztec/src/macros/utils.nr b/noir-projects/aztec-nr/aztec/src/macros/utils.nr
index ecd42051abe..c9d3dc79b6d 100644
--- a/noir-projects/aztec-nr/aztec/src/macros/utils.nr
+++ b/noir-projects/aztec-nr/aztec/src/macros/utils.nr
@@ -273,6 +273,8 @@ pub(crate) comptime fn get_trait_impl_method(
     target_method: Quoted,
 ) -> TypedExpr {
     let trait_constraint = target_trait.as_trait_constraint();
-    typ.get_trait_impl(trait_constraint).unwrap().methods().filter(|m| m.name() == target_method)[0]
+    typ.get_trait_impl(trait_constraint).expect(f"Type does not implement trait").methods().filter(
+        |m| m.name() == target_method,
+    )[0]
         .as_typed_expr()
 }
diff --git a/noir-projects/aztec-nr/aztec/src/note/mod.nr b/noir-projects/aztec-nr/aztec/src/note/mod.nr
index ab9c50a007f..ef0b9d43cd1 100644
--- a/noir-projects/aztec-nr/aztec/src/note/mod.nr
+++ b/noir-projects/aztec-nr/aztec/src/note/mod.nr
@@ -9,3 +9,4 @@ pub mod retrieved_note;
 pub mod utils;
 pub mod note_emission;
 pub mod note_field;
+pub mod note_hash;
diff --git a/noir-projects/aztec-nr/aztec/src/note/note_hash.nr b/noir-projects/aztec-nr/aztec/src/note/note_hash.nr
new file mode 100644
index 00000000000..93de41a82d6
--- /dev/null
+++ b/noir-projects/aztec-nr/aztec/src/note/note_hash.nr
@@ -0,0 +1,11 @@
+use protocol_types::{
+    constants::GENERATOR_INDEX__NOTE_HASH, hash::poseidon2_hash_with_separator,
+    utils::arrays::array_concat,
+};
+
+pub fn note_hash<let N: u32>(values: [Field; N], storage_slot: Field) -> Field {
+    poseidon2_hash_with_separator(
+        array_concat(values, [storage_slot]),
+        GENERATOR_INDEX__NOTE_HASH,
+    )
+}
diff --git a/noir-projects/aztec-nr/uint-note/src/uint_note.nr b/noir-projects/aztec-nr/uint-note/src/uint_note.nr
index 23c4e687c49..a1d8c826207 100644
--- a/noir-projects/aztec-nr/uint-note/src/uint_note.nr +++ b/noir-projects/aztec-nr/uint-note/src/uint_note.nr @@ -1,21 +1,92 @@ use dep::aztec::{ - macros::notes::partial_note, + context::{PrivateContext, PublicContext}, + encrypted_logs::log_assembly_strategies::default_aes128, + keys::getters::{get_nsk_app, get_public_keys}, + macros::notes::custom_note, + note::note_interface::{NoteHash, NoteType}, oracle::random::random, - protocol_types::{address::AztecAddress, traits::Serialize}, + protocol_types::{ + address::AztecAddress, + constants::{GENERATOR_INDEX__NOTE_HASH, GENERATOR_INDEX__NOTE_NULLIFIER}, + hash::poseidon2_hash_with_separator, + traits::{Deserialize, Hash, Packable, Serialize, ToField}, + utils::arrays::array_concat, + }, }; -// docs:start:UintNote -// We derive the Serialize trait because in some cases notes are used as parameters to contract functions. -#[partial_note(quote {value})] +// UintNote supports partial notes, i.e. the ability to create an incomplete note in private, hiding certain values (the +// owner, storage slot and randomness), and then completing the note in public with the ones missing (the amount). +// Partial notes are being actively developed and are not currently fully supported via macros, and so we rely on the +// #[custom_note] macro to implement it manually, resulting in some boilerplate. This is expected to be unnecessary once +// macro support is expanded. + +/// A private note representing a numeric value associated to an account (e.g. a token balance). +#[custom_note] #[derive(Eq, Serialize)] pub struct UintNote { - // The amount of tokens in the note - value: u128, + // The ordering of these fields is important given that it must: + // a) match that of UintPartialNotePrivateContent, and + // b) have the public field at the end + // Correct ordering is checked by the tests in this module. + + /// The owner of the note, i.e. the account whose nullifier secret key is required to compute the nullifier. owner: AztecAddress, - // Randomness of the note to protect against note hash preimage attacks + /// Random value, protects against note hash preimage attacks. randomness: Field, + /// The number stored in the note. + value: u128, +} + +impl NoteHash for UintNote { + fn compute_note_hash(self, storage_slot: Field) -> Field { + // Partial notes can be implemented by having the note hash be either the result of multiscalar multiplication + // (MSM), or two rounds of poseidon. MSM results in more constraints and is only required when multiple variants + // of partial notes are supported. Because UintNote has just one variant (where the value is public), we use + // poseidon instead. + + // We must compute the same note hash as would be produced by a partial note created and completed with the same + // values, so that notes all behave the same way regardless of how they were created. To achieve this, we + // perform both steps of the partial note computation. + + // First we create the partial note from a commitment to the private content (including storage slot). + let private_content = + UintPartialNotePrivateContent { owner: self.owner, randomness: self.randomness }; + let partial_note = PartialUintNote { + commitment: private_content.compute_partial_commitment(storage_slot), + }; + + // Then compute the completion note hash. In a real partial note this step would be performed in public. 
+        partial_note.compute_complete_note_hash(self.value)
+    }
+
+    // The nullifiers are nothing special - this is just the canonical implementation that would be injected by the
+    // #[note] macro.
+
+    fn compute_nullifier(
+        self,
+        context: &mut PrivateContext,
+        note_hash_for_nullify: Field,
+    ) -> Field {
+        let owner_npk_m = get_public_keys(self.owner).npk_m;
+        let owner_npk_m_hash = owner_npk_m.hash();
+        let secret = context.request_nsk_app(owner_npk_m_hash);
+        poseidon2_hash_with_separator(
+            [note_hash_for_nullify, secret],
+            GENERATOR_INDEX__NOTE_NULLIFIER,
+        )
+    }
+
+    unconstrained fn compute_nullifier_unconstrained(self, note_hash_for_nullify: Field) -> Field {
+        let owner_npk_m = get_public_keys(self.owner).npk_m;
+        let owner_npk_m_hash = owner_npk_m.hash();
+        let secret = get_nsk_app(owner_npk_m_hash);
+        poseidon2_hash_with_separator(
+            [note_hash_for_nullify, secret],
+            GENERATOR_INDEX__NOTE_NULLIFIER,
+        )
+    }
 }
-// docs:end:UintNote
+
 impl UintNote {
     pub fn new(value: u128, owner: AztecAddress) -> Self {
         // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing,
@@ -29,4 +100,223 @@ impl UintNote {
     pub fn get_value(self) -> u128 {
         self.value
     }
+
+    /// Creates a partial note that will hide the owner and storage slot but not the value, since the note will later
+    /// be completed in public. This is a powerful technique for scenarios in which the value cannot be known in private
+    /// (e.g. because it depends on some public state, such as a DEX).
+    ///
+    /// The returned `PartialUintNote` value must be sent to public execution via a secure channel, since it is not
+    /// possible to verify the integrity of its contents due to it hiding information. The recommended ways to do this
+    /// are to retrieve it from public storage, or to receive it in an internal public function call.
+    ///
+    /// Each partial note should only be used once, since otherwise multiple notes would be linked together and known to
+    /// belong to the same owner.
+    ///
+    /// As part of the partial note creation process, a log will be sent to `recipient` from `sender` so that they can
+    /// discover the note. `recipient` will typically be the same as `owner`.
+    pub fn partial(
+        owner: AztecAddress,
+        storage_slot: Field,
+        context: &mut PrivateContext,
+        recipient: AztecAddress,
+        sender: AztecAddress,
+    ) -> PartialUintNote {
+        // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing,
+        // so a malicious sender could use non-random values to make the note less private. But they already know
+        // the full note pre-image anyway, and so the recipient already trusts them to not disclose this
+        // information. We can therefore assume that the sender will cooperate in the random value generation.
+        let randomness = unsafe { random() };
+
+        // We create a commitment to the private data, which we then use to construct the log we send to the recipient.
+        let commitment = UintPartialNotePrivateContent { owner, randomness }
+            .compute_partial_commitment(storage_slot);
+
+        // Our partial note log encoding scheme includes a field with the tag of the public completion log, and we use
+        // the commitment as the tag. This is good for multiple reasons:
+        // - the commitment is uniquely tied to this partial note
+        // - the commitment is already public information, so we're not revealing anything else
+        // - we don't need to create any additional information, private or public, for the tag
+        // - other contracts cannot impersonate us and emit logs with the same tag due to public log siloing
+        let private_log_content = PrivateUintPartialNotePrivateLogContent {
+            owner,
+            randomness,
+            public_log_tag: commitment,
+        };
+
+        // TODO: we're abusing the note encoding scheme by computing the log for a fake note type, with a note type
+        // id chosen so that the recipient will realize that these are the private fields of a partial note. Ideally we'd not rely
+        // on this crude mechanism and we'd instead compute it as a proper event log. However, given the current state
+        // of the log library it's far easier to do it this way.
+        let encrypted_log = default_aes128::note::compute_log(
+            *context,
+            private_log_content,
+            storage_slot,
+            recipient,
+            sender,
+        );
+        context.emit_private_log(encrypted_log);
+
+        PartialUintNote { commitment }
+    }
 }
+
+/// The private content of a partial UintNote, i.e. the fields that will remain private. All other note fields will be
+/// made public.
+#[derive(Packable)]
+struct UintPartialNotePrivateContent {
+    // The ordering of these fields is important given that it must match that of UintNote.
+    // Correct ordering is checked by the tests in this module.
+    owner: AztecAddress,
+    randomness: Field,
+}
+
+impl UintPartialNotePrivateContent {
+    fn compute_partial_commitment(self, storage_slot: Field) -> Field {
+        // Here we commit to all private values, including the storage slot.
+        poseidon2_hash_with_separator(
+            array_concat(self.pack(), [storage_slot]),
+            GENERATOR_INDEX__NOTE_HASH,
+        )
+    }
+}
+
+#[derive(Packable)]
+struct PrivateUintPartialNotePrivateLogContent {
+    // The ordering of these fields is important given that it must:
+    // a) match that of UintNote, and
+    // b) have the public log tag at the beginning
+    // Correct ordering is checked by the tests in this module.
+    public_log_tag: Field,
+    owner: AztecAddress,
+    randomness: Field,
+}
+
+impl NoteType for PrivateUintPartialNotePrivateLogContent {
+    fn get_id() -> Field {
+        // We abuse the fact that note type ids are 7 bits long to use the 8th bit to indicate that the log corresponds to a
+        // partial note. Ideally we'd use proper events with selectors, but those are not handled well at the moment.
+        UintNote::get_id() + 128
+    }
+}
+
+/// A partial instance of a UintNote. This value represents a private commitment to the owner, randomness and storage
+/// slot, but the value field has not yet been set. A partial note can be completed in public with the `complete`
+/// function (revealing the value to the public), resulting in a UintNote that can be used like any other one (except
+/// of course that its value is known).
+#[derive(Packable, Serialize, Deserialize)]
+pub struct PartialUintNote {
+    commitment: Field,
+}
+
+impl PartialUintNote {
+    pub fn commitment(self) -> Field {
+        self.commitment
+    }
+}
+
+impl PartialUintNote {
+    /// Completes the partial note, creating a new note that can be used like any other UintNote.
+    pub fn complete(self, value: u128, context: &mut PublicContext) {
+        // We need to do two things:
+        // - emit a public log containing the public fields (the value). The contract will later find it by searching
+        //   for the expected tag (which is simply the partial note commitment).
+        // - insert the completion note hash (i.e. the hash of the note) into the note hash tree. This is typically
+        //   only done in private to hide the preimage of the hash that is inserted, but completed partial notes are
+        //   inserted in public as the public values are provided and the note hash computed.
+        context.emit_public_log(self.compute_note_completion_log(value));
+        context.push_note_hash(self.compute_complete_note_hash(value));
+    }
+
+    fn compute_note_completion_log(self, value: u128) -> [Field; 2] {
+        // The first field of this log must be the tag that the recipient of the partial note private field logs
+        // expects, which is equal to the partial note commitment.
+        [self.commitment, value.to_field()]
+    }
+
+    fn compute_complete_note_hash(self, value: u128) -> Field {
+        // Here we finalize the note hash by including the (public) value into the partial note commitment. Note that we
+        // use the same generator index as we used for the first round of poseidon - this is not an issue.
+        poseidon2_hash_with_separator(
+            [self.commitment, value.to_field()],
+            GENERATOR_INDEX__NOTE_HASH,
+        )
+    }
+}
+
+mod test {
+    use super::{
+        PartialUintNote, PrivateUintPartialNotePrivateLogContent, UintNote,
+        UintPartialNotePrivateContent,
+    };
+    use dep::aztec::{
+        note::note_interface::NoteHash,
+        protocol_types::{
+            address::AztecAddress,
+            traits::{FromField, Packable},
+            utils::arrays::array_concat,
+        },
+        utils::array::subarray,
+    };
+
+    global value: u128 = 17;
+    global randomness: Field = 42;
+    global owner: AztecAddress = AztecAddress::from_field(50);
+    global storage_slot: Field = 13;
+
+    #[test]
+    fn note_hash_matches_completed_partial_note_hash() {
+        // Tests that a UintNote has the same note hash as a PartialUintNote created and then completed with the same
+        // private values. This requires the same hash function to be used in both flows, with the fields in the
+        // same order.
+
+        let note = UintNote { value, randomness, owner };
+        let note_hash = note.compute_note_hash(storage_slot);
+
+        let partial_note_private_content = UintPartialNotePrivateContent { owner, randomness };
+
+        let partial_note = PartialUintNote {
+            commitment: partial_note_private_content.compute_partial_commitment(storage_slot),
+        };
+        let completed_partial_note_hash = partial_note.compute_complete_note_hash(value);
+
+        assert_eq(note_hash, completed_partial_note_hash);
+    }
+
+    #[test]
+    fn unpack_from_partial_note_encoding() {
+        // Tests that the packed representation of a regular UintNote can be reconstructed given the partial note
+        // private fields log and the public completion log, ensuring the recipient will be able to compute the
+        // completed note as if it were a regular UintNote.
+
+        let note = UintNote { value, randomness, owner };
+
+        let partial_note_private_content = UintPartialNotePrivateContent { owner, randomness };
+        let commitment = partial_note_private_content.compute_partial_commitment(storage_slot);
+
+        let private_log_content = PrivateUintPartialNotePrivateLogContent {
+            owner,
+            randomness,
+            public_log_tag: commitment,
+        };
+        let partial_note = PartialUintNote { commitment };
+
+        // The first field of the partial note private content is the public completion log tag, so it should match the
+        // first field of the public log.
+        assert_eq(
+            private_log_content.pack()[0],
+            partial_note.compute_note_completion_log(value)[0],
+        );
+
+        // Then we extract all fields except the first of both logs (i.e. the public log tag), and combine them to
+        // produce the note's packed representation.
This requires that the members of the intermediate structs are in + // the same order as in UintNote. + let private_log_without_public_tag: [_; 2] = subarray(private_log_content.pack(), 1); + let public_log_without_tag: [_; 1] = + subarray(partial_note.compute_note_completion_log(value), 1); + + assert_eq( + array_concat(private_log_without_public_tag, public_log_without_tag), + note.pack(), + ); + } } diff --git a/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml index e5c4e342ed8..d00746cfeb1 100644 --- a/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml @@ -6,4 +6,5 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } -token = { path = "../token_contract" } \ No newline at end of file +token = { path = "../token_contract" } +uint_note = { path = "../../../aztec-nr/uint-note" } diff --git a/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr index d4e9d65fc63..65f0578c6e0 100644 --- a/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr @@ -42,7 +42,9 @@ pub contract AMM { macros::{functions::{initializer, internal, private, public}, storage::storage}, prelude::{AztecAddress, PublicImmutable}, }; + use dep::token::Token; + use dep::uint_note::uint_note::PartialUintNote; #[storage] struct Storage { @@ -103,19 +105,19 @@ pub contract AMM { token0.transfer_to_public(sender, context.this_address(), amount0_max, nonce).call( &mut context, ); - let refund_token0_hiding_point_slot = + let refund_token0_partial_note = token0.prepare_private_balance_increase(sender, sender).call(&mut context); token1.transfer_to_public(sender, context.this_address(), amount1_max, nonce).call( &mut context, ); - let refund_token1_hiding_point_slot = + let refund_token1_partial_note = token1.prepare_private_balance_increase(sender, sender).call(&mut context); // The number of liquidity tokens to mint for the caller depends on both the live balances and the amount // supplied, both of which can only be known during public execution. We therefore prepare a partial note that // will get completed via minting. - let liquidity_hiding_point_slot = + let liquidity_partial_note = liquidity_token.prepare_private_balance_increase(sender, sender).call(&mut context); // We then complete the flow in public. Note that the type of operation and amounts will all be publicly known, @@ -124,9 +126,9 @@ pub contract AMM { AMM::at(context.this_address()) ._add_liquidity( config, - refund_token0_hiding_point_slot, - refund_token1_hiding_point_slot, - liquidity_hiding_point_slot, + refund_token0_partial_note, + refund_token1_partial_note, + liquidity_partial_note, amount0_max, amount1_max, amount0_min, @@ -139,9 +141,9 @@ pub contract AMM { #[internal] fn _add_liquidity( config: Config, // We could read this in public, but it's cheaper to receive from private - refund_token0_hiding_point_slot: Field, - refund_token1_hiding_point_slot: Field, - liquidity_hiding_point_slot: Field, + refund_token0_partial_note: PartialUintNote, + refund_token1_partial_note: PartialUintNote, + liquidity_partial_note: PartialUintNote, amount0_max: u128, amount1_max: u128, amount0_min: u128, @@ -181,12 +183,12 @@ pub contract AMM { // simply stay in public storage and not be completed, but this is not an issue. 
if (refund_amount_token0 > 0 as u128) { token0 - .finalize_transfer_to_private(refund_amount_token0, refund_token0_hiding_point_slot) + .finalize_transfer_to_private(refund_amount_token0, refund_token0_partial_note) .call(&mut context); } if (refund_amount_token1 > 0 as u128) { token1 - .finalize_transfer_to_private(refund_amount_token1, refund_token1_hiding_point_slot) + .finalize_transfer_to_private(refund_amount_token1, refund_token1_partial_note) .call(&mut context); } @@ -217,9 +219,9 @@ pub contract AMM { }; assert(liquidity_amount > 0 as u128, "INSUFFICIENT_LIQUIDITY_MINTED"); - liquidity_token - .finalize_mint_to_private(liquidity_amount, liquidity_hiding_point_slot) - .call(&mut context); + liquidity_token.finalize_mint_to_private(liquidity_amount, liquidity_partial_note).call( + &mut context, + ); } /// Privately removes liquidity from the pool. This function receives how many liquidity tokens to burn, and the @@ -248,9 +250,9 @@ pub contract AMM { // We don't yet know how many tokens the sender will get - that can only be computed during public execution // since the it depends on the live balances. We therefore simply prepare partial notes to the sender. - let token0_hiding_point_slot = + let token0_partial_note = token0.prepare_private_balance_increase(sender, sender).call(&mut context); - let token1_hiding_point_slot = + let token1_partial_note = token1.prepare_private_balance_increase(sender, sender).call(&mut context); // We then complete the flow in public. Note that the type of operation and amounts will all be publicly known, @@ -260,8 +262,8 @@ pub contract AMM { ._remove_liquidity( config, liquidity, - token0_hiding_point_slot, - token1_hiding_point_slot, + token0_partial_note, + token1_partial_note, amount0_min, amount1_min, ) @@ -273,8 +275,8 @@ pub contract AMM { fn _remove_liquidity( config: Config, // We could read this in public, but it's cheaper to receive from private liquidity: u128, - token0_hiding_point_slot: Field, - token1_hiding_point_slot: Field, + token0_partial_note: PartialUintNote, + token1_partial_note: PartialUintNote, amount0_min: u128, amount1_min: u128, ) { @@ -297,8 +299,8 @@ pub contract AMM { // We can now burn the liquidity tokens that had been privately transferred into the AMM, as well as complete // both partial notes. liquidity_token.burn_public(context.this_address(), liquidity, 0).call(&mut context); - token0.finalize_transfer_to_private(amount0, token0_hiding_point_slot).call(&mut context); - token1.finalize_transfer_to_private(amount1, token1_hiding_point_slot).call(&mut context); + token0.finalize_transfer_to_private(amount0, token0_partial_note).call(&mut context); + token1.finalize_transfer_to_private(amount1, token1_partial_note).call(&mut context); } /// Privately swaps `amount_in` `token_in` tokens for at least `amount_out_mint` `token_out` tokens with the pool. 
@@ -328,7 +330,7 @@ pub contract AMM { Token::at(token_in) .transfer_to_public(sender, context.this_address(), amount_in, nonce) .call(&mut context); - let token_out_hiding_point_slot = Token::at(token_out) + let token_out_partial_note = Token::at(token_out) .prepare_private_balance_increase(sender, sender) .call(&mut context); @@ -338,7 +340,7 @@ pub contract AMM { token_out, amount_in, amount_out_min, - token_out_hiding_point_slot, + token_out_partial_note, ) .enqueue(&mut context); } @@ -350,7 +352,7 @@ pub contract AMM { token_out: AztecAddress, amount_in: u128, amount_out_min: u128, - token_out_hiding_point_slot: Field, + token_out_partial_note: PartialUintNote, ) { // In order to compute the amount to swap we need the live token balances. Note that at this state the token in // transfer has already been completed as that function call was enqueued before this one. We therefore need to @@ -366,9 +368,9 @@ pub contract AMM { let amount_out = get_amount_out(amount_in, balance_in, balance_out); assert(amount_out >= amount_out_min, "INSUFFICIENT_OUTPUT_AMOUNT"); - Token::at(token_out) - .finalize_transfer_to_private(amount_out, token_out_hiding_point_slot) - .call(&mut context); + Token::at(token_out).finalize_transfer_to_private(amount_out, token_out_partial_note).call( + &mut context, + ); } /// Privately swaps at most `amount_in_max` `token_in` tokens for `amount_out` `token_out` tokens with the pool. @@ -402,10 +404,10 @@ pub contract AMM { Token::at(token_in) .transfer_to_public(sender, context.this_address(), amount_in_max, nonce) .call(&mut context); - let change_token_in_hiding_point_slot = + let change_token_in_partial_note = Token::at(token_in).prepare_private_balance_increase(sender, sender).call(&mut context); - let token_out_hiding_point_slot = Token::at(token_out) + let token_out_partial_note = Token::at(token_out) .prepare_private_balance_increase(sender, sender) .call(&mut context); @@ -415,8 +417,8 @@ pub contract AMM { token_out, amount_in_max, amount_out, - change_token_in_hiding_point_slot, - token_out_hiding_point_slot, + change_token_in_partial_note, + token_out_partial_note, ) .enqueue(&mut context); } @@ -428,8 +430,8 @@ pub contract AMM { token_out: AztecAddress, amount_in_max: u128, amount_out: u128, - change_token_in_hiding_point_slot: Field, - token_out_hiding_point_slot: Field, + change_token_in_partial_note: PartialUintNote, + token_out_partial_note: PartialUintNote, ) { // In order to compute the amount to swap we need the live token balances. Note that at this state the token in // transfer has already been completed as that function call was enqueued before this one. We therefore need to @@ -448,15 +450,15 @@ pub contract AMM { let change = amount_in_max - amount_in; if (change > 0 as u128) { Token::at(token_in) - .finalize_transfer_to_private(change, change_token_in_hiding_point_slot) + .finalize_transfer_to_private(change, change_token_in_partial_note) .call(&mut context); } // Note again that we already knew the amount out, but for consistency we want to only commit this note once // all other steps have been performed. 
- Token::at(token_out) - .finalize_transfer_to_private(amount_out, token_out_hiding_point_slot) - .call(&mut context); + Token::at(token_out).finalize_transfer_to_private(amount_out, token_out_partial_note).call( + &mut context, + ); } unconstrained fn get_amount_out_for_exact_in( diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml index b1f9481c44e..1911d8a7a32 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/fpc_contract/Nargo.toml @@ -8,3 +8,4 @@ type = "contract" aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } token = { path = "../token_contract" } +uint_note = { path = "../../../aztec-nr/uint-note" } diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index a2ebf7506d3..e0a1c66de0d 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -12,9 +12,13 @@ use dep::aztec::macros::aztec; #[aztec] pub contract FPC { use crate::{config::Config, utils::safe_cast_to_u128}; + use dep::uint_note::uint_note::PartialUintNote; use aztec::{ macros::{functions::{initializer, internal, private, public}, storage::storage}, - protocol_types::{abis::function_selector::FunctionSelector, address::AztecAddress}, + protocol_types::{ + abis::function_selector::FunctionSelector, address::AztecAddress, traits::Serialize, + utils::arrays::array_concat, + }, state_vars::PublicImmutable, }; use token::Token; @@ -89,13 +93,16 @@ pub contract FPC { token.transfer_to_public(user, context.this_address(), max_fee, nonce).call(&mut context); // Prepare a partial note for the refund for the user. - let refund_slot = token.prepare_private_balance_increase(user, user).call(&mut context); + let partial_note = token.prepare_private_balance_increase(user, user).call(&mut context); // Set a public teardown function in which the refund will be paid back to the user by finalizing the partial note. context.set_public_teardown_function( context.this_address(), comptime { FunctionSelector::from_signature("complete_refund((Field),Field,u128)") }, - [accepted_asset.to_field(), refund_slot, max_fee as Field], + array_concat( + array_concat(accepted_asset.serialize(), partial_note.serialize()), + max_fee.serialize(), + ), ); // Set the FPC as the fee payer of the tx. @@ -108,7 +115,7 @@ pub contract FPC { // docs:start:complete_refund #[public] #[internal] - fn complete_refund(accepted_asset: AztecAddress, refund_slot: Field, max_fee: u128) { + fn complete_refund(accepted_asset: AztecAddress, partial_note: PartialUintNote, max_fee: u128) { let tx_fee = safe_cast_to_u128(context.transaction_fee()); // 1. Check that user funded the fee payer contract with at least the transaction fee. 
@@ -119,7 +126,7 @@ pub contract FPC { // TODO(#10805): Introduce a real exchange rate let refund_amount = max_fee - tx_fee; - Token::at(accepted_asset).finalize_transfer_to_private(refund_amount, refund_slot).call( + Token::at(accepted_asset).finalize_transfer_to_private(refund_amount, partial_note).call( &mut context, ); } @@ -162,7 +169,10 @@ pub contract FPC { context.set_public_teardown_function( context.this_address(), comptime { FunctionSelector::from_signature("pay_refund((Field),u128,(Field))") }, - [context.msg_sender().to_field(), max_fee as Field, config.accepted_asset.to_field()], + array_concat( + array_concat(context.msg_sender().serialize(), max_fee.serialize()), + config.accepted_asset.serialize(), + ), ); } diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr index 15cdb727687..db34d835d94 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr @@ -8,7 +8,7 @@ use dep::aztec::macros::aztec; // and private. #[aztec] pub contract NFT { - use crate::types::nft_note::NFTNote; + use crate::types::nft_note::{NFTNote, PartialNFTNote}; use dep::authwit::auth::{ assert_current_call_valid_authwit, assert_current_call_valid_authwit_public, compute_authwit_nullifier, @@ -21,12 +21,11 @@ pub contract NFT { storage::storage, }, note::constants::MAX_NOTES_PER_PAGE, - oracle::random::random, prelude::{ AztecAddress, Map, NoteGetterOptions, NoteViewerOptions, PrivateContext, PrivateSet, PublicContext, PublicImmutable, PublicMutable, }, - protocol_types::{point::Point, traits::Serialize}, + protocol_types::traits::Serialize, utils::comparison::Comparator, }; use dep::compressed_string::FieldCompressedString; @@ -170,21 +169,19 @@ pub contract NFT { let nft = NFT::at(context.this_address()); // We prepare the private balance increase. - let hiding_point_slot = _prepare_private_balance_increase(to, &mut context, storage); + let partial_note = _prepare_private_balance_increase(to, &mut context, storage); // At last we finalize the transfer. Usage of the `unsafe` method here is safe because we set the `from` // function argument to a message sender, guaranteeing that he can transfer only his own NFTs. - nft._finalize_transfer_to_private_unsafe(from, token_id, hiding_point_slot).enqueue( - &mut context, - ); + nft._finalize_transfer_to_private_unsafe(from, token_id, partial_note).enqueue(&mut context); } // docs:end:transfer_to_private /// Prepares an increase of private balance of `to` (partial note). The increase needs to be finalized by calling - /// `finalize_transfer_to_private. Returns a hiding point slot. + /// `finalize_transfer_to_private` with the returned partial note. // docs:start:prepare_private_balance_increase #[private] - fn prepare_private_balance_increase(to: AztecAddress) -> Field { + fn prepare_private_balance_increase(to: AztecAddress) -> PartialNFTNote { _prepare_private_balance_increase(to, &mut context, storage) } @@ -198,75 +195,41 @@ pub contract NFT { to: AztecAddress, context: &mut PrivateContext, storage: Storage<&mut PrivateContext>, - ) -> Field { - let to_note_slot = storage.private_nfts.at(to).storage_slot; - - // We create a setup payload with unpopulated/zero token id for 'to' - // TODO(#7775): Manually fetching the randomness here is not great. If we decide to include randomness in all - // notes we could just inject it in macros. 
- - // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing, - // so a malicious sender could use non-random values to make the note less private. But they already know - // the full note pre-image anyway, and so the recipient already trusts them to not disclose this - // information. We can therefore assume that the sender will cooperate in the random value generation. - let note_randomness = unsafe { random() }; - let note_setup_payload = NFTNote::setup_payload().new(to, note_randomness, to_note_slot); - - let setup_log = note_setup_payload.encrypt_log(context, to, context.msg_sender()); - - // Using the x-coordinate as a hiding point slot is safe against someone else interfering with it because - // we have a guarantee that the public functions of the transaction are executed right after the private ones - // and for this reason the protocol guarantees that nobody can front-run us in consuming the hiding point. - // This guarantee would break if `finalize_transfer_to_private` was not called in the same transaction. This - // however is not the flow we are currently concerned with. To support the multi-transaction flow we could - // introduce a `from` function argument, hash the x-coordinate with it and then repeat the hashing in - // `finalize_transfer_to_private`. - // - // We can also be sure that the `hiding_point_slot` will not overwrite any other value in the storage because - // in our state variables we derive slots using a different hash function from multi scalar multiplication - // (MSM). - let hiding_point_slot = note_setup_payload.hiding_point.x; - - // We don't need to perform a check that the value overwritten by `_store_point_in_transient_storage_unsafe` - // is zero because the slot is the x-coordinate of the hiding point and hence we could only overwrite - // the value in the slot with the same value. This makes usage of the `unsafe` method safe. - NFT::at(context.this_address()) - ._store_payload_in_transient_storage_unsafe( - hiding_point_slot, - note_setup_payload.hiding_point, - setup_log, - ) - .enqueue(context); - - hiding_point_slot + ) -> PartialNFTNote { + // We create a partial note with unpopulated/zero token id for 'to' + let partial_note = NFTNote::partial( + to, + storage.private_nfts.at(to).storage_slot, + context, + to, + context.msg_sender(), + ); + + NFT::at(context.this_address())._store_nft_set_partial_note(partial_note).enqueue(context); + + partial_note } // docs:end:prepare_private_balance_increase - // TODO(#9375): Having to define the note log length here is very unfortunate as it's basically impossible for - // users to derive manually. This will however go away once we have a real transient storage since we will not need - // the public call and instead we would do something like `context.transient_storage_write(slot, payload)` and that - // will allow us to use generics and hence user will not need to define it explicitly. We cannot use generics here - // as it is an entrypoint function. // docs:start:store_payload_in_transient_storage_unsafe #[public] #[internal] - fn _store_payload_in_transient_storage_unsafe( - slot: Field, - point: Point, - setup_log: [Field; 9], - ) { - context.storage_write(slot, point); - context.storage_write(slot + aztec::protocol_types::point::POINT_LENGTH as Field, setup_log); + fn _store_nft_set_partial_note(partial_note: PartialNFTNote) { + // We store the partial note in a slot equal to its commitment. 
This is safe because the commitment is computed + // using a generator different from the one used to compute storage slots, so there can be no collisions. + // We could consider storing all pending partial notes in e.g. some array, but ultimately this is pointless: all + // we need to verify is that the note is valid. + context.storage_write(partial_note.commitment(), true); } // docs:end:store_payload_in_transient_storage_unsafe /// Finalizes a transfer of NFT with `token_id` from public balance of `from` to a private balance of `to`. /// The transfer must be prepared by calling `prepare_private_balance_increase` first and the resulting - /// `hiding_point_slot` must be passed as an argument to this function. + /// `partial_note` must be passed as an argument to this function. // docs:start:finalize_transfer_to_private #[public] - fn finalize_transfer_to_private(token_id: Field, hiding_point_slot: Field) { + fn finalize_transfer_to_private(token_id: Field, partial_note: PartialNFTNote) { let from = context.msg_sender(); - _finalize_transfer_to_private(from, token_id, hiding_point_slot, &mut context, storage); + _finalize_transfer_to_private(from, token_id, partial_note, &mut context, storage); } // docs:end:finalize_transfer_to_private @@ -276,9 +239,9 @@ pub contract NFT { fn _finalize_transfer_to_private_unsafe( from: AztecAddress, token_id: Field, - hiding_point_slot: Field, + partial_note: PartialNFTNote, ) { - _finalize_transfer_to_private(from, token_id, hiding_point_slot, &mut context, storage); + _finalize_transfer_to_private(from, token_id, partial_note, &mut context, storage); } // docs:end:finalize_transfer_to_private_unsafe @@ -286,7 +249,7 @@ pub contract NFT { fn _finalize_transfer_to_private( from: AztecAddress, token_id: Field, - hiding_point_slot: Field, + partial_note: PartialNFTNote, context: &mut PublicContext, storage: Storage<&mut PublicContext>, ) { @@ -296,12 +259,12 @@ pub contract NFT { // Set the public NFT owner to zero public_owners_storage.write(AztecAddress::zero()); - // Finalize the partial note with the `token_id` - let finalization_payload = - NFTNote::finalization_payload().new(context, hiding_point_slot, token_id); - - // At last we emit the note hash and the final log - finalization_payload.emit(); + // We verify that the partial note we're completing is valid (i.e. it uses the correct state variable's storage + // slot, and it is internally consistent). We *could* clear the storage since each partial note should only be + // used once, but since the AVM offers no gas refunds for doing so this would just make the transaction be more + // expensive. 
+ assert(context.storage_read(partial_note.commitment()), "Invalid partial note"); + partial_note.complete(token_id, context); } /** diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_private.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_private.nr index 12380e61cbe..3418564242b 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_private.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/test/transfer_to_private.nr @@ -1,8 +1,5 @@ -use crate::{NFT, test::utils, types::nft_note::NFTNote}; -use dep::aztec::{ - oracle::random::random, prelude::AztecAddress, - protocol_types::storage::map::derive_storage_slot_in_map, -}; +use crate::{NFT, test::utils, types::nft_note::PartialNFTNote}; +use dep::aztec::{oracle::random::random, prelude::AztecAddress}; use std::test::OracleMock; /// Internal orchestration means that the calls to `prepare_private_balance_increase` @@ -38,12 +35,12 @@ unconstrained fn transfer_to_private_external_orchestration() { let _ = OracleMock::mock("getRandomField").returns(note_randomness); // We prepare the transfer - let hiding_point_slot: Field = NFT::at(nft_contract_address) + let partial_note = NFT::at(nft_contract_address) .prepare_private_balance_increase(recipient) .call(&mut env.private()); // Finalize the transfer of the NFT (message sender owns the NFT in public) - NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, hiding_point_slot).call( + NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, partial_note).call( &mut env.public(), ); @@ -56,17 +53,17 @@ unconstrained fn transfer_to_private_external_orchestration() { utils::assert_owns_public_nft(env, nft_contract_address, AztecAddress::zero(), token_id); } -#[test(should_fail_with = "transfer not prepared")] +#[test(should_fail_with = "Invalid partial note")] unconstrained fn transfer_to_private_transfer_not_prepared() { // Setup without account contracts. We are not using authwits here, so dummy accounts are enough let (env, nft_contract_address, _, _, token_id) = utils::setup_and_mint(/* with_account_contracts */ false); - // Transfer was not prepared so we can use random value for the hiding point slot - let hiding_point_slot = random(); + // Transfer was not prepared so we can use random value for the partial note + let partial_note = PartialNFTNote { commitment: random() }; // Try finalizing the transfer without preparing it - NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, hiding_point_slot).call( + NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, partial_note).call( &mut env.public(), ); } @@ -80,13 +77,13 @@ unconstrained fn transfer_to_private_failure_not_an_owner() { // (For this specific test we could set a random value for the commitment and not do the call to `prepare...` // as the NFT owner check is before we use the value but that would made the test less robust against changes // in the contract.) 
- let hiding_point_slot: Field = NFT::at(nft_contract_address) + let partial_note = NFT::at(nft_contract_address) .prepare_private_balance_increase(not_owner) .call(&mut env.private()); // Try transferring someone else's public NFT env.impersonate(not_owner); - NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, hiding_point_slot).call( + NFT::at(nft_contract_address).finalize_transfer_to_private(token_id, partial_note).call( &mut env.public(), ); } diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/types.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/types.nr index 26ed97fbe8a..21eb0341a07 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/types.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/types.nr @@ -1 +1 @@ -mod nft_note; +pub mod nft_note; diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr index 77baf7856a2..d5eb63638c8 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr @@ -1,18 +1,93 @@ -use aztec::{macros::notes::partial_note, oracle::random::random, prelude::AztecAddress}; +use dep::aztec::{ + context::{PrivateContext, PublicContext}, + encrypted_logs::log_assembly_strategies::default_aes128, + keys::getters::{get_nsk_app, get_public_keys}, + macros::notes::custom_note, + note::note_interface::{NoteHash, NoteType}, + oracle::random::random, + protocol_types::{ + address::AztecAddress, + constants::{GENERATOR_INDEX__NOTE_HASH, GENERATOR_INDEX__NOTE_NULLIFIER}, + hash::poseidon2_hash_with_separator, + traits::{Deserialize, Hash, Packable, Serialize}, + utils::arrays::array_concat, + }, +}; + +// NFTNote supports partial notes, i.e. the ability to create an incomplete note in private, hiding certain values (the +// owner, storage slot and randomness), and then completing the note in public with the ones missing (the token id). +// Partial notes are being actively developed and are not currently fully supported via macros, and so we rely on the +// #[custom_note] macro to implement it manually, resulting in some boilerplate. This is expected to be unnecessary once +// macro support is expanded. // docs:start:nft_note -#[partial_note(quote { token_id})] -#[derive(Eq)] +/// A private note representing a token id associated to an account. +#[custom_note] +#[derive(Eq, Serialize)] pub struct NFTNote { - // ID of the token - token_id: Field, - // The owner of the note + // The ordering of these fields is important given that it must: + // a) match that of NFTPartialNotePrivateContent, and + // b) have the public field at the end + // Correct ordering is checked by the tests in this module. + + /// The owner of the note, i.e. the account whose nullifier secret key is required to compute the nullifier. owner: AztecAddress, - // Randomness of the note to protect against note hash preimage attacks + /// Random value, protects against note hash preimage attacks. randomness: Field, + /// The ID of the token represented by this note. + token_id: Field, } // docs:end:nft_note +impl NoteHash for NFTNote { + fn compute_note_hash(self, storage_slot: Field) -> Field { + // Partial notes can be implemented by having the note hash be either the result of multiscalar multiplication + // (MSM), or two rounds of poseidon. 
MSM results in more constraints and is only required when multiple variants
+        // of partial notes are supported. Because NFTNote has just one variant (where the token id is public), we use
+        // poseidon instead.
+
+        // We must compute the same note hash as would be produced by a partial note created and completed with the same
+        // values, so that notes all behave the same way regardless of how they were created. To achieve this, we
+        // perform both steps of the partial note computation.
+
+        // First we create the partial note from a commitment to the private content (including storage slot).
+        let private_content =
+            NFTPartialNotePrivateContent { owner: self.owner, randomness: self.randomness };
+        let partial_note =
+            PartialNFTNote { commitment: private_content.compute_partial_commitment(storage_slot) };
+
+        // Then compute the completion note hash. In a real partial note this step would be performed in public.
+        partial_note.compute_complete_note_hash(self.token_id)
+    }
+
+    // The nullifiers are nothing special - this is just the canonical implementation that would be injected by the
+    // #[note] macro.
+
+    fn compute_nullifier(
+        self,
+        context: &mut PrivateContext,
+        note_hash_for_nullify: Field,
+    ) -> Field {
+        let owner_npk_m = get_public_keys(self.owner).npk_m;
+        let owner_npk_m_hash = owner_npk_m.hash();
+        let secret = context.request_nsk_app(owner_npk_m_hash);
+        poseidon2_hash_with_separator(
+            [note_hash_for_nullify, secret],
+            GENERATOR_INDEX__NOTE_NULLIFIER,
+        )
+    }
+
+    unconstrained fn compute_nullifier_unconstrained(self, note_hash_for_nullify: Field) -> Field {
+        let owner_npk_m = get_public_keys(self.owner).npk_m;
+        let owner_npk_m_hash = owner_npk_m.hash();
+        let secret = get_nsk_app(owner_npk_m_hash);
+        poseidon2_hash_with_separator(
+            [note_hash_for_nullify, secret],
+            GENERATOR_INDEX__NOTE_NULLIFIER,
+        )
+    }
+}
+
 impl NFTNote {
     pub fn new(token_id: Field, owner: AztecAddress) -> Self {
         // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing,
@@ -20,6 +95,226 @@ impl NFTNote {
         // the full note pre-image anyway, and so the recipient already trusts them to not disclose this
         // information. We can therefore assume that the sender will cooperate in the random value generation.
         let randomness = unsafe { random() };
-        NFTNote { token_id, owner, randomness }
+        Self { token_id, owner, randomness }
+    }
+
+    pub fn get_token_id(self) -> Field {
+        self.token_id
+    }
+
+    /// Creates a partial note that will hide the owner and storage slot but not the token id, since the note will be
+    /// later completed in public. This is a powerful technique for scenarios in which the token id cannot be known in
+    /// private (e.g. because it depends on some public state, such as a DEX).
+    ///
+    /// The returned `PartialNFTNote` value must be sent to public execution via a secure channel, since it is not
+    /// possible to verify the integrity of its contents due to it hiding information. The recommended ways to do this
+    /// are to retrieve it from public storage, or to receive it in an internal public function call.
+    ///
+    /// Each partial note should only be used once, since otherwise multiple notes would be linked together and known to
+    /// belong to the same owner.
+    ///
+    /// As part of the partial note creation process, a log will be sent to `recipient` from `sender` so that they can
+    /// discover the note. `recipient` will typically be the same as `owner`.
+    pub fn partial(
+        owner: AztecAddress,
+        storage_slot: Field,
+        context: &mut PrivateContext,
+        recipient: AztecAddress,
+        sender: AztecAddress,
+    ) -> PartialNFTNote {
+        // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing,
+        // so a malicious sender could use non-random values to make the note less private. But they already know
+        // the full note pre-image anyway, and so the recipient already trusts them to not disclose this
+        // information. We can therefore assume that the sender will cooperate in the random value generation.
+        let randomness = unsafe { random() };
+
+        // We create a commitment to the private data, which we then use to construct the log we send to the recipient.
+        let commitment = NFTPartialNotePrivateContent { owner, randomness }
+            .compute_partial_commitment(storage_slot);
+
+        // Our partial note log encoding scheme includes a field with the tag of the public completion log, and we use
+        // the commitment as the tag. This is good for multiple reasons:
+        // - the commitment is uniquely tied to this partial note
+        // - the commitment is already public information, so we're not revealing anything else
+        // - we don't need to create any additional information, private or public, for the tag
+        // - other contracts cannot impersonate us and emit logs with the same tag due to public log siloing
+        let private_log_content = PrivateNFTPartialNotePrivateLogContent {
+            owner,
+            randomness,
+            public_log_tag: commitment,
+        };
+
+        // TODO: we're abusing the note encoding scheme by computing the log for a fake note type with such a note type
+        // id that the recipient will realize that these are the private fields of a partial note. Ideally we'd not rely
+        // on this crude mechanism and we'd instead compute it as a proper event log. However, given the current state
+        // of the log library it's far easier to do it this way.
+        let encrypted_log = default_aes128::note::compute_log(
+            *context,
+            private_log_content,
+            storage_slot,
+            recipient,
+            sender,
+        );
+        context.emit_private_log(encrypted_log);
+
+        PartialNFTNote { commitment }
+    }
+}
+
+/// The private content of a partial NFTNote, i.e. the fields that will remain private. All other note fields will be
+/// made public.
+#[derive(Packable)]
+struct NFTPartialNotePrivateContent {
+    // The ordering of these fields is important given that it must match that of NFTNote.
+    // Correct ordering is checked by the tests in this module.
+    owner: AztecAddress,
+    randomness: Field,
+}
+
+impl NFTPartialNotePrivateContent {
+    fn compute_partial_commitment(self, storage_slot: Field) -> Field {
+        // Here we commit to all private values, including the storage slot.
+        poseidon2_hash_with_separator(
+            array_concat(self.pack(), [storage_slot]),
+            GENERATOR_INDEX__NOTE_HASH,
+        )
+    }
+}
+
+#[derive(Packable)]
+struct PrivateNFTPartialNotePrivateLogContent {
+    // The ordering of these fields is important given that it must:
+    // a) match that of NFTNote, and
+    // b) have the public log tag at the beginning
+    // Correct ordering is checked by the tests in this module.
+    public_log_tag: Field,
+    owner: AztecAddress,
+    randomness: Field,
+}
+
+impl NoteType for PrivateNFTPartialNotePrivateLogContent {
+    fn get_id() -> Field {
+        // We abuse the fact that note type ids are 7 bits long to use the 8th bit to indicate the log corresponds to a
+        // partial note. Ideally we'd use proper events with selectors, but those are not handled well at the moment.
+        NFTNote::get_id() + 128
+    }
+}
+
+/// A partial instance of an NFTNote. This value represents a private commitment to the owner, randomness and storage
+/// slot, but the token id field has not yet been set. A partial note can be completed in public with the `complete`
+/// function (revealing the token id to the public), resulting in an NFTNote that can be used like any other one (except
+/// of course that its token id is publicly known).
+#[derive(Packable, Serialize, Deserialize)]
+pub struct PartialNFTNote {
+    commitment: Field,
+}
+
+impl PartialNFTNote {
+    pub fn commitment(self) -> Field {
+        self.commitment
+    }
+}
+
+impl PartialNFTNote {
+    /// Completes the partial note, creating a new note that can be used like any other NFTNote.
+    pub fn complete(self, token_id: Field, context: &mut PublicContext) {
+        // We need to do two things:
+        // - emit a public log containing the public fields (the token id). The contract will later find it by
+        // searching for the expected tag (which is simply the partial note commitment).
+        // - insert the completion note hash (i.e. the hash of the note) into the note hash tree. This is typically
+        // only done in private to hide the preimage of the hash that is inserted, but completed partial notes are
+        // inserted in public as the public values are provided and the note hash computed.
+        context.emit_public_log(self.compute_note_completion_log(token_id));
+        context.push_note_hash(self.compute_complete_note_hash(token_id));
+    }
+
+    fn compute_note_completion_log(self, token_id: Field) -> [Field; 2] {
+        // The first field of this log must be the tag that the recipient of the partial note private field logs
+        // expects, which is equal to the partial note commitment.
+        [self.commitment, token_id]
+    }
+
+    fn compute_complete_note_hash(self, token_id: Field) -> Field {
+        // Here we finalize the note hash by including the (public) token id into the partial note commitment. Note that
+        // we use the same generator index as we used for the first round of poseidon - this is not an issue.
+        poseidon2_hash_with_separator([self.commitment, token_id], GENERATOR_INDEX__NOTE_HASH)
+    }
+}
+
+mod test {
+    use super::{
+        NFTNote, NFTPartialNotePrivateContent, PartialNFTNote,
+        PrivateNFTPartialNotePrivateLogContent,
+    };
+    use dep::aztec::{
+        note::note_interface::NoteHash,
+        protocol_types::{
+            address::AztecAddress,
+            traits::{FromField, Packable},
+            utils::arrays::array_concat,
+        },
+        utils::array::subarray,
+    };
+
+    global token_id: Field = 17;
+    global randomness: Field = 42;
+    global owner: AztecAddress = AztecAddress::from_field(50);
+    global storage_slot: Field = 13;
+
+    #[test]
+    fn note_hash_matches_completed_partial_note_hash() {
+        // Tests that an NFTNote has the same note hash as a PartialNFTNote created and then completed with the same
+        // private values. This requires the same hash function to be used in both flows, with the fields in the
+        // same order.
+
+        let note = NFTNote { token_id, randomness, owner };
+        let note_hash = note.compute_note_hash(storage_slot);
+
+        let partial_note_private_content = NFTPartialNotePrivateContent { owner, randomness };
+
+        let partial_note = PartialNFTNote {
+            commitment: partial_note_private_content.compute_partial_commitment(storage_slot),
+        };
+        let completed_partial_note_hash = partial_note.compute_complete_note_hash(token_id);
+
+        assert_eq(note_hash, completed_partial_note_hash);
+    }
+
+    #[test]
+    fn unpack_from_partial_note_encoding() {
+        // Tests that the packed representation of a regular NFTNote can be reconstructed given the partial note
+        // private fields log and the public completion log, ensuring the recipient will be able to compute the
+        // completed note as if it were a regular NFTNote.
+
+        let note = NFTNote { token_id, randomness, owner };
+
+        let partial_note_private_content = NFTPartialNotePrivateContent { owner, randomness };
+        let commitment = partial_note_private_content.compute_partial_commitment(storage_slot);
+
+        let private_log_content = PrivateNFTPartialNotePrivateLogContent {
+            owner,
+            randomness,
+            public_log_tag: commitment,
+        };
+        let partial_note = PartialNFTNote { commitment };
+
+        // The first field of the partial note private content is the public completion log tag, so it should match the
+        // first field of the public log.
+        assert_eq(
+            private_log_content.pack()[0],
+            partial_note.compute_note_completion_log(token_id)[0],
+        );
+
+        // Then we extract all fields except the first of both logs (i.e. the public log tag), and combine them to
+        // produce the note's packed representation. This requires that the members of the intermediate structs are in
+        // the same order as in NFTNote.
+        let private_log_without_public_tag: [_; 2] = subarray(private_log_content.pack(), 1);
+        let public_log_without_tag: [_; 1] =
+            subarray(partial_note.compute_note_completion_log(token_id), 1);
+
+        assert_eq(
+            array_concat(private_log_without_public_tag, public_log_without_tag),
+            note.pack(),
+        );
+    }
+}
diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr
index f6012b35ada..12fc1dfb95b 100644
--- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr
+++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr
@@ -24,17 +24,17 @@ pub contract Token {
             event::encode_and_encrypt_event_unconstrained,
             note::{encode_and_encrypt_note, encode_and_encrypt_note_unconstrained},
         },
+        event::event_interface::EventInterface,
         macros::{
             events::event,
             functions::{initializer, internal, private, public, view},
             storage::storage,
         },
-        oracle::random::random,
         prelude::{AztecAddress, Map, PublicContext, PublicImmutable, PublicMutable},
-        protocol_types::{point::Point, traits::Serialize},
+        protocol_types::traits::Serialize,
     };
-    use dep::uint_note::uint_note::UintNote;
+    use dep::uint_note::uint_note::{PartialUintNote, UintNote};

     // docs:start:import_authwit
     use dep::authwit::auth::{
@@ -44,6 +44,7 @@ pub contract Token {
     // docs:end:import_authwit

     use crate::types::balance_set::BalanceSet;
+    use std::ops::{Add, Sub};

     // docs:end::imports

@@ -406,29 +407,27 @@ pub contract Token {
         let token = Token::at(context.this_address());

         // We prepare the private balance increase (the partial note).
- let hiding_point_slot = _prepare_private_balance_increase(from, to, &mut context, storage); + let partial_note = _prepare_private_balance_increase(from, to, &mut context, storage); // At last we finalize the transfer. Usage of the `unsafe` method here is safe because we set the `from` // function argument to a message sender, guaranteeing that he can transfer only his own tokens. - token._finalize_transfer_to_private_unsafe(from, amount, hiding_point_slot).enqueue( - &mut context, - ); + token._finalize_transfer_to_private_unsafe(from, amount, partial_note).enqueue(&mut context); } // docs:end:transfer_to_private // docs:start:prepare_private_balance_increase /// Prepares an increase of private balance of `to` (partial note). The increase needs to be finalized by calling - /// some of the finalization functions (`finalize_transfer_to_private`, `finalize_mint_to_private`). - /// Returns a hiding point slot. + /// some of the finalization functions (`finalize_transfer_to_private`, `finalize_mint_to_private`) with the + /// returned partial note. #[private] - fn prepare_private_balance_increase(to: AztecAddress, from: AztecAddress) -> Field { + fn prepare_private_balance_increase(to: AztecAddress, from: AztecAddress) -> PartialUintNote { // ideally we'd not have `from` here, but we do need a `from` address to produce a tagging secret with `to`. _prepare_private_balance_increase(from, to, &mut context, storage) } // docs:end:prepare_private_balance_increase /// This function exists separately from `prepare_private_balance_increase` solely as an optimization as it allows - /// us to have it inlined in the `transfer_to_private` function which results in one less kernel iteration. + /// us to have it inlined in the `transfer_to_private` function which results in one fewer kernel iteration. /// /// TODO(#9180): Consider adding macro support for functions callable both as an entrypoint and as an internal /// function. @@ -438,58 +437,35 @@ pub contract Token { to: AztecAddress, context: &mut PrivateContext, storage: Storage<&mut PrivateContext>, - ) -> Field { - let to_note_slot = storage.balances.at(to).set.storage_slot; - - // We create a setup payload with unpopulated/zero `amount` for 'to' - // TODO(#7775): Manually fetching the randomness here is not great. If we decide to include randomness in all - // notes we could just inject it in macros. - - // Safety: We use the randomness to preserve the privacy of the note recipient by preventing brute-forcing, - // so a malicious sender could use non-random values to make the note less private. But they already know - // the full note pre-image anyway, and so the recipient already trusts them to not disclose this - // information. We can therefore assume that the sender will cooperate in the random value generation. - let note_randomness = unsafe { random() }; - let note_setup_payload = UintNote::setup_payload().new(to, note_randomness, to_note_slot); - - // We get the keys and encrypt the log of the note - let setup_log = note_setup_payload.encrypt_log(context, to, from); - - // Using the x-coordinate as a hiding point slot is safe against someone else interfering with it because - // we have a guarantee that the public functions of the transaction are executed right after the private ones - // and for this reason the protocol guarantees that nobody can front-run us in consuming the hiding point. - // This guarantee would break if `finalize_transfer_to_private` was not called in the same transaction. 
This - // however is not the flow we are currently concerned with. To support the multi-transaction flow we could - // introduce a `from` function argument, hash the x-coordinate with it and then repeat the hashing in - // `finalize_transfer_to_private`. - // - // We can also be sure that the `hiding_point_slot` will not overwrite any other value in the storage because - // in our state variables we derive slots using a different hash function from multi scalar multiplication - // (MSM). - let hiding_point_slot = note_setup_payload.hiding_point.x; - - // We don't need to perform a check that the value overwritten by `_store_point_in_transient_storage_unsafe` - // is zero because the slot is the x-coordinate of the hiding point and hence we could only overwrite - // the value in the slot with the same value. This makes usage of the `unsafe` method safe. - Token::at(context.this_address()) - ._store_payload_in_transient_storage_unsafe( - hiding_point_slot, - note_setup_payload.hiding_point, - setup_log, - ) - .enqueue(context); - - hiding_point_slot + ) -> PartialUintNote { + let partial_note = UintNote::partial( + to, + storage.balances.at(to).set.storage_slot, + context, + to, + from, + ); + + // We can't simply return the partial note because we won't be able to later on verify that it was created + // correctly (e.g. that the storage slot corresponds to the owner, and that we're using the balance set and not + // another state variable) once this information is hidden in the partial note commitment. We therefore store + // the partial note in our own public storage, so that we can later check that we're only completing correctly + // created partial notes. + Token::at(context.this_address())._store_balances_set_partial_note(partial_note).enqueue( + context, + ); + + partial_note } // docs:start:finalize_transfer_to_private /// Finalizes a transfer of token `amount` from public balance of `from` to a private balance of `to`. /// The transfer must be prepared by calling `prepare_private_balance_increase` first and the resulting - /// `hiding_point_slot` must be passed as an argument to this function. + /// `partial_note` must be passed as an argument to this function. 
#[public] - fn finalize_transfer_to_private(amount: u128, hiding_point_slot: Field) { + fn finalize_transfer_to_private(amount: u128, partial_note: PartialUintNote) { let from = context.msg_sender(); - _finalize_transfer_to_private(from, amount, hiding_point_slot, &mut context, storage); + _finalize_transfer_to_private(from, amount, partial_note, &mut context, storage); } // docs:end:finalize_transfer_to_private @@ -502,9 +478,9 @@ pub contract Token { fn _finalize_transfer_to_private_unsafe( from: AztecAddress, amount: u128, - hiding_point_slot: Field, + partial_note: PartialUintNote, ) { - _finalize_transfer_to_private(from, amount, hiding_point_slot, &mut context, storage); + _finalize_transfer_to_private(from, amount, partial_note, &mut context, storage); } // docs:end:finalize_transfer_to_private_unsafe @@ -512,7 +488,7 @@ pub contract Token { fn _finalize_transfer_to_private( from: AztecAddress, amount: u128, - hiding_point_slot: Field, + partial_note: PartialUintNote, context: &mut PublicContext, storage: Storage<&mut PublicContext>, ) { @@ -520,12 +496,12 @@ pub contract Token { let from_balance = storage.public_balances.at(from).read().sub(amount); storage.public_balances.at(from).write(from_balance); - // Then we finalize the partial note with the `amount` - let finalization_payload = - UintNote::finalization_payload().new(context, hiding_point_slot, amount); - - // At last we emit the note hash and the final log - finalization_payload.emit(); + // We verify that the partial note we're completing is valid (i.e. it uses the correct state variable's storage + // slot, and it is internally consistent). We *could* clear the storage since each partial note should only be + // used once, but since the AVM offers no gas refunds for doing so this would just make the transaction be more + // expensive. + assert(context.storage_read(partial_note.commitment()), "Invalid partial note"); + partial_note.complete(amount, context); } // docs:start:mint_to_private @@ -540,30 +516,30 @@ pub contract Token { let token = Token::at(context.this_address()); // We prepare the partial note to which we'll "send" the minted amount. - let hiding_point_slot = _prepare_private_balance_increase(from, to, &mut context, storage); + let partial_note = _prepare_private_balance_increase(from, to, &mut context, storage); // At last we finalize the mint. Usage of the `unsafe` method here is safe because we set the `from` // function argument to a message sender, guaranteeing that only a message sender with minter permissions // can successfully execute the function. - token - ._finalize_mint_to_private_unsafe(context.msg_sender(), amount, hiding_point_slot) - .enqueue(&mut context); + token._finalize_mint_to_private_unsafe(context.msg_sender(), amount, partial_note).enqueue( + &mut context, + ); } // docs:end:mint_to_private // docs:start:finalize_mint_to_private /// Finalizes a mint of token `amount` to a private balance of `to`. The mint must be prepared by calling /// `prepare_private_balance_increase` first and the resulting - /// `hiding_point_slot` must be passed as an argument to this function. + /// `partial_note` must be passed as an argument to this function. /// /// Note: This function is only an optimization as it could be replaced by a combination of `mint_to_public` /// and `finalize_transfer_to_private`. It is however used very commonly so it makes sense to optimize it /// (e.g. used during token bridging, in AMM liquidity token etc.). 
#[public] - fn finalize_mint_to_private(amount: u128, hiding_point_slot: Field) { + fn finalize_mint_to_private(amount: u128, partial_note: PartialUintNote) { assert(storage.minters.at(context.msg_sender()).read(), "caller is not minter"); - _finalize_mint_to_private(amount, hiding_point_slot, &mut context, storage); + _finalize_mint_to_private(amount, partial_note, &mut context, storage); } // docs:end:finalize_mint_to_private @@ -573,18 +549,18 @@ pub contract Token { fn _finalize_mint_to_private_unsafe( from: AztecAddress, amount: u128, - hiding_point_slot: Field, + partial_note: PartialUintNote, ) { // We check the minter permissions as it was not done in `mint_to_private` function. assert(storage.minters.at(from).read(), "caller is not minter"); - _finalize_mint_to_private(amount, hiding_point_slot, &mut context, storage); + _finalize_mint_to_private(amount, partial_note, &mut context, storage); } // docs:end:finalize_mint_to_private_unsafe #[contract_library_method] fn _finalize_mint_to_private( amount: u128, - hiding_point_slot: Field, + partial_note: PartialUintNote, context: &mut PublicContext, storage: Storage<&mut PublicContext>, ) { @@ -592,28 +568,22 @@ pub contract Token { let supply = storage.total_supply.read().add(amount); storage.total_supply.write(supply); - // Then we finalize the partial note with the `amount` - let finalization_payload = - UintNote::finalization_payload().new(context, hiding_point_slot, amount); - - // At last we emit the note hash and the final log - finalization_payload.emit(); + // We verify that the partial note we're completing is valid (i.e. it uses the correct state variable's storage + // slot, and it is internally consistent). We *could* clear the storage since each partial note should only be + // used once, but since the AVM offers no gas refunds for doing so this would just make the transaction be more + // expensive. + assert(context.storage_read(partial_note.commitment()), "Invalid partial note"); + partial_note.complete(amount, context); } - // TODO(#9375): Having to define the note log length here is very unfortunate as it's basically impossible for - // users to derive manually. This will however go away once we have a real transient storage since we will not need - // the public call and instead we would do something like `context.transient_storage_write(slot, payload)` and that - // will allow us to use generics and hence user will not need to define it explicitly. We cannot use generics here - // as it is an entrypoint function. #[public] #[internal] - fn _store_payload_in_transient_storage_unsafe( - slot: Field, - point: Point, - setup_log: [Field; 9], - ) { - context.storage_write(slot, point); - context.storage_write(slot + aztec::protocol_types::point::POINT_LENGTH as Field, setup_log); + fn _store_balances_set_partial_note(partial_note: PartialUintNote) { + // We store the partial note in a slot equal to its commitment. This is safe because the commitment is computed + // using a generator different from the one used to compute storage slots, so there can be no collisions. + // We could consider storing all pending partial notes in e.g. some array, but ultimately this is pointless: all + // we need to verify is that the note is valid. 
+ context.storage_write(partial_note.commitment(), true); } /// Internal /// diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr index 0f9e64693a6..1434b5e9487 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr @@ -1,5 +1,6 @@ use crate::{test::utils, Token}; use dep::aztec::oracle::random::random; +use dep::uint_note::uint_note::PartialUintNote; use std::test::OracleMock; /// Internal orchestration means that the calls to `prepare_private_balance_increase` @@ -32,12 +33,12 @@ unconstrained fn transfer_to_private_external_orchestration() { let _ = OracleMock::mock("getRandomField").returns(note_randomness); // We prepare the transfer - let hiding_point_slot: Field = Token::at(token_contract_address) + let partial_uint_note = Token::at(token_contract_address) .prepare_private_balance_increase(recipient, owner) .call(&mut env.private()); // Finalize the transfer of the tokens (message sender owns the tokens in public) - Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + Token::at(token_contract_address).finalize_transfer_to_private(amount, partial_uint_note).call( &mut env.public(), ); @@ -47,17 +48,17 @@ unconstrained fn transfer_to_private_external_orchestration() { utils::check_private_balance(token_contract_address, recipient, amount); } -#[test(should_fail_with = "transfer not prepared")] +#[test(should_fail_with = "Invalid partial note")] unconstrained fn transfer_to_private_transfer_not_prepared() { // Setup without account contracts. We are not using authwits here, so dummy accounts are enough let (env, token_contract_address, _, _, amount) = utils::setup_and_mint_to_public(/* with_account_contracts */ false); - // Transfer was not prepared so we can use random value for the hiding point slot - let hiding_point_slot = random(); + // Transfer was not prepared so we can use random value for the partial note + let partial_uint_note = PartialUintNote { commitment: random() }; // Try finalizing the transfer without preparing it - Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + Token::at(token_contract_address).finalize_transfer_to_private(amount, partial_uint_note).call( &mut env.public(), ); } @@ -71,13 +72,13 @@ unconstrained fn transfer_to_private_failure_not_an_owner() { // (For this specific test we could set a random value for the commitment and not do the call to `prepare...` // as the token balance check is before we use the value but that would made the test less robust against changes // in the contract.) 
- let hiding_point_slot: Field = Token::at(token_contract_address) + let partial_uint_note = Token::at(token_contract_address) .prepare_private_balance_increase(not_owner, owner) .call(&mut env.private()); // Try transferring someone else's token balance env.impersonate(not_owner); - Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + Token::at(token_contract_address).finalize_transfer_to_private(amount, partial_uint_note).call( &mut env.public(), ); } diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 1c25edec58a..7d23fa16ab7 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -383,26 +383,14 @@ export function describeArchiverDataStore( const makeTag = (blockNumber: number, txIndex: number, logIndex: number, isPublic = false) => new Fr((blockNumber * 100 + txIndex * 10 + logIndex) * (isPublic ? 123 : 1)); - // See parseLogFromPublic - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. - const makeLengthsField = (publicValuesLen: number, privateValuesLen: number) => { - const buf = Buffer.alloc(32); - buf.writeUint16BE(publicValuesLen, 27); - buf.writeUint16BE(privateValuesLen, 30); - return Fr.fromBuffer(buf); - }; - const makePrivateLog = (tag: Fr) => PrivateLog.fromFields([tag, ...times(PRIVATE_LOG_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i))]); - // The tag lives in field 1, not 0, of a public log - // See extractTaggedLogsFromPublic and noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr -> emit_log const makePublicLog = (tag: Fr) => PublicLog.fromFields([ AztecAddress.fromNumber(1).toField(), // log address - makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 3), // field 0 - tag, // field 1 - ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), // fields 2 to end + tag, // field 0 + ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), // fields 1 to end ]); const mockPrivateLogs = (blockNumber: number, txIndex: number) => { @@ -542,13 +530,11 @@ export function describeArchiverDataStore( const invalidLogs = [ PublicLog.fromFields([ AztecAddress.fromNumber(1).toField(), - makeLengthsField(2, 3), // This field claims we have 5 items, but we actually have more tag, ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), ]), PublicLog.fromFields([ AztecAddress.fromNumber(1).toField(), - makeLengthsField(2, PUBLIC_LOG_DATA_SIZE_IN_FIELDS), // This field claims we have more than the max items tag, ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), ]), diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index 3fab13064d3..be616670e0b 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -40,7 +40,7 @@ export class LogStore { this.#logsMaxPageSize = logsMaxPageSize; } - #extractTaggedLogsFromPrivate(block: L2Block) { + #extractTaggedLogs(block: L2Block) { const taggedLogs = new Map(); const dataStartIndexForBlock = block.header.state.partial.noteHashTree.nextAvailableLeafIndex - @@ -48,8 +48,11 @@ export class LogStore { 
block.body.txEffects.forEach((txEffect, txIndex) => { const txHash = txEffect.txHash; const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; + txEffect.privateLogs.forEach(log => { const tag = log.fields[0]; + this.#log.debug(`Found private log with tag ${tag.toString()} in block ${block.number}`); + const currentLogs = taggedLogs.get(tag.toString()) ?? []; currentLogs.push( new TxScopedL2Log( @@ -62,44 +65,11 @@ export class LogStore { ); taggedLogs.set(tag.toString(), currentLogs); }); - }); - return taggedLogs; - } - #extractTaggedLogsFromPublic(block: L2Block) { - const taggedLogs = new Map(); - const dataStartIndexForBlock = - block.header.state.partial.noteHashTree.nextAvailableLeafIndex - - block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX; - block.body.txEffects.forEach((txEffect, txIndex) => { - const txHash = txEffect.txHash; - const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; txEffect.publicLogs.forEach(log => { - // Check that each log stores 2 lengths in its first field. If not, it's not a tagged log: - const firstFieldBuf = log.log[0].toBuffer(); - // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me). - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. - if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) { - // See parseLogFromPublic - the first field of a tagged log is 5 bytes structured: - // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]] - this.#log.warn(`Skipping public log with invalid first field: ${log.log[0]}`); - return; - } - // Check that the length values line up with the log contents - const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE(); - const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3); - // Add 1 for the first field holding lengths - const totalLogLength = 1 + publicValuesLength + privateValuesLength; - // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length - if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) { - this.#log.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`); - return; - } - - // The first elt stores lengths as above => tag is in fields[1] - const tag = log.log[1]; + const tag = log.log[0]; + this.#log.debug(`Found public log with tag ${tag.toString()} in block ${block.number}`); - this.#log.debug(`Found tagged public log with tag ${tag.toString()} in block ${block.number}`); const currentLogs = taggedLogs.get(tag.toString()) ?? []; currentLogs.push( new TxScopedL2Log( @@ -123,7 +93,7 @@ export class LogStore { */ addLogs(blocks: L2Block[]): Promise { const taggedLogsToAdd = blocks - .flatMap(block => [this.#extractTaggedLogsFromPrivate(block), this.#extractTaggedLogsFromPublic(block)]) + .map(block => this.#extractTaggedLogs(block)) .reduce((acc, val) => { for (const [tag, logs] of val.entries()) { const currentLogs = acc.get(tag) ?? []; @@ -238,9 +208,7 @@ export class LogStore { */ async getLogsByTags(tags: Fr[]): Promise { const logs = await Promise.all(tags.map(tag => this.#logsByTag.getAsync(tag.toString()))); - return logs.map( - noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? 
[], - ); + return logs.map(logBuffers => logBuffers?.map(logBuffer => TxScopedL2Log.fromBuffer(logBuffer)) ?? []); } /** diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 5316dc6b854..3bec497c5a5 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -276,15 +276,18 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(true); } - #storeTaggedLogsFromPrivate(block: L2Block): void { + #storeTaggedLogs(block: L2Block): void { const dataStartIndexForBlock = block.header.state.partial.noteHashTree.nextAvailableLeafIndex - block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX; block.body.txEffects.forEach((txEffect, txIndex) => { const txHash = txEffect.txHash; const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; + txEffect.privateLogs.forEach(log => { const tag = log.fields[0]; + this.#log.verbose(`Storing private log with tag ${tag.toString()} from block ${block.number}`); + const currentLogs = this.taggedLogs.get(tag.toString()) || []; this.taggedLogs.set(tag.toString(), [ ...currentLogs, @@ -293,41 +296,11 @@ export class MemoryArchiverStore implements ArchiverDataStore { const currentTagsInBlock = this.logTagsPerBlock.get(block.number) || []; this.logTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]); }); - }); - } - #storeTaggedLogsFromPublic(block: L2Block): void { - const dataStartIndexForBlock = - block.header.state.partial.noteHashTree.nextAvailableLeafIndex - - block.body.txEffects.length * MAX_NOTE_HASHES_PER_TX; - block.body.txEffects.forEach((txEffect, txIndex) => { - const txHash = txEffect.txHash; - const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; txEffect.publicLogs.forEach(log => { - // Check that each log stores 3 lengths in its first field. If not, it's not a tagged log: - // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me). - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. 
- const firstFieldBuf = log.log[0].toBuffer(); - if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) { - // See parseLogFromPublic - the first field of a tagged log is 8 bytes structured: - // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]] - this.#log.warn(`Skipping public log with invalid first field: ${log.log[0]}`); - return; - } - // Check that the length values line up with the log contents - const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE(); - const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3); - // Add 1 for the first field holding lengths - const totalLogLength = 1 + publicValuesLength + privateValuesLength; - // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length - if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) { - this.#log.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`); - return; - } + const tag = log.log[0]; + this.#log.verbose(`Storing public log with tag ${tag.toString()} from block ${block.number}`); - // The first elt stores lengths => tag is in fields[1] - const tag = log.log[1]; - this.#log.verbose(`Storing public tagged log with tag ${tag.toString()} in block ${block.number}`); const currentLogs = this.taggedLogs.get(tag.toString()) || []; this.taggedLogs.set(tag.toString(), [ ...currentLogs, @@ -346,8 +319,7 @@ export class MemoryArchiverStore implements ArchiverDataStore { */ addLogs(blocks: L2Block[]): Promise { blocks.forEach(block => { - this.#storeTaggedLogsFromPrivate(block); - this.#storeTaggedLogsFromPublic(block); + this.#storeTaggedLogs(block); this.privateLogsPerBlock.set(block.number, block.body.txEffects.map(txEffect => txEffect.privateLogs).flat()); this.publicLogsPerBlock.set(block.number, block.body.txEffects.map(txEffect => txEffect.publicLogs).flat()); this.contractClassLogsPerBlock.set( @@ -566,8 +538,8 @@ export class MemoryArchiverStore implements ArchiverDataStore { * that tag. */ getLogsByTags(tags: Fr[]): Promise { - const noteLogs = tags.map(tag => this.taggedLogs.get(tag.toString()) || []); - return Promise.resolve(noteLogs); + const logs = tags.map(tag => this.taggedLogs.get(tag.toString()) || []); + return Promise.resolve(logs); } /** diff --git a/yarn-project/end-to-end/src/e2e_nft.test.ts b/yarn-project/end-to-end/src/e2e_nft.test.ts index b39228d5643..fcbd17115df 100644 --- a/yarn-project/end-to-end/src/e2e_nft.test.ts +++ b/yarn-project/end-to-end/src/e2e_nft.test.ts @@ -66,36 +66,6 @@ describe('NFT', () => { const publicOwnerAfter = await nftContractAsUser1.methods.owner_of(TOKEN_ID).simulate(); expect(publicOwnerAfter).toEqual(AztecAddress.ZERO); - - // We should get 20 data writes setting values to 0 - 3 for note hiding point, 16 for partial log and 1 for public - // owner (we transfer to private so public owner is set to 0). Ideally we would have here only 1 data write as the - // 4 values change from zero to non-zero to zero in the tx and hence no write could be committed. This makes public - // writes squashing too expensive for transient storage. This however probably does not matter as I assume we will - // want to implement a real transient storage anyway. (Informed Leila about the potential optimization.) - // TODO(#9376): Re-enable the following check. 
- // const publicDataWritesValues = debugInfo!.publicDataWrites!.map(write => write.newValue.toBigInt()); - // expect(publicDataWritesValues).toEqual([ - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // 0n, - // ]); }); it('transfers in private', async () => { diff --git a/yarn-project/end-to-end/src/e2e_partial_notes.test.ts b/yarn-project/end-to-end/src/e2e_partial_notes.test.ts new file mode 100644 index 00000000000..627b9867c77 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_partial_notes.test.ts @@ -0,0 +1,43 @@ +import { type AccountWallet, type Logger } from '@aztec/aztec.js'; +import { type TokenContract } from '@aztec/noir-contracts.js/Token'; + +import { jest } from '@jest/globals'; + +import { deployToken, mintTokensToPrivate } from './fixtures/token_utils.js'; +import { setup } from './fixtures/utils.js'; + +const TIMEOUT = 120_000; + +describe('partial notes', () => { + jest.setTimeout(TIMEOUT); + + let teardown: () => Promise; + + let logger: Logger; + + let adminWallet: AccountWallet; + let liquidityProvider: AccountWallet; + + let token0: TokenContract; + + const INITIAL_TOKEN_BALANCE = 1_000_000_000n; + + beforeAll(async () => { + ({ + teardown, + wallets: [adminWallet, liquidityProvider], + logger, + } = await setup(2)); + + token0 = await deployToken(adminWallet, 0n, logger); + }); + + afterAll(() => teardown()); + + it('mint to private', async () => { + await mintTokensToPrivate(token0, adminWallet, liquidityProvider.getAddress(), INITIAL_TOKEN_BALANCE); + expect(await token0.methods.balance_of_private(liquidityProvider.getAddress()).simulate()).toEqual( + INITIAL_TOKEN_BALANCE, + ); + }); +}); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts index 7b12d1c1002..1c05ac6376a 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts @@ -18,8 +18,7 @@ describe('e2e_token_contract private transfer recursion', () => { }); async function mintNotes(noteAmounts: bigint[]): Promise { - // We mint only 3 notes in 1 transaction as that is the maximum public data writes we can squeeze into a tx. - // --> Minting one note requires 19 public data writes (16 for the note encrypted log, 3 for note hiding point). + // We mint only 3 notes in 1 transaction as we're limited by how many public data writes we can squeeze into a tx. const notesPerIteration = 3; for (let mintedNotes = 0; mintedNotes < noteAmounts.length; mintedNotes += notesPerIteration) { const toMint = noteAmounts.slice(mintedNotes, mintedNotes + notesPerIteration); diff --git a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts deleted file mode 100644 index 17c911040fe..00000000000 --- a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { ContractNotFoundError } from '@aztec/simulator/client'; -import type { L1NotePayload } from '@aztec/stdlib/logs'; -import { Note } from '@aztec/stdlib/note'; - -import type { PxeDatabase } from '../database/interfaces/pxe_database.js'; - -/** - * Merges privately and publicly delivered note values. 
- * @param db - PXE database used to fetch contract instance and artifact. - * @param payload - Payload corresponding to the note. - * @returns Note payload with public fields added. - */ -export async function getOrderedNoteItems( - db: PxeDatabase, - { contractAddress, noteTypeId, privateNoteValues, publicNoteValues }: L1NotePayload, -): Promise { - if (publicNoteValues.length === 0) { - return new Note(privateNoteValues); - } - - const instance = await db.getContractInstance(contractAddress); - if (!instance) { - throw new ContractNotFoundError( - `Could not find instance for ${contractAddress.toString()}. This should never happen here as the partial notes flow should be triggered only for non-deferred notes.`, - ); - } - - const artifact = await db.getContractArtifact(instance.currentContractClassId); - if (!artifact) { - throw new Error( - `Could not find artifact for contract class ${instance.currentContractClassId.toString()}. This should never happen here as the partial notes flow should be triggered only for non-deferred notes.`, - ); - } - - const noteFields = Object.values(artifact.notes).find(note => note.id.equals(noteTypeId))?.fields; - - if (!noteFields) { - throw new Error(`Could not find note fields for note type ${noteTypeId.toString()}.`); - } - - // We sort note fields by index so that we can iterate over them in order. - noteFields.sort((a, b) => a.index - b.index); - - // Now we insert the public fields into the note based on its indices defined in the ABI. - const modifiedNoteItems = [...privateNoteValues]; - let indexInPublicValues = 0; - for (let i = 0; i < noteFields.length; i++) { - const noteField = noteFields[i]; - if (noteField.nullable) { - if (i == noteFields.length - 1) { - // We are processing the last field so we simply insert the rest of the public fields at the end - modifiedNoteItems.push(...publicNoteValues.slice(indexInPublicValues)); - } else { - const noteFieldLength = noteFields[i + 1].index - noteField.index; - const publicValuesToInsert = publicNoteValues.slice(indexInPublicValues, indexInPublicValues + noteFieldLength); - indexInPublicValues += noteFieldLength; - // Now we insert the public fields at the note field index - modifiedNoteItems.splice(noteField.index, 0, ...publicValuesToInsert); - } - } - } - - return new Note(modifiedNoteItems); -} diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index 8a7374c6728..cefcb1b9a1e 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -45,7 +45,6 @@ import { TxHash } from '@aztec/stdlib/tx'; import { ContractDataProvider } from '../contract_data_provider/index.js'; import type { PxeDatabase } from '../database/index.js'; import { NoteDao } from '../database/note_dao.js'; -import { getOrderedNoteItems } from '../note_decryption_utils/add_public_values_to_payload.js'; import { WINDOW_HALF_SIZE, getIndexedTaggingSecretsForTheWindow, getInitialIndexesMap } from './tagging_utils.js'; /** @@ -506,31 +505,21 @@ export class PXEDataProvider implements ExecutionDataProvider { logsByTags.forEach((logsByTag, logIndex) => { if (logsByTag.length > 0) { - // Check that public logs have the correct contract address - const checkedLogsbyTag = logsByTag.filter( - l => !l.isFromPublic || PublicLog.fromBuffer(l.logData).contractAddress.equals(contractAddress), - ); - if (checkedLogsbyTag.length < logsByTag.length) { - const discarded = logsByTag.filter( - log => checkedLogsbyTag.find(filteredLog 
=> filteredLog.equals(log)) === undefined, - ); - this.log.warn( - `Discarded ${ - logsByTag.length - checkedLogsbyTag.length - } public logs with mismatched contract address ${contractAddress}:`, - discarded.map(l => PublicLog.fromBuffer(l.logData)), - ); + // Discard public logs + const filteredLogsbyTag = logsByTag.filter(l => !l.isFromPublic); + if (filteredLogsbyTag.length < logsByTag.length) { + this.log.warn(`Discarded ${logsByTag.filter(l => l.isFromPublic).length} public logs with matching tags`); } // The logs for the given tag exist so we store them for later processing - logsForRecipient.push(...checkedLogsbyTag); + logsForRecipient.push(...filteredLogsbyTag); // We retrieve the indexed tagging secret corresponding to the log as I need that to evaluate whether // a new largest index have been found. const secretCorrespondingToLog = secretsForTheWholeWindow[logIndex]; const initialIndex = initialIndexesMap[secretCorrespondingToLog.appTaggingSecret.toString()]; - this.log.debug(`Found ${checkedLogsbyTag.length} logs as recipient ${recipient}`, { + this.log.debug(`Found ${filteredLogsbyTag.length} logs as recipient ${recipient}`, { recipient, secret: secretCorrespondingToLog.appTaggingSecret, contractName, @@ -619,12 +608,14 @@ export class PXEDataProvider implements ExecutionDataProvider { const decrypted = []; for (const scopedLog of scopedLogs) { - const payload = scopedLog.isFromPublic - ? await L1NotePayload.decryptAsIncomingFromPublic(PublicLog.fromBuffer(scopedLog.logData), addressSecret) - : await L1NotePayload.decryptAsIncoming(PrivateLog.fromBuffer(scopedLog.logData), addressSecret); + if (scopedLog.isFromPublic) { + throw new Error('Attempted to decrypt public log'); + } + + const payload = await L1NotePayload.decryptAsIncoming(PrivateLog.fromBuffer(scopedLog.logData), addressSecret); if (!payload) { - this.log.verbose('Unable to decrypt log'); + this.log.warn('Unable to decrypt tagged log - was it not meant for us?'); continue; } @@ -632,8 +623,7 @@ export class PXEDataProvider implements ExecutionDataProvider { excludedIndices.set(scopedLog.txHash.toString(), new Set()); } - const note = await getOrderedNoteItems(this.db, payload); - const plaintext = [payload.storageSlot, payload.noteTypeId.toField(), ...note.items]; + const plaintext = [payload.storageSlot, payload.noteTypeId.toField(), ...payload.privateNoteValues]; decrypted.push({ plaintext, txHash: scopedLog.txHash, contractAddress: payload.contractAddress }); } diff --git a/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts b/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts index e491e2132fb..6950118152e 100644 --- a/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts +++ b/yarn-project/pxe/src/pxe_data_provider/pxe_data_provider.test.ts @@ -76,10 +76,9 @@ class MockNoteRequest { } get snippetOfNoteDao() { - const payload = L1NotePayload.fromIncomingBodyPlaintextContractAndPublicValues( + const payload = L1NotePayload.fromIncomingBodyPlaintextContract( this.logPayload.incomingBodyPlaintext, this.logPayload.contractAddress, - [], )!; return { note: new Note(payload.privateNoteValues), diff --git a/yarn-project/stdlib/src/logs/l1_payload/l1_note_payload.ts b/yarn-project/stdlib/src/logs/l1_payload/l1_note_payload.ts index ab7d86331b7..4c9ab923d33 100644 --- a/yarn-project/stdlib/src/logs/l1_payload/l1_note_payload.ts +++ b/yarn-project/stdlib/src/logs/l1_payload/l1_note_payload.ts @@ -6,7 +6,6 @@ import { NoteSelector } from '../../abi/note_selector.js'; import { 
AztecAddress } from '../../aztec-address/index.js'; import { Vector } from '../../types/index.js'; import type { PrivateLog } from '../private_log.js'; -import { PublicLog } from '../public_log.js'; import { EncryptedLogPayload } from './encrypted_log_payload.js'; /** @@ -30,22 +29,13 @@ export class L1NotePayload { public noteTypeId: NoteSelector, /** * Note values delivered encrypted. - * @dev Note that to recreate the correct note we need to merge privateNoteValues and publicNoteValues. To do that - * we need access to the contract ABI (that is done in the NoteProcessor). */ public privateNoteValues: Fr[], - /** - * Note values delivered in plaintext. - * @dev Note that to recreate the correct note we need to merge privateNoteValues and publicNoteValues. To do that - * we need access to the contract ABI (that is done in the NoteProcessor). - */ - public publicNoteValues: Fr[], ) {} - static fromIncomingBodyPlaintextContractAndPublicValues( + static fromIncomingBodyPlaintextContract( plaintext: Buffer, contractAddress: AztecAddress, - publicNoteValues: Fr[], ): L1NotePayload | undefined { try { const reader = BufferReader.asReader(plaintext); @@ -56,7 +46,7 @@ export class L1NotePayload { const privateNoteValues = fields.slice(2); - return new L1NotePayload(contractAddress, storageSlot, noteTypeId, privateNoteValues, publicNoteValues); + return new L1NotePayload(contractAddress, storageSlot, noteTypeId, privateNoteValues); } catch (e) { return undefined; } @@ -68,29 +58,7 @@ export class L1NotePayload { return undefined; } - return this.fromIncomingBodyPlaintextContractAndPublicValues( - decryptedLog.incomingBodyPlaintext, - decryptedLog.contractAddress, - /* publicValues */ [], - ); - } - - static async decryptAsIncomingFromPublic(log: PublicLog, sk: Fq): Promise { - const { privateValues, publicValues } = parseLogFromPublic(log); - if (!privateValues) { - return undefined; - } - - const decryptedLog = await EncryptedLogPayload.decryptAsIncoming(privateValues, sk); - if (!decryptedLog) { - return undefined; - } - - return this.fromIncomingBodyPlaintextContractAndPublicValues( - decryptedLog.incomingBodyPlaintext, - decryptedLog.contractAddress, - publicValues, - ); + return this.fromIncomingBodyPlaintextContract(decryptedLog.incomingBodyPlaintext, decryptedLog.contractAddress); } /** @@ -111,15 +79,11 @@ export class L1NotePayload { const numPrivateNoteValues = randomInt(2) + 1; const privateNoteValues = Array.from({ length: numPrivateNoteValues }, () => Fr.random()); - const numPublicNoteValues = randomInt(2) + 1; - const publicNoteValues = Array.from({ length: numPublicNoteValues }, () => Fr.random()); - return new L1NotePayload( contract ?? 
(await AztecAddress.random()), Fr.random(), NoteSelector.random(), privateNoteValues, - publicNoteValues, ); } @@ -128,8 +92,7 @@ export class L1NotePayload { this.contractAddress.equals(other.contractAddress) && this.storageSlot.equals(other.storageSlot) && this.noteTypeId.equals(other.noteTypeId) && - this.privateNoteValues.every((value, index) => value.equals(other.privateNoteValues[index])) && - this.publicNoteValues.every((value, index) => value.equals(other.publicNoteValues[index])) + this.privateNoteValues.every((value, index) => value.equals(other.privateNoteValues[index])) ); } @@ -139,7 +102,6 @@ export class L1NotePayload { this.storageSlot, this.noteTypeId, new Vector(this.privateNoteValues), - new Vector(this.publicNoteValues), ); } @@ -150,33 +112,6 @@ export class L1NotePayload { reader.readObject(Fr), reader.readObject(NoteSelector), reader.readVector(Fr), - reader.readVector(Fr), ); } } - -/** - * Parse the given log into an array of public values and an encrypted log. - * - * @param log - Log to be parsed. - * @returns An object containing the public values, encrypted log, and ciphertext length. - */ -function parseLogFromPublic(log: PublicLog) { - // Extract lengths from the log - // See aztec_nr/aztec/src/macros/note/mod.nr to see how the "finalization_log" is encoded. - // Each length is stored in 2 bytes with a 0 separator byte between them: - // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1] ] - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. - const lengths = log.log[0].toBuffer().subarray(-5); - const publicValuesLength = lengths.readUint16BE(); - const privateValuesLength = lengths.readUint16BE(3); - - // Now we get the fields corresponding to the values generated from private. - // Note: +1 for the length values in position 0 - const privateValues = log.log.slice(1, 1 + privateValuesLength); - - // At last we load the public values - const publicValues = log.log.slice(1 + privateValuesLength, 1 + privateValuesLength + publicValuesLength); - - return { publicValues, privateValues }; -} diff --git a/yarn-project/txe/src/node/txe_node.ts b/yarn-project/txe/src/node/txe_node.ts index 0e446b3d6ad..a044059b394 100644 --- a/yarn-project/txe/src/node/txe_node.ts +++ b/yarn-project/txe/src/node/txe_node.ts @@ -170,13 +170,15 @@ export class TXENode implements AztecNode { } /** - * Adds note logs to the txe node, given a block - * @param blockNumber - The block number at which to add the note logs. - * @param privateLogs - The privateLogs that contain the note logs to be added. + * Adds private logs to the txe node, given a block + * @param blockNumber - The block number at which to add the private logs. + * @param privateLogs - The privateLogs that contain the private logs to be added. */ - addNoteLogsByTags(blockNumber: number, privateLogs: PrivateLog[]) { + addPrivateLogsByTags(blockNumber: number, privateLogs: PrivateLog[]) { privateLogs.forEach(log => { const tag = log.fields[0]; + this.#logger.verbose(`Found private log with tag ${tag.toString()} in block ${this.getBlockNumber()}`); + const currentLogs = this.#logsByTags.get(tag.toString()) ?? []; const scopedLog = new TxScopedL2Log( new TxHash(new Fr(blockNumber)), @@ -189,7 +191,6 @@ export class TXENode implements AztecNode { this.#logsByTags.set(tag.toString(), currentLogs); }); - // TODO: DISTINGUISH BETWEEN EVENT LOGS AND NOTE LOGS ? 
this.#noteIndex += privateLogs.length; } @@ -200,30 +201,8 @@ export class TXENode implements AztecNode { */ addPublicLogsByTags(blockNumber: number, publicLogs: PublicLog[]) { publicLogs.forEach(log => { - // Check that each log stores 3 lengths in its first field. If not, it's not a tagged log: - const firstFieldBuf = log.log[0].toBuffer(); - // See macros/note/mod/ and see how finalization_log[0] is constructed, to understand this monstrosity. (It wasn't me). - // Search the codebase for "disgusting encoding" to see other hardcoded instances of this encoding, that you might need to change if you ever find yourself here. - if (!firstFieldBuf.subarray(0, 27).equals(Buffer.alloc(27)) || firstFieldBuf[29] !== 0) { - // See parseLogFromPublic - the first field of a tagged log is 5 bytes structured: - // [ publicLen[0], publicLen[1], 0, privateLen[0], privateLen[1]] - this.#logger.warn(`Skipping public log with invalid first field: ${log.log[0]}`); - return; - } - // Check that the length values line up with the log contents - const publicValuesLength = firstFieldBuf.subarray(-5).readUint16BE(); - const privateValuesLength = firstFieldBuf.subarray(-5).readUint16BE(3); - // Add 1 for the first field holding lengths - const totalLogLength = 1 + publicValuesLength + privateValuesLength; - // Note that zeroes can be valid log values, so we can only assert that we do not go over the given length - if (totalLogLength > PUBLIC_LOG_DATA_SIZE_IN_FIELDS || log.log.slice(totalLogLength).find(f => !f.isZero())) { - this.#logger.warn(`Skipping invalid tagged public log with first field: ${log.log[0]}`); - return; - } - // The first elt stores lengths => tag is in fields[1] - const tag = log.log[1]; - - this.#logger.verbose(`Found tagged public log with tag ${tag.toString()} in block ${this.getBlockNumber()}`); + const tag = log.log[0]; + this.#logger.verbose(`Found public log with tag ${tag.toString()} in block ${this.getBlockNumber()}`); const currentLogs = this.#logsByTags.get(tag.toString()) ?? []; const scopedLog = new TxScopedL2Log( @@ -245,9 +224,9 @@ export class TXENode implements AztecNode { array implies no logs match that tag. */ getLogsByTags(tags: Fr[]): Promise { - const noteLogs = tags.map(tag => this.#logsByTags.get(tag.toString()) ?? []); + const logs = tags.map(tag => this.#logsByTags.get(tag.toString()) ?? 
[]); - return Promise.resolve(noteLogs); + return Promise.resolve(logs); } /** diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index eb6a44a7733..b66b4a7b175 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -720,7 +720,7 @@ export class TXE implements TypedOracle { await this.node.setTxEffect(blockNumber, new TxHash(new Fr(blockNumber)), txEffect); this.node.setNullifiersIndexesWithBlock(blockNumber, txEffect.nullifiers); - this.node.addNoteLogsByTags(this.blockNumber, this.privateLogs); + this.node.addPrivateLogsByTags(this.blockNumber, this.privateLogs); this.node.addPublicLogsByTags(this.blockNumber, this.publicLogs); const stateReference = await fork.getStateReference(); From 3cd447b6a42f3588a997287b430f8d509b4bc924 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Fri, 28 Feb 2025 21:52:40 +0000 Subject: [PATCH 02/22] Remove encoding --- .../default_aes128/mod.nr | 1 - .../default_aes128/partial_note.nr | 169 ------------------ 2 files changed, 170 deletions(-) delete mode 100644 noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/partial_note.nr diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/mod.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/mod.nr index 57d87dfca27..fc06cf1fa36 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/mod.nr @@ -1,3 +1,2 @@ pub mod event; pub mod note; -pub mod partial_note; // TEMPORARY! diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/partial_note.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/partial_note.nr deleted file mode 100644 index 74fcd4b140c..00000000000 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/log_assembly_strategies/default_aes128/partial_note.nr +++ /dev/null @@ -1,169 +0,0 @@ -// THIS FILE WILL GO AWAY WHEN WE REFACTOR PARTIAL NOTES, SO I DON'T FEEL TOO -// GUILTY ABOUT THE OBVIOUS CODE DUPLICATION VS note.nr & event.nr. 
- -use crate::{ - encrypted_logs::{ - encrypt::aes128::derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_sha256, - log_assembly_strategies::default_aes128::note::{ - get_arr_of_size__log_bytes__from_PT, get_arr_of_size__log_bytes_padding__from_PT, - HEADER_CIPHERTEXT_SIZE_IN_BYTES, - }, - }, - keys::{ - ecdh_shared_secret::derive_ecdh_shared_secret_using_aztec_address, - ephemeral::generate_ephemeral_key_pair, - }, - oracle::notes::{get_app_tag_as_sender, increment_app_tagging_secret_index_as_sender}, - utils::{bytes::{be_bytes_31_to_fields, get_random_bytes}, point::get_sign_of_point}, -}; -use dep::protocol_types::{address::{aztec_address::ToField, AztecAddress}, hash::poseidon2_hash}; -use std::aes128::aes128_encrypt; - -pub fn compute_partial_public_log_payload( - contract_address: AztecAddress, - plaintext: [u8; N], - recipient: AztecAddress, - sender: AztecAddress, -) -> [Field; M] { - // ***************************************************************************** - // Compute the shared secret - // ***************************************************************************** - - let (eph_sk, eph_pk) = generate_ephemeral_key_pair(); - - let eph_pk_sign_byte: u8 = get_sign_of_point(eph_pk) as u8; - - let ciphertext_shared_secret = derive_ecdh_shared_secret_using_aztec_address(eph_sk, recipient); // not to be confused with the tagging shared secret - - // TODO: also use this shared secret for deriving note randomness. - - // ***************************************************************************** - // Prepend/append extra bytes - // ***************************************************************************** - - // "Proper" meaning the main meaty stuff that we care about. - let proper_plaintext: [u8; N] = plaintext; - let final_plaintext = proper_plaintext; - - // ***************************************************************************** - // Convert the plaintext into whatever format the encryption function expects - // ***************************************************************************** - - // Already done for this strategy: AES expects bytes. - - // ***************************************************************************** - // Encrypt - // ***************************************************************************** - - let (sym_key, iv) = derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_sha256( - ciphertext_shared_secret, - ); - - let ciphertext_bytes = aes128_encrypt(final_plaintext, iv, sym_key); - - assert(ciphertext_bytes.len() == 16 * (1 + (2 + N) / 16)); - - // ***************************************************************************** - // Compute the header ciphertext - // ***************************************************************************** - - let contract_address_bytes = contract_address.to_field().to_be_bytes::<32>(); - - let mut header_plaintext: [u8; 32 + 2] = [0; 32 + 2]; - for i in 0..32 { - header_plaintext[i] = contract_address_bytes[i]; - } - let offset = contract_address_bytes.len(); - - let ciphertext_bytes_length = ciphertext_bytes.len(); - header_plaintext[offset] = (ciphertext_bytes_length >> 8) as u8; - header_plaintext[offset + 1] = ciphertext_bytes_length as u8; - - // TODO: this is insecure and wasteful: - // "Insecure", because the esk shouldn't be used twice (once for the header, - // and again for the proper ciphertext) (at least, I never got the - // "go ahead" that this would be safe, unfortunately). - // "Wasteful", because the exact same computation is happening further down. 
- // I'm leaving that 2nd computation where it is, because this 1st computation - // will be imminently deleted, when the header logic is deleted. - let (sym_key, iv) = derive_aes_symmetric_key_and_iv_from_ecdh_shared_secret_using_sha256( - ciphertext_shared_secret, - ); - - // Note: the aes128_encrypt builtin fn automatically appends bytes to the - // input, according to pkcs#7; hence why the output `header_ciphertext_bytes` is 16 - // bytes larger than the input in this case. - let header_ciphertext_bytes = aes128_encrypt(header_plaintext, iv, sym_key); - // I recall that converting a slice to an array incurs constraints, so I'll check the length this way instead: - assert(header_ciphertext_bytes.len() == HEADER_CIPHERTEXT_SIZE_IN_BYTES); - - // ***************************************************************************** - // Prepend / append more bytes of data to the ciphertext, before converting back - // to fields. - // ***************************************************************************** - - let mut log_bytes_padding_to_mult_31 = get_arr_of_size__log_bytes_padding__from_PT::<2 + N>(); - log_bytes_padding_to_mult_31 = unsafe { get_random_bytes() }; - - let mut log_bytes = get_arr_of_size__log_bytes__from_PT::<2 + N>(); - - log_bytes[0] = eph_pk_sign_byte; - let mut offset = 1; - for i in 0..header_ciphertext_bytes.len() { - log_bytes[offset + i] = header_ciphertext_bytes[i]; - } - offset += header_ciphertext_bytes.len(); - - for i in 0..ciphertext_bytes.len() { - log_bytes[offset + i] = ciphertext_bytes[i]; - } - offset += ciphertext_bytes.len(); - - for i in 0..log_bytes_padding_to_mult_31.len() { - log_bytes[offset + i] = log_bytes_padding_to_mult_31[i]; - } - - // ***************************************************************************** - // Convert bytes back to fields - // ***************************************************************************** - - let log_bytes_as_fields = be_bytes_31_to_fields(log_bytes); - - // ***************************************************************************** - // Prepend / append fields, to create the final log - // ***************************************************************************** - - // We don't add any extra random padding. - // Whilst we do this in note.nr, we won't do it for this partial_note log, because it's going to get stored in public, and so: - // - The nature of the tx is going to be leaked. - // - We therefore don't care if it's padded to obscure the length of the actual ciphertext. - // Note: partial logs are going to be greatly refactored, soon. - - // We assume that the sender wants for the recipient to find the tagged note, and therefore that they will cooperate - // and use the correct tag. Usage of a bad tag will result in the recipient not being able to find the note - // automatically. - let tag = unsafe { get_app_tag_as_sender(sender, recipient) }; - increment_app_tagging_secret_index_as_sender(sender, recipient); - - // Silo the tag with contract address. - // This is done by the kernel circuit to the private logs, but since the partial log will be finalized and emitted - // in public as unencrypted log, its tag is not siloed at the moment. - // To avoid querying logs using two types of tags, we silo the tag manually here. - // TODO(#10273) This should be done by the AVM when it's processing the raw logs instead of their hashes. - let siloed_tag = poseidon2_hash([contract_address.to_field(), tag]); - - // Temporary hack so that the partial public log remains the same format. 
- // It should return field array and make the tag the first field as compute_private_log_payload does. - - let mut final_log: [Field; M] = [0; M]; - - final_log[0] = siloed_tag; - final_log[1] = eph_pk.x; - - let mut offset = 2; - for i in 0..log_bytes_as_fields.len() { - final_log[offset + i] = log_bytes_as_fields[i]; - } - - final_log -} From 6570d6a5b2872949df2bbff45b8ddae46ea8b352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Sat, 1 Mar 2025 04:15:49 +0000 Subject: [PATCH 03/22] Remove unused file --- noir-projects/aztec-nr/aztec/src/note/mod.nr | 1 - noir-projects/aztec-nr/aztec/src/note/note_hash.nr | 11 ----------- 2 files changed, 12 deletions(-) delete mode 100644 noir-projects/aztec-nr/aztec/src/note/note_hash.nr diff --git a/noir-projects/aztec-nr/aztec/src/note/mod.nr b/noir-projects/aztec-nr/aztec/src/note/mod.nr index ef0b9d43cd1..ab9c50a007f 100644 --- a/noir-projects/aztec-nr/aztec/src/note/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/note/mod.nr @@ -9,4 +9,3 @@ pub mod retrieved_note; pub mod utils; pub mod note_emission; pub mod note_field; -pub mod note_hash; diff --git a/noir-projects/aztec-nr/aztec/src/note/note_hash.nr b/noir-projects/aztec-nr/aztec/src/note/note_hash.nr deleted file mode 100644 index 93de41a82d6..00000000000 --- a/noir-projects/aztec-nr/aztec/src/note/note_hash.nr +++ /dev/null @@ -1,11 +0,0 @@ -use protocol_types::{ - constants::GENERATOR_INDEX__NOTE_HASH, hash::poseidon2_hash_with_separator, - utils::arrays::array_concat, -}; - -pub fn note_hash(values: [Field; N], storage_slot: Field) -> Field { - poseidon2_hash_with_separator( - array_concat(values, [storage_slot]), - GENERATOR_INDEX__NOTE_HASH, - ) -} From 3cb58e7bd89673501ef4c3175bf3269bf5720108 Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 3 Mar 2025 14:57:47 +0000 Subject: [PATCH 04/22] copying get_trait_impl_method change to protocol types --- .../noir-protocol-circuits/crates/types/src/meta/mod.nr | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/meta/mod.nr b/noir-projects/noir-protocol-circuits/crates/types/src/meta/mod.nr index a06be877c54..78eee73eaed 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/meta/mod.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/meta/mod.nr @@ -16,7 +16,9 @@ comptime fn get_trait_impl_method( target_method: Quoted, ) -> TypedExpr { let trait_constraint = target_trait.as_trait_constraint(); - typ.get_trait_impl(trait_constraint).unwrap().methods().filter(|m| m.name() == target_method)[0] + typ.get_trait_impl(trait_constraint).expect(f"Type does not implement trait").methods().filter( + |m| m.name() == target_method, + )[0] .as_typed_expr() } From 117c667f3c682039afb983a174afbff1d73c3b4d Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 3 Mar 2025 15:09:30 +0000 Subject: [PATCH 05/22] linking issue in docs --- .../concepts/advanced/storage/partial_notes.md | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/docs/aztec/concepts/advanced/storage/partial_notes.md b/docs/docs/aztec/concepts/advanced/storage/partial_notes.md index 357a0033e9b..91a88a223e4 100644 --- a/docs/docs/aztec/concepts/advanced/storage/partial_notes.md +++ b/docs/docs/aztec/concepts/advanced/storage/partial_notes.md @@ -1,10 +1,15 @@ --- -title: Partial Notes +title: Partial Notes [OUTDATED DOCS] description: Describes how partial notes are used in Aztec tags: [notes, storage] sidebar_position: 4 --- +:::warning OUTDATED 
DOCUMENTATION +This documentation is outdated and may not reflect the current state of the Aztec protocol. This is to be updated when tackling [this issue](https://github.com/AztecProtocol/aztec-packages/issues/12414). +TODO(#12414): UPDATE THIS +::: + Partial notes are a concept that allows users to commit to an encrypted value, and allows a counterparty to update that value without knowing the specific details of the encrypted value. ## Use cases @@ -133,13 +138,7 @@ Then we just emit `P_a.x` and `P_b.x` as a note hashes, and we're done! ### Private Fee Payment Implementation -[`NoteInterface.nr`](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/aztec-nr/aztec/src/note/note_interface.nr) implements `compute_note_hiding_point`, which takes a note and computes the point "hides" it. - -This is implemented by applying the `partial_note` attribute: - -#include_code UintNote noir-projects/aztec-nr/uint-note/src/uint_note.nr rust - -Those `G_x` are generators that generated [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-projects/aztec-nr/aztec/src/generators.nr). Anyone can use them for separating different fields in a "partial note". +TODO(#12414): `setup_refund` no longer exists. We can see the complete implementation of creating and completing partial notes in an Aztec contract in the `setup_refund` and `complete_refund` functions. From 1745041da1067a31f11b1e379ce51a8db5fc30b2 Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 3 Mar 2025 15:22:09 +0000 Subject: [PATCH 06/22] fmt --- .../src/archiver/kv_archiver_store/log_store.ts | 2 +- .../memory_archiver_store/memory_archiver_store.ts | 7 +------ .../end-to-end/src/e2e_partial_notes.test.ts | 4 ++-- yarn-project/pxe/src/pxe_data_provider/index.ts | 9 +-------- yarn-project/txe/src/node/txe_node.ts | 13 ++++++------- 5 files changed, 11 insertions(+), 24 deletions(-) diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index be616670e0b..d4a4b789727 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -1,4 +1,4 @@ -import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, PUBLIC_LOG_DATA_SIZE_IN_FIELDS } from '@aztec/constants'; +import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX } from '@aztec/constants'; import type { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize'; diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 3bec497c5a5..9fb11ce5252 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -1,9 +1,4 @@ -import { - INITIAL_L2_BLOCK_NUM, - MAX_NOTE_HASHES_PER_TX, - MAX_NULLIFIERS_PER_TX, - PUBLIC_LOG_DATA_SIZE_IN_FIELDS, -} from '@aztec/constants'; +import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX } from '@aztec/constants'; import { Fr } from '@aztec/foundation/fields'; import { createLogger } from '@aztec/foundation/log'; import { FunctionSelector } from '@aztec/stdlib/abi'; diff --git a/yarn-project/end-to-end/src/e2e_partial_notes.test.ts 
b/yarn-project/end-to-end/src/e2e_partial_notes.test.ts index 627b9867c77..5a749b45bd7 100644 --- a/yarn-project/end-to-end/src/e2e_partial_notes.test.ts +++ b/yarn-project/end-to-end/src/e2e_partial_notes.test.ts @@ -1,5 +1,5 @@ -import { type AccountWallet, type Logger } from '@aztec/aztec.js'; -import { type TokenContract } from '@aztec/noir-contracts.js/Token'; +import type { AccountWallet, Logger } from '@aztec/aztec.js'; +import type { TokenContract } from '@aztec/noir-contracts.js/Token'; import { jest } from '@jest/globals'; diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index cefcb1b9a1e..f83506bac13 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -28,14 +28,7 @@ import { computeUniqueNoteHash, siloNoteHash, siloNullifier } from '@aztec/stdli import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import type { KeyValidationRequest } from '@aztec/stdlib/kernel'; import { computeAddressSecret, computeTaggingSecretPoint } from '@aztec/stdlib/keys'; -import { - IndexedTaggingSecret, - L1NotePayload, - LogWithTxData, - PrivateLog, - PublicLog, - TxScopedL2Log, -} from '@aztec/stdlib/logs'; +import { IndexedTaggingSecret, L1NotePayload, LogWithTxData, PrivateLog, TxScopedL2Log } from '@aztec/stdlib/logs'; import { getNonNullifiedL1ToL2MessageWitness } from '@aztec/stdlib/messaging'; import { Note, type NoteStatus } from '@aztec/stdlib/note'; import { MerkleTreeId, type NullifierMembershipWitness, PublicDataWitness } from '@aztec/stdlib/trees'; diff --git a/yarn-project/txe/src/node/txe_node.ts b/yarn-project/txe/src/node/txe_node.ts index a044059b394..53bcbb48739 100644 --- a/yarn-project/txe/src/node/txe_node.ts +++ b/yarn-project/txe/src/node/txe_node.ts @@ -1,10 +1,9 @@ -import { - type ARCHIVE_HEIGHT, - type L1_TO_L2_MSG_TREE_HEIGHT, - type NOTE_HASH_TREE_HEIGHT, - type NULLIFIER_TREE_HEIGHT, - type PUBLIC_DATA_TREE_HEIGHT, - PUBLIC_LOG_DATA_SIZE_IN_FIELDS, +import type { + ARCHIVE_HEIGHT, + L1_TO_L2_MSG_TREE_HEIGHT, + NOTE_HASH_TREE_HEIGHT, + NULLIFIER_TREE_HEIGHT, + PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/constants'; import type { L1ContractAddresses } from '@aztec/ethereum'; import { poseidon2Hash } from '@aztec/foundation/crypto'; From bc2c80c5a94ec9ebee66616c44ef6c8e1a30208b Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 3 Mar 2025 17:39:46 +0000 Subject: [PATCH 07/22] re-adding nullified note fix --- .../pxe/src/database/kv_pxe_database.ts | 34 +++++++++++++++---- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 55b7a086b0a..afd850294ab 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -1,7 +1,7 @@ import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Fr, type Point } from '@aztec/foundation/fields'; import { toArray } from '@aztec/foundation/iterable'; -import { type LogFn, createDebugOnlyLogger } from '@aztec/foundation/log'; +import { type Logger, createLogger } from '@aztec/foundation/log'; import type { AztecAsyncArray, AztecAsyncKVStore, @@ -9,8 +9,13 @@ import type { AztecAsyncMultiMap, AztecAsyncSingleton, } from '@aztec/kv-store'; -import { type ContractArtifact, FunctionSelector, FunctionType } from '@aztec/stdlib/abi'; -import { contractArtifactFromBuffer, contractArtifactToBuffer } from '@aztec/stdlib/abi'; +import { 
+ type ContractArtifact, + FunctionSelector, + FunctionType, + contractArtifactFromBuffer, + contractArtifactToBuffer, +} from '@aztec/stdlib/abi'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { InBlock } from '@aztec/stdlib/block'; import { @@ -67,7 +72,7 @@ export class KVPxeDatabase implements PxeDatabase { // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}` #capsules: AztecAsyncMap; - debug: LogFn; + #logger: Logger; protected constructor(private db: AztecAsyncKVStore) { this.#db = db; @@ -108,7 +113,7 @@ export class KVPxeDatabase implements PxeDatabase { this.#capsules = db.openMap('capsules'); - this.debug = createDebugOnlyLogger('aztec:kv-pxe-database'); + this.#logger = createLogger('aztec:kv-pxe-database'); } public static async create(db: AztecAsyncKVStore): Promise { @@ -199,11 +204,26 @@ export class KVPxeDatabase implements PxeDatabase { return this.db.transactionAsync(async () => { for (const dao of notes) { + const noteIndex = toBufferBE(dao.index, 32).toString('hex'); + + // Check if note already exists in #notes and skip if it does + const existingNote = await this.#notes.getAsync(noteIndex); + if (existingNote) { + this.#logger.warn(`Note with index ${noteIndex} already exists in active notes. Skipping re-addition...`); + continue; + } + + // Check if note already exists in #nullifiedNotes and skip if it does + const existingNullifiedNote = await this.#nullifiedNotes.getAsync(noteIndex); + if (existingNullifiedNote) { + this.#logger.warn(`Note with index ${noteIndex} already exists in nullified notes. Skipping re-addition...`); + continue; + } + // store notes by their index in the notes hash tree // this provides the uniqueness we need to store individual notes // and should also return notes in the order that they were created. 
// Had we stored them by their nullifier, they would be returned in random order - const noteIndex = toBufferBE(dao.index, 32).toString('hex'); await this.#notes.set(noteIndex, dao.toBuffer()); await this.#notesToScope.set(noteIndex, scope.toString()); await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); @@ -624,7 +644,7 @@ export class KVPxeDatabase implements PxeDatabase { async loadCapsule(contractAddress: AztecAddress, slot: Fr): Promise { const dataBuffer = await this.#capsules.getAsync(dbSlotToKey(contractAddress, slot)); if (!dataBuffer) { - this.debug(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`); + this.#logger.debug(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`); return null; } const capsule: Fr[] = []; From eb1b97c4814bbf0b3a5b8d0e4128ca2a969f6c16 Mon Sep 17 00:00:00 2001 From: benesjan Date: Mon, 3 Mar 2025 19:16:19 +0000 Subject: [PATCH 08/22] linking issue --- .../simulator/src/private/unconstrained_execution_oracle.ts | 3 ++- yarn-project/stdlib/src/tx/capsule.ts | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/yarn-project/simulator/src/private/unconstrained_execution_oracle.ts b/yarn-project/simulator/src/private/unconstrained_execution_oracle.ts index 770c4a703b2..2bd39ca6f18 100644 --- a/yarn-project/simulator/src/private/unconstrained_execution_oracle.ts +++ b/yarn-project/simulator/src/private/unconstrained_execution_oracle.ts @@ -23,7 +23,7 @@ export class UnconstrainedExecutionOracle extends TypedOracle { protected readonly contractAddress: AztecAddress, /** List of transient auth witnesses to be used during this simulation */ protected readonly authWitnesses: AuthWitness[], - protected readonly capsules: Capsule[], + protected readonly capsules: Capsule[], // TODO(#12425): Rename to transientCapsules protected readonly executionDataProvider: ExecutionDataProvider, protected log = createLogger('simulator:client_view_context'), protected readonly scopes?: AztecAddress[], @@ -339,6 +339,7 @@ export class UnconstrainedExecutionOracle extends TypedOracle { throw new Error(`Contract ${contractAddress} is not allowed to access ${this.contractAddress}'s PXE DB`); } return ( + // TODO(#12425): On the following line, the pertinent capsule gets overshadowed by the transient one. Tackle this. this.capsules.find(c => c.contractAddress.equals(contractAddress) && c.storageSlot.equals(slot))?.data ?? (await this.executionDataProvider.loadCapsule(this.contractAddress, slot)) ); diff --git a/yarn-project/stdlib/src/tx/capsule.ts b/yarn-project/stdlib/src/tx/capsule.ts index 3613d9a423d..a09d4cfe8be 100644 --- a/yarn-project/stdlib/src/tx/capsule.ts +++ b/yarn-project/stdlib/src/tx/capsule.ts @@ -8,6 +8,8 @@ import { Vector } from '../types/shared.js'; /** * Read-only data that is passed to the contract through an oracle during a transaction execution. + * TODO(#12425): Check whether this is always used to represent a transient capsule and if so, rename to + * TransientCapsule. 
*/ export class Capsule { constructor( From a8db3832bb858c7f018d278d6726b00bc069849f Mon Sep 17 00:00:00 2001 From: benesjan Date: Tue, 4 Mar 2025 17:08:05 +0000 Subject: [PATCH 09/22] typos --- .../aztec-nr/aztec/src/discovery/private_logs.nr | 4 ++-- .../aztec-nr/aztec/src/oracle/note_discovery.nr | 2 +- noir-projects/aztec-nr/uint-note/src/uint_note.nr | 4 ++-- yarn-project/pxe/src/pxe_data_provider/index.ts | 11 +++++++---- 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/discovery/private_logs.nr b/noir-projects/aztec-nr/aztec/src/discovery/private_logs.nr index 52a28eb155b..99ac9dbc33b 100644 --- a/noir-projects/aztec-nr/aztec/src/discovery/private_logs.nr +++ b/noir-projects/aztec-nr/aztec/src/discovery/private_logs.nr @@ -108,8 +108,8 @@ unconstrained fn destructure_log_plaintext( // combined type ID. We can do this because the note type ID is only 7 bits long, and so use an 8th bit to // distinguish private note logs and partial note logs. // This abuses the fact that the encoding of both of these logs is extremely similar, and will need improving and - // more formalization once we introduce other disimilar log types, such as events. Ideally we'd be able to leverage - // enums and tagged unions to achieve this goal. + // more formalization once we introduce other dissimilar log types, such as events. Ideally we'd be able to + // leverage enums and tagged unions to achieve this goal. let combined_type_id = log_plaintext.get(1); let note_type_id = ((combined_type_id as u64) % 128) as Field; let log_type_id = ((combined_type_id as u64) / 128) as Field; diff --git a/noir-projects/aztec-nr/aztec/src/oracle/note_discovery.nr b/noir-projects/aztec-nr/aztec/src/oracle/note_discovery.nr index 35d34e3ae11..75bc0a93f86 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/note_discovery.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/note_discovery.nr @@ -64,7 +64,7 @@ pub struct LogWithTxData { /// Fetches a log from the node that has the corresponding `tag`. The log can be either a public or a private log, and /// the tag is the first field in the log's content. Returns `Option::none` if no such log exists. Throws if more than /// one log with that tag exists. -/// Public logs have an extra field included at the beginning with the address of the contract that emtitted them. +/// Public logs have an extra field included at the beginning with the address of the contract that emitted them. // TODO(#11627): handle multiple logs with the same tag. // TODO(#10273): improve contract siloing of logs, don't introduce an extra field. pub unconstrained fn get_log_by_tag(tag: Field) -> Option { diff --git a/noir-projects/aztec-nr/uint-note/src/uint_note.nr b/noir-projects/aztec-nr/uint-note/src/uint_note.nr index a1d8c826207..7f273717b2e 100644 --- a/noir-projects/aztec-nr/uint-note/src/uint_note.nr +++ b/noir-projects/aztec-nr/uint-note/src/uint_note.nr @@ -112,7 +112,7 @@ impl UintNote { /// Each partial note should only be used once, since otherwise multiple notes would be linked together and known to /// belong to the same owner. /// - /// As part of the partial note cration process, a log will be sent to `recipient` from `sender` so that they can + /// As part of the partial note creation process, a log will be sent to `recipient` from `sender` so that they can /// discover the note. `recipient` will typically be the same as `owner`. 
pub fn partial( owner: AztecAddress, @@ -307,7 +307,7 @@ mod test { partial_note.compute_note_completion_log(value)[0], ); - // Then we exctract all fields except the first of both logs (i.e. the public log tag), and combine them to + // Then we extract all fields except the first of both logs (i.e. the public log tag), and combine them to // produce the note's packed representation. This requires that the members of the intermediate structs are in // the same order as in UintNote. let private_log_without_public_tag: [_; 2] = subarray(private_log_content.pack(), 1); diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index f83506bac13..8cc9769b807 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -360,6 +360,7 @@ export class PXEDataProvider implements ExecutionDataProvider { }), ); const indexes = await this.db.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets); + console.log('indexes for recipient', recipient, 'are', indexes); return appTaggingSecrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); } @@ -441,6 +442,7 @@ export class PXEDataProvider implements ExecutionDataProvider { maxBlockNumber: number, scopes?: AztecAddress[], ): Promise> { + console.log(`Syncing tagged logs for ${contractAddress} at block ${maxBlockNumber} with scopes ${scopes}`); this.log.verbose('Searching for tagged logs', { contract: contractAddress }); // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles. @@ -499,20 +501,20 @@ export class PXEDataProvider implements ExecutionDataProvider { logsByTags.forEach((logsByTag, logIndex) => { if (logsByTag.length > 0) { // Discard public logs - const filteredLogsbyTag = logsByTag.filter(l => !l.isFromPublic); - if (filteredLogsbyTag.length < logsByTag.length) { + const filteredLogsByTag = logsByTag.filter(l => !l.isFromPublic); + if (filteredLogsByTag.length < logsByTag.length) { this.log.warn(`Discarded ${logsByTag.filter(l => l.isFromPublic).length} public logs with matching tags`); } // The logs for the given tag exist so we store them for later processing - logsForRecipient.push(...filteredLogsbyTag); + logsForRecipient.push(...filteredLogsByTag); // We retrieve the indexed tagging secret corresponding to the log as I need that to evaluate whether // a new largest index have been found. const secretCorrespondingToLog = secretsForTheWholeWindow[logIndex]; const initialIndex = initialIndexesMap[secretCorrespondingToLog.appTaggingSecret.toString()]; - this.log.debug(`Found ${filteredLogsbyTag.length} logs as recipient ${recipient}`, { + this.log.debug(`Found ${filteredLogsByTag.length} logs as recipient ${recipient}`, { recipient, secret: secretCorrespondingToLog.appTaggingSecret, contractName, @@ -634,6 +636,7 @@ export class PXEDataProvider implements ExecutionDataProvider { recipient: AztecAddress, simulator?: AcirSimulator, ): Promise { + console.log('Processing tagged logs'); const decryptedLogs = await this.#decryptTaggedLogs(logs, recipient); // We've produced the full NoteDao, which we'd be able to simply insert into the database. 
However, this is From def77d4a852e5b91fd1f7e12619b8530a5a8783f Mon Sep 17 00:00:00 2001 From: benesjan Date: Tue, 4 Mar 2025 18:54:47 +0000 Subject: [PATCH 10/22] more helpful logs + typo fix --- .../aztec/src/discovery/partial_notes.nr | 16 +++++++++++----- yarn-project/pxe/src/pxe_data_provider/index.ts | 7 ++----- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/discovery/partial_notes.nr b/noir-projects/aztec-nr/aztec/src/discovery/partial_notes.nr index bddf62ea668..dab4ee952a4 100644 --- a/noir-projects/aztec-nr/aztec/src/discovery/partial_notes.nr +++ b/noir-projects/aztec-nr/aztec/src/discovery/partial_notes.nr @@ -67,15 +67,18 @@ pub unconstrained fn fetch_and_process_public_partial_note_completion_logs( let maybe_log = get_log_by_tag(pending_partial_note.note_completion_log_tag); if maybe_log.is_none() { debug_log_format( - "Found no completion logs for partial note #{}", - [(*i) as Field], + "Found no completion logs for partial note with tag {}", + [pending_partial_note.note_completion_log_tag], ); *i += 1 as u32; // Note that we're not removing the pending partial note from the PXE DB, so we will continue searching // for this tagged log when performing note discovery in the future until we either find it or the entry // is somehow removed from the PXE DB. } else { - debug_log_format("Completion log found for partial note #{}", [(*i) as Field]); + debug_log_format( + "Completion log found for partial note with tag {}", + [pending_partial_note.note_completion_log_tag], + ); let log = maybe_log.unwrap(); // Public logs have an extra field at the beginning with the contract address, which we use to verify @@ -108,8 +111,8 @@ pub unconstrained fn fetch_and_process_public_partial_note_completion_logs( ); debug_log_format( - "Discovered {0} notes for partial note {1}", - [discovered_notes.len() as Field, (*i) as Field], + "Discovered {0} notes for partial note with tag {1}", + [discovered_notes.len() as Field, pending_partial_note.note_completion_log_tag], ); array::for_each_in_bounded_vec( @@ -138,6 +141,9 @@ pub unconstrained fn fetch_and_process_public_partial_note_completion_logs( // TODO(#11627): only remove the pending entry if we actually process a log that results in the note // being completed. pending_partial_notes.remove(*i); + + // We don't increment `i` here, because CapsuleArray is contiguous and its `remove(...)` function + // shifts the elements to the left if the removed element is not the last element. 
} }, ); diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index 8cc9769b807..2a1a31fa8e9 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -21,7 +21,7 @@ import { encodeArguments, getFunctionArtifact, } from '@aztec/stdlib/abi'; -import type { AztecAddress } from '@aztec/stdlib/aztec-address'; +import { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { InBlock, L2Block, L2BlockNumber } from '@aztec/stdlib/block'; import type { CompleteAddress, ContractInstance } from '@aztec/stdlib/contract'; import { computeUniqueNoteHash, siloNoteHash, siloNullifier } from '@aztec/stdlib/hash'; @@ -360,7 +360,6 @@ export class PXEDataProvider implements ExecutionDataProvider { }), ); const indexes = await this.db.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets); - console.log('indexes for recipient', recipient, 'are', indexes); return appTaggingSecrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); } @@ -442,7 +441,6 @@ export class PXEDataProvider implements ExecutionDataProvider { maxBlockNumber: number, scopes?: AztecAddress[], ): Promise> { - console.log(`Syncing tagged logs for ${contractAddress} at block ${maxBlockNumber} with scopes ${scopes}`); this.log.verbose('Searching for tagged logs', { contract: contractAddress }); // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles. @@ -454,7 +452,7 @@ export class PXEDataProvider implements ExecutionDataProvider { const recipients = scopes ? scopes : await this.keyStore.getAccounts(); // A map of logs going from recipient address to logs. Note that the logs might have been processed before // due to us having a sliding window that "looks back" for logs as well. (We look back as there is no guarantee - // that a logs will be received ordered by a given tax index and that the tags won't be reused). + // that a logs will be received ordered by a given tag index and that the tags won't be reused). const logsMap = new Map(); const contractName = await this.contractDataProvider.getDebugContractName(contractAddress); for (const recipient of recipients) { @@ -636,7 +634,6 @@ export class PXEDataProvider implements ExecutionDataProvider { recipient: AztecAddress, simulator?: AcirSimulator, ): Promise { - console.log('Processing tagged logs'); const decryptedLogs = await this.#decryptTaggedLogs(logs, recipient); // We've produced the full NoteDao, which we'd be able to simply insert into the database. 
However, this is

From 43a43b410c771adca14ba6eb862498d8bbc9d8ed Mon Sep 17 00:00:00 2001
From: benesjan
Date: Tue, 4 Mar 2025 22:20:59 +0000
Subject: [PATCH 11/22] crowdfunding test fix

---
 .../end-to-end/src/e2e_crowdfunding_and_claim.test.ts | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts
index 442a4baa0ab..3738691516e 100644
--- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts
+++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts
@@ -131,9 +131,9 @@ describe('e2e_crowdfunding_and_claim', () => {
   const processUniqueNote = (uniqueNote: UniqueNote) => {
     return {
       note: {
-        value: uniqueNote.note.items[0].toBigInt(), // We convert to bigint as Fr is not serializable to U128
-        owner: AztecAddress.fromField(uniqueNote.note.items[1]),
-        randomness: uniqueNote.note.items[2],
+        owner: AztecAddress.fromField(uniqueNote.note.items[0]),
+        randomness: uniqueNote.note.items[1],
+        value: uniqueNote.note.items[2].toBigInt(), // We convert to bigint as Fr is not serializable to U128
       },
       // eslint-disable-next-line camelcase
       contract_address: uniqueNote.contractAddress,

From 8ffc7e7eab2d2d409f7900552db0b6c0089f831c Mon Sep 17 00:00:00 2001
From: benesjan
Date: Tue, 4 Mar 2025 23:50:58 +0000
Subject: [PATCH 12/22] processing only unseen

---
 .../src/database/interfaces/pxe_database.ts | 10 ++++++-
 .../pxe/src/database/kv_pxe_database.ts | 26 ++++++++++++++++++-
 .../pxe/src/pxe_data_provider/index.ts | 10 +++----
 3 files changed, 39 insertions(+), 7 deletions(-)

diff --git a/yarn-project/pxe/src/database/interfaces/pxe_database.ts b/yarn-project/pxe/src/database/interfaces/pxe_database.ts
index ccb000397b7..76f3998d6a6 100644
--- a/yarn-project/pxe/src/database/interfaces/pxe_database.ts
+++ b/yarn-project/pxe/src/database/interfaces/pxe_database.ts
@@ -4,7 +4,7 @@
 import type { AztecAddress } from '@aztec/stdlib/aztec-address';
 import type { InBlock } from '@aztec/stdlib/block';
 import type { CompleteAddress, ContractInstanceWithAddress } from '@aztec/stdlib/contract';
 import type { PublicKey } from '@aztec/stdlib/keys';
-import type { IndexedTaggingSecret } from '@aztec/stdlib/logs';
+import type { IndexedTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs';
 import type { NotesFilter } from '@aztec/stdlib/note';
 import type { BlockHeader } from '@aztec/stdlib/tx';
@@ -178,6 +178,14 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
    */
   setTaggingSecretsIndexesAsRecipient(indexedTaggingSecrets: IndexedTaggingSecret[]): Promise;
 
+  /**
+   * Iterates through logs, checks if the log is unseen, if it is, it adds it to the seen logs set stored in the db
+   * and adds the logs to the returned logs array. Returns the unseen logs.
+   * @param logs - The logs to check for being unseen
+   * @returns Unseen logs
+   */
+  updateSeenLogsAndGetUnseen(logs: TxScopedL2Log[]): Promise;
+
   /**
    * Deletes all notes synched after this block number.
    * @param blockNumber - All notes strictly after this block number are removed.
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index afd850294ab..d7c6bde459b 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -1,4 +1,5 @@ import { toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { sha256 } from '@aztec/foundation/crypto'; import { Fr, type Point } from '@aztec/foundation/fields'; import { toArray } from '@aztec/foundation/iterable'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -24,7 +25,7 @@ import { SerializableContractInstance, } from '@aztec/stdlib/contract'; import type { PublicKey } from '@aztec/stdlib/keys'; -import type { IndexedTaggingSecret } from '@aztec/stdlib/logs'; +import type { IndexedTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { BlockHeader } from '@aztec/stdlib/tx'; @@ -72,6 +73,9 @@ export class KVPxeDatabase implements PxeDatabase { // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}` #capsules: AztecAsyncMap; + // Stores the hashes of the logs that have been seen + #seenLogHashes: AztecAsyncMap; + #logger: Logger; protected constructor(private db: AztecAsyncKVStore) { @@ -113,6 +117,8 @@ export class KVPxeDatabase implements PxeDatabase { this.#capsules = db.openMap('capsules'); + this.#seenLogHashes = db.openMap('seen_log_hashes'); + this.#logger = createLogger('aztec:kv-pxe-database'); } @@ -637,6 +643,24 @@ export class KVPxeDatabase implements PxeDatabase { }); } + async updateSeenLogsAndGetUnseen(logs: TxScopedL2Log[]): Promise { + const unseenLogs: TxScopedL2Log[] = []; + + for (const log of logs) { + // Create a hash of the log data and tx hash to uniquely identify it + const hash = sha256(log.toBuffer()).toString('hex'); + + // Check if we've seen this log before + const seen = await this.#seenLogHashes.hasAsync(hash); + if (!seen) { + // If we haven't seen it, add it to results and mark as seen + unseenLogs.push(log); + await this.#seenLogHashes.set(hash, true); + } + } + return unseenLogs; + } + async storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise { await this.#capsules.set(dbSlotToKey(contractAddress, slot), Buffer.concat(capsule.map(value => value.toBuffer()))); } diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index 2a1a31fa8e9..f301c20b17f 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -566,11 +566,11 @@ export class PXEDataProvider implements ExecutionDataProvider { secretsAndWindows = newSecretsAndWindows; } - // We filter the logs by block number and store them in the map. - logsMap.set( - recipient.toString(), - logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber), - ); + // We filter the logs by block number and then we filter out the logs that have been seen. + const filteredLogsByBlock = logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber); + const filteredUnseenLogs = await this.db.updateSeenLogsAndGetUnseen(filteredLogsByBlock); + + logsMap.set(recipient.toString(), filteredUnseenLogs); // At this point we have processed all the logs for the recipient so we store the new largest indexes in the db. 
await this.db.setTaggingSecretsIndexesAsRecipient( From 9f865e3bb44dbd79287bbd2c9190c660a676a956 Mon Sep 17 00:00:00 2001 From: benesjan Date: Wed, 5 Mar 2025 17:13:13 +0000 Subject: [PATCH 13/22] fix of incorrectly handling note scopes --- yarn-project/pxe/src/database/kv_pxe_database.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index d7c6bde459b..66cc04fffe6 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -212,10 +212,12 @@ export class KVPxeDatabase implements PxeDatabase { for (const dao of notes) { const noteIndex = toBufferBE(dao.index, 32).toString('hex'); - // Check if note already exists in #notes and skip if it does - const existingNote = await this.#notes.getAsync(noteIndex); - if (existingNote) { - this.#logger.warn(`Note with index ${noteIndex} already exists in active notes. Skipping re-addition...`); + // Check if note exists under this scope and skip if it does + const existingScopes = await toArray(this.#notesToScope.getValuesAsync(noteIndex)); + if (existingScopes.includes(scope.toString())) { + this.#logger.warn( + `Note with index ${noteIndex} already exists in active notes under scope ${scope}. Skipping re-addition...`, + ); continue; } From 65e42584f392bf2a34e30e82256d1046f41bcad5 Mon Sep 17 00:00:00 2001 From: benesjan Date: Wed, 5 Mar 2025 17:58:56 +0000 Subject: [PATCH 14/22] safer db access --- .../src/database/interfaces/pxe_database.ts | 4 +++ .../pxe/src/database/kv_pxe_database.ts | 27 +++++++++++-------- .../pxe/src/pxe_data_provider/index.ts | 3 ++- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/yarn-project/pxe/src/database/interfaces/pxe_database.ts b/yarn-project/pxe/src/database/interfaces/pxe_database.ts index 76f3998d6a6..941931688a6 100644 --- a/yarn-project/pxe/src/database/interfaces/pxe_database.ts +++ b/yarn-project/pxe/src/database/interfaces/pxe_database.ts @@ -181,6 +181,10 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD /** * Iterates through logs, checks if the log is unseen, if it is, it adds it to the seen logs set stored in the db * and adds the logs to the returned logs array. Returns the unseen logs. + * @dev Note that all the database operations are wrapped in a transaction to ensure atomicity and prevent race + * conditions from other calls to `syncTaggedLogs` (which calls this method). This is important as the main goal + * of this method is to ensure that we do not process the same log twice (before this was introduced, we were having + * race conditions in tests). 
* @param logs - The logs to check for being unseen * @returns Unseen logs */ diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 66cc04fffe6..4acf02821df 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -648,18 +648,23 @@ export class KVPxeDatabase implements PxeDatabase { async updateSeenLogsAndGetUnseen(logs: TxScopedL2Log[]): Promise { const unseenLogs: TxScopedL2Log[] = []; - for (const log of logs) { - // Create a hash of the log data and tx hash to uniquely identify it - const hash = sha256(log.toBuffer()).toString('hex'); - - // Check if we've seen this log before - const seen = await this.#seenLogHashes.hasAsync(hash); - if (!seen) { - // If we haven't seen it, add it to results and mark as seen - unseenLogs.push(log); - await this.#seenLogHashes.set(hash, true); + // We wrap the database access in a transaction to ensure atomicity and prevent race conditions from other calls + // to `syncTaggedLogs` (which calls this method). + await this.db.transactionAsync(async () => { + for (const log of logs) { + // Create a hash of the log data and tx hash to uniquely identify it + const hash = sha256(log.toBuffer()).toString('hex'); + + // Check if we've seen this log before + const seen = await this.#seenLogHashes.hasAsync(hash); + if (!seen) { + // If we haven't seen it, add it to results and mark as seen + unseenLogs.push(log); + await this.#seenLogHashes.set(hash, true); + } } - } + }); + return unseenLogs; } diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index f301c20b17f..f60dfed3ec4 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -566,8 +566,9 @@ export class PXEDataProvider implements ExecutionDataProvider { secretsAndWindows = newSecretsAndWindows; } - // We filter the logs by block number and then we filter out the logs that have been seen. + // We filter the logs by block number. const filteredLogsByBlock = logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber); + // Now we filter out the logs that have been seen before and we tag the unseen logs as seen in the db. 
const filteredUnseenLogs = await this.db.updateSeenLogsAndGetUnseen(filteredLogsByBlock); logsMap.set(recipient.toString(), filteredUnseenLogs); From e7d6023df567ee116bce765a5f1f6dd6bb4632ae Mon Sep 17 00:00:00 2001 From: benesjan Date: Wed, 5 Mar 2025 18:05:22 +0000 Subject: [PATCH 15/22] removing obsolete test --- .../src/archiver/archiver_store_test_suite.ts | 28 ------------------- 1 file changed, 28 deletions(-) diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 7d23fa16ab7..2080a7bf551 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -524,34 +524,6 @@ export function describeArchiverDataStore( ], ]); }); - - it('is not possible to add public logs by tag if they are invalid', async () => { - const tag = makeTag(99, 88, 77); - const invalidLogs = [ - PublicLog.fromFields([ - AztecAddress.fromNumber(1).toField(), - tag, - ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), - ]), - PublicLog.fromFields([ - AztecAddress.fromNumber(1).toField(), - tag, - ...times(PUBLIC_LOG_DATA_SIZE_IN_FIELDS - 1, i => new Fr(tag.toNumber() + i)), - ]), - ]; - - // Create a block containing these invalid logs - const newBlockNumber = numBlocks; - const newBlock = await mockBlockWithLogs(newBlockNumber); - newBlock.data.body.txEffects[0].publicLogs = invalidLogs; - await store.addBlocks([newBlock]); - await store.addLogs([newBlock.data]); - - const logsByTags = await store.getLogsByTags([tag]); - - // Neither of the logs should have been added: - expect(logsByTags).toEqual([[]]); - }); }); describe('getPublicLogs', () => { From 84f25a58862e561d233e0ba8a18aa05231393106 Mon Sep 17 00:00:00 2001 From: benesjan Date: Wed, 5 Mar 2025 19:01:10 +0000 Subject: [PATCH 16/22] fix --- noir-projects/aztec-nr/aztec/src/macros/dispatch/mod.nr | 2 +- noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/macros/dispatch/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/dispatch/mod.nr index 9e6cff1fde8..bcd31ec8d19 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/dispatch/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/dispatch/mod.nr @@ -85,7 +85,7 @@ pub comptime fn generate_public_dispatch(m: Module) -> Quoted { // No dispatch function if there are no public functions quote {} } else { - let ifs = ifs.push_back(quote { panic(f"Unknown selector") }); + let ifs = ifs.push_back(quote { panic(f"Unknown selector {selector}") }); let dispatch = ifs.join(quote { }); let body = quote { diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index e0a1c66de0d..26b73df9972 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -98,7 +98,7 @@ pub contract FPC { // Set a public teardown function in which the refund will be paid back to the user by finalizing the partial note. 
context.set_public_teardown_function( context.this_address(), - comptime { FunctionSelector::from_signature("complete_refund((Field),Field,u128)") }, + comptime { FunctionSelector::from_signature("complete_refund((Field),(Field),u128)") }, array_concat( array_concat(accepted_asset.serialize(), partial_note.serialize()), max_fee.serialize(), From b95c4c1f2e3b6173b940eed6a195865ca6dafbbf Mon Sep 17 00:00:00 2001 From: benesjan Date: Wed, 5 Mar 2025 21:13:41 +0000 Subject: [PATCH 17/22] reverting seen logs db thing --- .../src/database/interfaces/pxe_database.ts | 14 +-------- .../pxe/src/database/kv_pxe_database.ts | 31 +------------------ .../pxe/src/pxe_data_provider/index.ts | 11 +++---- 3 files changed, 7 insertions(+), 49 deletions(-) diff --git a/yarn-project/pxe/src/database/interfaces/pxe_database.ts b/yarn-project/pxe/src/database/interfaces/pxe_database.ts index 941931688a6..ccb000397b7 100644 --- a/yarn-project/pxe/src/database/interfaces/pxe_database.ts +++ b/yarn-project/pxe/src/database/interfaces/pxe_database.ts @@ -4,7 +4,7 @@ import type { AztecAddress } from '@aztec/stdlib/aztec-address'; import type { InBlock } from '@aztec/stdlib/block'; import type { CompleteAddress, ContractInstanceWithAddress } from '@aztec/stdlib/contract'; import type { PublicKey } from '@aztec/stdlib/keys'; -import type { IndexedTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; +import type { IndexedTaggingSecret } from '@aztec/stdlib/logs'; import type { NotesFilter } from '@aztec/stdlib/note'; import type { BlockHeader } from '@aztec/stdlib/tx'; @@ -178,18 +178,6 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD */ setTaggingSecretsIndexesAsRecipient(indexedTaggingSecrets: IndexedTaggingSecret[]): Promise; - /** - * Iterates through logs, checks if the log is unseen, if it is, it adds it to the seen logs set stored in the db - * and adds the logs to the returned logs array. Returns the unseen logs. - * @dev Note that all the database operations are wrapped in a transaction to ensure atomicity and prevent race - * conditions from other calls to `syncTaggedLogs` (which calls this method). This is important as the main goal - * of this method is to ensure that we do not process the same log twice (before this was introduced, we were having - * race conditions in tests). - * @param logs - The logs to check for being unseen - * @returns Unseen logs - */ - updateSeenLogsAndGetUnseen(logs: TxScopedL2Log[]): Promise; - /** * Deletes all notes synched after this block number. * @param blockNumber - All notes strictly after this block number are removed. 
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 4acf02821df..9301dda7ff0 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -1,5 +1,4 @@ import { toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { sha256 } from '@aztec/foundation/crypto'; import { Fr, type Point } from '@aztec/foundation/fields'; import { toArray } from '@aztec/foundation/iterable'; import { type Logger, createLogger } from '@aztec/foundation/log'; @@ -25,7 +24,7 @@ import { SerializableContractInstance, } from '@aztec/stdlib/contract'; import type { PublicKey } from '@aztec/stdlib/keys'; -import type { IndexedTaggingSecret, TxScopedL2Log } from '@aztec/stdlib/logs'; +import type { IndexedTaggingSecret } from '@aztec/stdlib/logs'; import { NoteStatus, type NotesFilter } from '@aztec/stdlib/note'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { BlockHeader } from '@aztec/stdlib/tx'; @@ -73,9 +72,6 @@ export class KVPxeDatabase implements PxeDatabase { // Arbitrary data stored by contracts. Key is computed as `${contractAddress}:${key}` #capsules: AztecAsyncMap; - // Stores the hashes of the logs that have been seen - #seenLogHashes: AztecAsyncMap; - #logger: Logger; protected constructor(private db: AztecAsyncKVStore) { @@ -117,8 +113,6 @@ export class KVPxeDatabase implements PxeDatabase { this.#capsules = db.openMap('capsules'); - this.#seenLogHashes = db.openMap('seen_log_hashes'); - this.#logger = createLogger('aztec:kv-pxe-database'); } @@ -645,29 +639,6 @@ export class KVPxeDatabase implements PxeDatabase { }); } - async updateSeenLogsAndGetUnseen(logs: TxScopedL2Log[]): Promise { - const unseenLogs: TxScopedL2Log[] = []; - - // We wrap the database access in a transaction to ensure atomicity and prevent race conditions from other calls - // to `syncTaggedLogs` (which calls this method). - await this.db.transactionAsync(async () => { - for (const log of logs) { - // Create a hash of the log data and tx hash to uniquely identify it - const hash = sha256(log.toBuffer()).toString('hex'); - - // Check if we've seen this log before - const seen = await this.#seenLogHashes.hasAsync(hash); - if (!seen) { - // If we haven't seen it, add it to results and mark as seen - unseenLogs.push(log); - await this.#seenLogHashes.set(hash, true); - } - } - }); - - return unseenLogs; - } - async storeCapsule(contractAddress: AztecAddress, slot: Fr, capsule: Fr[]): Promise { await this.#capsules.set(dbSlotToKey(contractAddress, slot), Buffer.concat(capsule.map(value => value.toBuffer()))); } diff --git a/yarn-project/pxe/src/pxe_data_provider/index.ts b/yarn-project/pxe/src/pxe_data_provider/index.ts index f60dfed3ec4..2a1a31fa8e9 100644 --- a/yarn-project/pxe/src/pxe_data_provider/index.ts +++ b/yarn-project/pxe/src/pxe_data_provider/index.ts @@ -566,12 +566,11 @@ export class PXEDataProvider implements ExecutionDataProvider { secretsAndWindows = newSecretsAndWindows; } - // We filter the logs by block number. - const filteredLogsByBlock = logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber); - // Now we filter out the logs that have been seen before and we tag the unseen logs as seen in the db. - const filteredUnseenLogs = await this.db.updateSeenLogsAndGetUnseen(filteredLogsByBlock); - - logsMap.set(recipient.toString(), filteredUnseenLogs); + // We filter the logs by block number and store them in the map. 
+ logsMap.set( + recipient.toString(), + logsForRecipient.filter(log => log.blockNumber <= maxBlockNumber), + ); // At this point we have processed all the logs for the recipient so we store the new largest indexes in the db. await this.db.setTaggingSecretsIndexesAsRecipient( From a8c449b7703a3cea9e894f83f43c3e51dedf17cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Thu, 6 Mar 2025 17:43:25 +0000 Subject: [PATCH 18/22] Fix merge issue --- .../noir-contracts/contracts/token_contract/src/main.nr | 1 - 1 file changed, 1 deletion(-) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 20a06d0c350..5cf04835adc 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -44,7 +44,6 @@ pub contract Token { // docs:end:import_authwit use crate::types::balance_set::BalanceSet; - use std::ops::{Add, Sub}; // docs:end::imports From 8d236e0afbedd68be93cfbe31fd5b70c76432617 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Fri, 7 Mar 2025 16:30:59 +0000 Subject: [PATCH 19/22] Fix missing import --- noir-projects/noir-contracts/contracts/nft_contract/src/main.nr | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr index 98609ea1bdf..8c886d9ff3b 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr @@ -20,7 +20,7 @@ pub contract NFT { functions::{initializer, internal, private, public, view}, storage::storage, }, - note::constants::MAX_NOTES_PER_PAGE, + note::{constants::MAX_NOTES_PER_PAGE, note_interface::NoteProperties}, prelude::{ AztecAddress, Map, NoteGetterOptions, NoteViewerOptions, PrivateContext, PrivateSet, PublicContext, PublicImmutable, PublicMutable, From 181e82829ff783c9fa4b824ffc129058eb3b7ac6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Tue, 11 Mar 2025 19:43:59 +0000 Subject: [PATCH 20/22] Perform full note discovery on sync_notes --- noir-projects/aztec-nr/aztec/src/macros/mod.nr | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/macros/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/mod.nr index a7e323af047..d72f412f348 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/mod.nr @@ -5,7 +5,7 @@ pub mod notes; pub mod storage; pub mod events; -use functions::{stub_registry, utils::transform_unconstrained}; +use functions::{stub_registry, utils::{create_note_discovery_call, transform_unconstrained}}; use notes::{generate_note_export, NOTES}; use storage::STORAGE_LAYOUT_NAME; @@ -328,9 +328,15 @@ comptime fn generate_note_exports() -> Quoted { } comptime fn generate_sync_notes() -> Quoted { + let note_discovery_call = create_note_discovery_call(); quote { unconstrained fn sync_notes() { - aztec::oracle::note_discovery::sync_notes(); + // Because this unconstrained function is injected after the contract is processed by the macros, it'll not + // be modified by the macros that alter unconstrained functions. As such, we need to manually inject the + // unconstrained execution context since it will not be available otherwise. 
+ let context = dep::aztec::context::unconstrained_context::UnconstrainedContext::new(); + + $note_discovery_call } } } From 03b017e7109285e2e09130d5e48e24a74bdc3d57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Tue, 11 Mar 2025 21:43:59 +0000 Subject: [PATCH 21/22] Fix sync nots --- noir-projects/aztec-nr/aztec/src/macros/functions/utils.nr | 4 ++-- noir-projects/aztec-nr/aztec/src/macros/mod.nr | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/macros/functions/utils.nr b/noir-projects/aztec-nr/aztec/src/macros/functions/utils.nr index 17c78c24074..070306554b7 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/functions/utils.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/functions/utils.nr @@ -343,7 +343,7 @@ comptime fn create_init_check(f: FunctionDefinition) -> Quoted { /// Injects a call to `aztec::discovery::discover_new_notes`, causing for new notes to be added to PXE and made /// available for the current execution. -comptime fn create_note_discovery_call() -> Quoted { +pub(crate) comptime fn create_note_discovery_call() -> Quoted { quote { /// Safety: note discovery returns nothing and is performed solely for its side-effects. It is therefore always /// safe to call. @@ -351,7 +351,7 @@ comptime fn create_note_discovery_call() -> Quoted { dep::aztec::discovery::discover_new_notes( context.this_address(), _compute_note_hash_and_nullifier, - ) + ); }; } } diff --git a/noir-projects/aztec-nr/aztec/src/macros/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/mod.nr index d72f412f348..d7268761ef9 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/mod.nr @@ -255,6 +255,7 @@ comptime fn generate_contract_library_method_compute_note_hash_and_nullifier() - _storage_slot: Field, _note_type_id: Field, _contract_address: aztec::protocol_types::address::AztecAddress, + _nonce: Field, ) -> Option { panic(f"This contract does not use private notes") } From 45da6f08cee4559ff44120cf4c0abe7753b81d6d Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 13 Mar 2025 03:00:17 +0900 Subject: [PATCH 22/22] fix: this fixes the issues induced by 12391 because we now require deliver notes to be implemented on the txe level (#12667) Resolves #12668 Co-authored-by: sklppy88 --- docs/docs/developers/guides/js_apps/test.md | 2 +- .../contracts/escrow_contract/src/test.nr | 241 +++++++++--------- .../src/guides/dapp_testing.test.ts | 3 +- yarn-project/txe/src/node/txe_node.ts | 2 +- yarn-project/txe/src/oracle/txe_oracle.ts | 29 ++- .../txe/src/txe_service/txe_service.ts | 37 +++ yarn-project/txe/src/util/encoding.ts | 4 + 7 files changed, 185 insertions(+), 133 deletions(-) diff --git a/docs/docs/developers/guides/js_apps/test.md b/docs/docs/developers/guides/js_apps/test.md index 8c3f0d1a9c0..d58f8975865 100644 --- a/docs/docs/developers/guides/js_apps/test.md +++ b/docs/docs/developers/guides/js_apps/test.md @@ -107,7 +107,7 @@ To query storage directly, you'll need to know the slot you want to access. This #### Querying private state -Private state in the Aztec is represented via sets of [private notes](../../../aztec/concepts/storage/state_model.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. 
To calculate the actual balance, it extracts the `value` of each note, which is the first element, and sums them up. +Private state in the Aztec is represented via sets of [private notes](../../../aztec/concepts/storage/state_model.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. To calculate the actual balance, it extracts the `value` of each note, which is the third element, and sums them up. #include_code private-storage /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript diff --git a/noir-projects/noir-contracts/contracts/escrow_contract/src/test.nr b/noir-projects/noir-contracts/contracts/escrow_contract/src/test.nr index 4d98ed6fe20..b56bc2a4585 100644 --- a/noir-projects/noir-contracts/contracts/escrow_contract/src/test.nr +++ b/noir-projects/noir-contracts/contracts/escrow_contract/src/test.nr @@ -1,120 +1,121 @@ -// TODO(ek): Clean up this test -use crate::Escrow; -use dep::token::Token; - -use aztec::{ - oracle::{execution::{get_block_number, get_contract_address}, storage::storage_read}, - prelude::AztecAddress, - protocol_types::storage::map::derive_storage_slot_in_map, - test::helpers::{cheatcodes, test_environment::TestEnvironment}, -}; - -pub unconstrained fn get_public_balance( - token_contract_address: AztecAddress, - address: AztecAddress, -) -> u128 { - let current_contract_address = get_contract_address(); - cheatcodes::set_contract_address(token_contract_address); - let block_number = get_block_number(); - - let balances_slot = Token::storage_layout().public_balances.slot; - let address_slot = derive_storage_slot_in_map(balances_slot, address); - let amount: u128 = storage_read(token_contract_address, address_slot, block_number); - cheatcodes::set_contract_address(current_contract_address); - amount -} - -pub unconstrained fn get_private_balance( - token_contract_address: AztecAddress, - address: AztecAddress, -) -> u128 { - let current_contract_address = get_contract_address(); - cheatcodes::set_contract_address(token_contract_address); - // Direct call to unconstrained - let amt = Token::balance_of_private(address); - cheatcodes::set_contract_address(current_contract_address); - amt -} - -global MINT_AMOUNT: u128 = 200000; - -unconstrained fn deploy_contracts( - env: &mut TestEnvironment, - admin_and_owner: AztecAddress, -) -> (AztecAddress, AztecAddress) { - env.impersonate(admin_and_owner); - - // Deploy token contract - let donation_token_initializer_call_interface = Token::interface().constructor( - admin_and_owner, - "Token00000000000000000000000000", - "TKN0000000000000000000000000000", - 18, - ); - let donation_token_contract = env - .deploy("./@token_contract", "Token") - .with_public_void_initializer(donation_token_initializer_call_interface); - let token_contract_address = donation_token_contract.to_address(); - env.advance_block_by(1); - - // Deploy Escrow contract with public keys - let escrow_contract_initializer_call_interface = - Escrow::interface().constructor(admin_and_owner); - let escrow_contract = env - .deploy_with_public_keys("./@escrow_contract", "Escrow", 6969) - .with_private_initializer(escrow_contract_initializer_call_interface); - let escrow_contract_address = escrow_contract.to_address(); - - env.advance_block_by(1); - - Token::at(token_contract_address) - 
.mint_to_private(admin_and_owner, admin_and_owner, MINT_AMOUNT) - .call(&mut env.private()); - - env.advance_block_by(1); - - let private_balance_after_mint = get_private_balance(token_contract_address, admin_and_owner); - assert(private_balance_after_mint == MINT_AMOUNT); - - (token_contract_address, escrow_contract_address) -} - -#[test] -unconstrained fn main() { - let mut env = TestEnvironment::new(); - - let (account_1, account_2) = (env.create_account_contract(1), env.create_account_contract(2)); - - let (token_contract_address, escrow_contract_address) = deploy_contracts(&mut env, account_1); - - // We transfer tokens to the escrow contract - let TRANSFER_AMOUNT = 20000 as u128; - Token::at(token_contract_address).transfer(escrow_contract_address, TRANSFER_AMOUNT).call( - &mut env.private(), - ); - env.advance_block_by(1); - - let balance_of_escrow_after_transfer = - get_private_balance(token_contract_address, escrow_contract_address); - - assert_eq(balance_of_escrow_after_transfer, TRANSFER_AMOUNT); - - // We then withdraw some escrowed funds to account_2 - let balance_of_account_2_before_withdrawal = - get_private_balance(token_contract_address, account_2); - assert(balance_of_account_2_before_withdrawal == 0 as u128); - - let WITHDRAWAL_AMOUNT = 69 as u128; - Escrow::at(escrow_contract_address) - .withdraw(token_contract_address, WITHDRAWAL_AMOUNT, account_2) - .call(&mut env.private()); - env.advance_block_by(1); - - let balance_of_account_2_after_withdrawal = - get_private_balance(token_contract_address, account_2); - assert(balance_of_account_2_after_withdrawal == WITHDRAWAL_AMOUNT); - - let balance_of_escrow_after_withdrawal = - get_private_balance(token_contract_address, escrow_contract_address); - assert(balance_of_escrow_after_withdrawal == TRANSFER_AMOUNT - WITHDRAWAL_AMOUNT); -} +// TODO (#12692): The underlying issue stopping this from working is a big problem and needs to be fixed. 
+ +// use crate::Escrow; +// use dep::token::Token; + +// use aztec::{ +// oracle::{execution::{get_block_number, get_contract_address}, storage::storage_read}, +// prelude::AztecAddress, +// protocol_types::storage::map::derive_storage_slot_in_map, +// test::helpers::{cheatcodes, test_environment::TestEnvironment}, +// }; + +// pub unconstrained fn get_public_balance( +// token_contract_address: AztecAddress, +// address: AztecAddress, +// ) -> u128 { +// let current_contract_address = get_contract_address(); +// cheatcodes::set_contract_address(token_contract_address); +// let block_number = get_block_number(); + +// let balances_slot = Token::storage_layout().public_balances.slot; +// let address_slot = derive_storage_slot_in_map(balances_slot, address); +// let amount: u128 = storage_read(token_contract_address, address_slot, block_number); +// cheatcodes::set_contract_address(current_contract_address); +// amount +// } + +// pub unconstrained fn get_private_balance( +// token_contract_address: AztecAddress, +// address: AztecAddress, +// ) -> u128 { +// let current_contract_address = get_contract_address(); +// cheatcodes::set_contract_address(token_contract_address); +// // Direct call to unconstrained +// let amt = Token::balance_of_private(address); +// cheatcodes::set_contract_address(current_contract_address); +// amt +// } + +// global MINT_AMOUNT: u128 = 200000; + +// unconstrained fn deploy_contracts( +// env: &mut TestEnvironment, +// admin_and_owner: AztecAddress, +// ) -> (AztecAddress, AztecAddress) { +// env.impersonate(admin_and_owner); + +// // Deploy token contract +// let donation_token_initializer_call_interface = Token::interface().constructor( +// admin_and_owner, +// "Token00000000000000000000000000", +// "TKN0000000000000000000000000000", +// 18, +// ); +// let donation_token_contract = env +// .deploy("./@token_contract", "Token") +// .with_public_void_initializer(donation_token_initializer_call_interface); +// let token_contract_address = donation_token_contract.to_address(); +// env.advance_block_by(1); + +// // Deploy Escrow contract with public keys +// let escrow_contract_initializer_call_interface = +// Escrow::interface().constructor(admin_and_owner); +// let escrow_contract = env +// .deploy_with_public_keys("./@escrow_contract", "Escrow", 6969) +// .with_private_initializer(escrow_contract_initializer_call_interface); +// let escrow_contract_address = escrow_contract.to_address(); + +// env.advance_block_by(1); + +// Token::at(token_contract_address) +// .mint_to_private(admin_and_owner, admin_and_owner, MINT_AMOUNT) +// .call(&mut env.private()); + +// env.advance_block_by(1); + +// let private_balance_after_mint = get_private_balance(token_contract_address, admin_and_owner); +// assert(private_balance_after_mint == MINT_AMOUNT); + +// (token_contract_address, escrow_contract_address) +// } + +// #[test] +// unconstrained fn main() { +// let mut env = TestEnvironment::new(); + +// let (account_1, account_2) = (env.create_account_contract(1), env.create_account_contract(2)); + +// let (token_contract_address, escrow_contract_address) = deploy_contracts(&mut env, account_1); + +// // We transfer tokens to the escrow contract +// let TRANSFER_AMOUNT = 20000 as u128; +// Token::at(token_contract_address).transfer(escrow_contract_address, TRANSFER_AMOUNT).call( +// &mut env.private(), +// ); +// env.advance_block_by(1); + +// let balance_of_escrow_after_transfer = +// get_private_balance(token_contract_address, escrow_contract_address); + +// 
assert_eq(balance_of_escrow_after_transfer, TRANSFER_AMOUNT); + +// // We then withdraw some escrowed funds to account_2 +// let balance_of_account_2_before_withdrawal = +// get_private_balance(token_contract_address, account_2); +// assert(balance_of_account_2_before_withdrawal == 0 as u128); + +// let WITHDRAWAL_AMOUNT = 69 as u128; +// Escrow::at(escrow_contract_address) +// .withdraw(token_contract_address, WITHDRAWAL_AMOUNT, account_2) +// .call(&mut env.private()); +// env.advance_block_by(1); + +// let balance_of_account_2_after_withdrawal = +// get_private_balance(token_contract_address, account_2); +// assert(balance_of_account_2_after_withdrawal == WITHDRAWAL_AMOUNT); + +// let balance_of_escrow_after_withdrawal = +// get_private_balance(token_contract_address, escrow_contract_address); +// assert(balance_of_escrow_after_withdrawal == TRANSFER_AMOUNT - WITHDRAWAL_AMOUNT); +// } diff --git a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts index ccf87c7208c..aabef260137 100644 --- a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts +++ b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts @@ -87,7 +87,8 @@ describe('guides/dapp/testing', () => { storageSlot: ownerSlot, scopes: [owner.getAddress()], }); - const values = notes.map(note => note.note.items[0]); + // TODO(#12694): Do not rely on the ordering of members in a struct / check notes manually + const values = notes.map(note => note.note.items[2]); const balance = values.reduce((sum, current) => sum + current.toBigInt(), 0n); expect(balance).toEqual(100n); // docs:end:private-storage diff --git a/yarn-project/txe/src/node/txe_node.ts b/yarn-project/txe/src/node/txe_node.ts index 53bcbb48739..a63b8c1d309 100644 --- a/yarn-project/txe/src/node/txe_node.ts +++ b/yarn-project/txe/src/node/txe_node.ts @@ -138,7 +138,7 @@ export class TXENode implements AztecNode { const nullifiersInBlock: Fr[] = []; for (const [key, val] of this.#blockNumberToNullifiers.entries()) { - if (key < parsedBlockNumber) { + if (key <= parsedBlockNumber) { nullifiersInBlock.push(...val); } } diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 1a803280da7..5378a134880 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -1096,17 +1096,26 @@ export class TXE implements TypedOracle { return Promise.resolve(); } - deliverNote( - _contractAddress: AztecAddress, - _storageSlot: Fr, - _nonce: Fr, - _content: Fr[], - _noteHash: Fr, - _nullifier: Fr, - _txHash: Fr, - _recipient: AztecAddress, + public async deliverNote( + contractAddress: AztecAddress, + storageSlot: Fr, + nonce: Fr, + content: Fr[], + noteHash: Fr, + nullifier: Fr, + txHash: Fr, + recipient: AztecAddress, ): Promise { - throw new Error('deliverNote'); + await this.pxeOracleInterface.deliverNote( + contractAddress, + storageSlot, + nonce, + content, + noteHash, + nullifier, + txHash, + recipient, + ); } async getLogByTag(tag: Fr): Promise { diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index ee2831fa21c..40c26e2479a 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -11,6 +11,7 @@ import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { computePartialAddress } from '@aztec/stdlib/contract'; import { SimulationError } from '@aztec/stdlib/errors'; import { 
computePublicDataTreeLeafSlot, siloNullifier } from '@aztec/stdlib/hash'; +import { LogWithTxData } from '@aztec/stdlib/logs'; import { MerkleTreeId } from '@aztec/stdlib/trees'; import { TXE } from '../oracle/txe_oracle.js'; @@ -26,6 +27,7 @@ import { toArray, toForeignCallResult, toSingle, + toSingleOrArray, } from '../util/encoding.js'; import { ExpectedFailureError } from '../util/expected_failure_error.js'; @@ -521,6 +523,41 @@ export class TXEService { return toForeignCallResult([]); } + public async deliverNote( + contractAddress: ForeignCallSingle, + storageSlot: ForeignCallSingle, + nonce: ForeignCallSingle, + content: ForeignCallArray, + contentLength: ForeignCallSingle, + noteHash: ForeignCallSingle, + nullifier: ForeignCallSingle, + txHash: ForeignCallSingle, + recipient: ForeignCallSingle, + ) { + await this.typedOracle.deliverNote( + AztecAddress.fromField(fromSingle(contractAddress)), + fromSingle(storageSlot), + fromSingle(nonce), + fromArray(content.slice(0, Number(BigInt(contentLength)))), + fromSingle(noteHash), + fromSingle(nullifier), + fromSingle(txHash), + AztecAddress.fromField(fromSingle(recipient)), + ); + + return toForeignCallResult([toSingle(Fr.ONE)]); + } + + async getLogByTag(tag: ForeignCallSingle) { + const log = await this.typedOracle.getLogByTag(fromSingle(tag)); + + if (log == null) { + return toForeignCallResult([toSingle(Fr.ZERO), ...LogWithTxData.noirSerializationOfEmpty().map(toSingleOrArray)]); + } else { + return toForeignCallResult([toSingle(Fr.ONE), ...log.toNoirSerialization().map(toSingleOrArray)]); + } + } + async storeCapsule(contractAddress: ForeignCallSingle, slot: ForeignCallSingle, capsule: ForeignCallArray) { await this.typedOracle.storeCapsule( AztecAddress.fromField(fromSingle(contractAddress)), diff --git a/yarn-project/txe/src/util/encoding.ts b/yarn-project/txe/src/util/encoding.ts index c3691fc7152..2a9c65cb41a 100644 --- a/yarn-project/txe/src/util/encoding.ts +++ b/yarn-project/txe/src/util/encoding.ts @@ -49,6 +49,10 @@ export function toArray(objs: Fr[]): ForeignCallArray { return objs.map(obj => obj.toString()); } +export function toSingleOrArray(value: Fr | Fr[]) { + return Array.isArray(value) ? value.map(toSingle) : toSingle(value); +} + export function bufferToU8Array(buffer: Buffer): ForeignCallArray { return toArray(Array.from(buffer).map(byte => new Fr(byte))); }
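To close, a small standalone sketch — not part of the patch series; the example values, the relative import path, and the serialized-log shape are assumptions — of how the new `toSingleOrArray` helper is meant to be used. `getLogByTag` encodes an Option-style result: a leading flag field (0 or 1) followed by the log's Noir serialization, with each `Fr | Fr[]` member flattened into the matching foreign-call encoding.

import { Fr } from '@aztec/foundation/fields';
import { toSingle, toSingleOrArray } from '../util/encoding.js';

// Hypothetical serialized log: a mix of single fields and a fixed-size field array,
// standing in for whatever LogWithTxData.toNoirSerialization() actually produces.
const exampleLog: (Fr | Fr[])[] = [new Fr(42n), [new Fr(1n), new Fr(2n), new Fr(3n)]];

// "Some" case: flag = 1 followed by the serialized members, each encoded as either
// a single hex string (ForeignCallSingle) or an array of hex strings (ForeignCallArray).
const someResult = [toSingle(Fr.ONE), ...exampleLog.map(toSingleOrArray)];

// "None" case: flag = 0 followed by an all-zero serialization of the same shape.
const emptyLog: (Fr | Fr[])[] = [Fr.ZERO, [Fr.ZERO, Fr.ZERO, Fr.ZERO]];
const noneResult = [toSingle(Fr.ZERO), ...emptyLog.map(toSingleOrArray)];

console.log(someResult, noneResult);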