From b6c3048b728beaffbbef80f3e4f598cd0beee4a2 Mon Sep 17 00:00:00 2001 From: Gregorio Juliana Date: Thu, 14 Nov 2024 11:59:43 -0300 Subject: [PATCH] feat: remove note processor and trigger sync (#9794) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes: https://github.com/AztecProtocol/aztec-packages/issues/9370 Closes https://github.com/AztecProtocol/aztec-packages/issues/9575 Closes https://github.com/AztecProtocol/aztec-packages/issues/9786 Replaces the note processor with the tagging approach, supporting a sliding window of indexes and partial notes --------- Co-authored-by: Nicolás Venturo Co-authored-by: Santiago Palladino --- .../aztec/src/encrypted_logs/payload.nr | 17 +- .../aztec-nr/aztec/src/macros/mod.nr | 10 + .../aztec-nr/aztec/src/macros/notes/mod.nr | 1 - .../aztec-nr/aztec/src/oracle/notes.nr | 44 +- .../types/src/indexed_tagging_secret.nr | 9 +- .../archiver/src/archiver/archiver.ts | 6 +- .../archiver/src/archiver/archiver_store.ts | 4 +- .../src/archiver/archiver_store_test_suite.ts | 105 +++-- .../kv_archiver_store/kv_archiver_store.ts | 4 +- .../archiver/kv_archiver_store/log_store.ts | 22 +- .../memory_archiver_store.ts | 97 +++-- .../aztec-node/src/aztec-node/server.ts | 20 +- .../account_manager/deploy_account_sent_tx.ts | 2 - .../aztec.js/src/account_manager/index.ts | 4 +- .../aztec.js/src/contract/sent_tx.test.ts | 19 +- yarn-project/aztec.js/src/contract/sent_tx.ts | 42 +- yarn-project/aztec.js/src/index.ts | 1 - yarn-project/aztec.js/src/utils/account.ts | 33 -- yarn-project/aztec.js/src/utils/index.ts | 1 - .../aztec.js/src/wallet/base_wallet.ts | 7 - yarn-project/bot/src/bot.ts | 4 +- .../src/interfaces/archiver.test.ts | 8 +- .../circuit-types/src/interfaces/archiver.ts | 4 +- .../src/interfaces/aztec-node.test.ts | 26 +- .../src/interfaces/aztec-node.ts | 24 +- .../circuit-types/src/interfaces/pxe.test.ts | 30 -- .../circuit-types/src/interfaces/pxe.ts | 20 - 
.../src/interfaces/sync-status.ts | 3 - .../src/logs/get_logs_response.test.ts | 10 + .../src/logs/get_logs_response.ts | 42 +- .../l1_payload/encrypted_log_payload.test.ts | 8 +- .../circuit-types/src/logs/l2_logs_source.ts | 4 +- .../src/messaging/l1_to_l2_message.ts | 2 +- .../circuit-types/src/stats/metrics.ts | 32 -- yarn-project/circuit-types/src/stats/stats.ts | 48 --- .../circuit-types/src/tx/tx_receipt.ts | 15 - .../circuits.js/src/structs/tagging_secret.ts | 28 +- yarn-project/cli/src/cmds/pxe/get_block.ts | 5 +- .../benchmarks/bench_process_history.test.ts | 13 +- .../benchmarks/bench_publish_rollup.test.ts | 6 +- .../end-to-end/src/benchmarks/utils.ts | 25 +- .../src/composed/e2e_persistence.test.ts | 5 - .../end-to-end/src/e2e_2_pxes.test.ts | 47 +-- .../blacklist_token_contract_test.ts | 1 - .../minting.test.ts | 6 +- .../end-to-end/src/e2e_cheat_codes.test.ts | 1 + .../src/e2e_crowdfunding_and_claim.test.ts | 26 +- .../e2e_pending_note_hashes_contract.test.ts | 12 +- .../src/e2e_prover/e2e_prover_test.ts | 14 +- .../e2e_token_contract/token_contract_test.ts | 9 +- .../transfer_in_private.test.ts | 3 - .../end-to-end/src/fixtures/token_utils.ts | 15 +- ...akey_e2e_inclusion_proofs_contract.test.ts | 10 +- .../src/guides/dapp_testing.test.ts | 1 + .../src/shared/cross_chain_test_harness.ts | 5 +- .../pxe/src/contract_data_oracle/index.ts | 5 + .../src/database/deferred_note_dao.test.ts | 24 -- .../pxe/src/database/deferred_note_dao.ts | 47 --- .../pxe/src/database/incoming_note_dao.ts | 2 +- .../pxe/src/database/kv_pxe_database.ts | 104 ++--- .../pxe/src/database/outgoing_note_dao.ts | 2 +- yarn-project/pxe/src/database/pxe_database.ts | 47 +-- .../add_public_values_to_payload.ts | 2 +- .../brute_force_note_info.ts | 0 .../utils => note_decryption_utils}/index.ts | 0 .../produce_note_daos.ts | 22 +- .../produce_note_daos_for_key.ts | 21 +- yarn-project/pxe/src/note_processor/index.ts | 1 - .../src/note_processor/note_processor.test.ts | 391 
------------------ .../pxe/src/note_processor/note_processor.ts | 358 ---------------- .../pxe/src/pxe_service/pxe_service.ts | 42 +- .../pxe/src/simulator_oracle/index.ts | 273 +++++++----- .../simulator_oracle/simulator_oracle.test.ts | 255 +++++++++--- .../pxe/src/synchronizer/synchronizer.test.ts | 67 +-- .../pxe/src/synchronizer/synchronizer.ts | 256 +----------- .../scripts/src/benchmarks/aggregate.ts | 12 - .../simulator/src/acvm/oracle/oracle.ts | 12 +- .../simulator/src/acvm/oracle/typed_oracle.ts | 10 +- .../src/client/client_execution_context.ts | 18 +- .../simulator/src/client/db_oracle.ts | 17 +- .../src/client/private_execution.test.ts | 23 +- .../client/unconstrained_execution.test.ts | 2 + .../simulator/src/client/view_data_oracle.ts | 17 +- yarn-project/txe/src/oracle/txe_oracle.ts | 41 +- .../txe/src/txe_service/txe_service.ts | 8 +- 85 files changed, 938 insertions(+), 2101 deletions(-) delete mode 100644 yarn-project/aztec.js/src/utils/account.ts create mode 100644 yarn-project/circuit-types/src/logs/get_logs_response.test.ts delete mode 100644 yarn-project/pxe/src/database/deferred_note_dao.test.ts delete mode 100644 yarn-project/pxe/src/database/deferred_note_dao.ts rename yarn-project/pxe/src/{note_processor/utils => note_decryption_utils}/add_public_values_to_payload.ts (97%) rename yarn-project/pxe/src/{note_processor/utils => note_decryption_utils}/brute_force_note_info.ts (100%) rename yarn-project/pxe/src/{note_processor/utils => note_decryption_utils}/index.ts (100%) rename yarn-project/pxe/src/{note_processor/utils => note_decryption_utils}/produce_note_daos.ts (79%) rename yarn-project/pxe/src/{note_processor/utils => note_decryption_utils}/produce_note_daos_for_key.ts (62%) delete mode 100644 yarn-project/pxe/src/note_processor/index.ts delete mode 100644 yarn-project/pxe/src/note_processor/note_processor.test.ts delete mode 100644 yarn-project/pxe/src/note_processor/note_processor.ts diff --git 
a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index 099a253e07f..264f2898ec8 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -14,7 +14,10 @@ use std::{ use crate::{ encrypted_logs::header::EncryptedLogHeader, keys::point_to_symmetric_key::point_to_symmetric_key, - oracle::{notes::{get_app_tag_bytes, increment_app_tagging_secret}, random::random}, + oracle::{ + notes::{get_app_tag_bytes_as_sender, increment_app_tagging_secret_index_as_sender}, + random::random, + }, utils::point::point_to_bytes, }; @@ -128,8 +131,8 @@ fn compute_encrypted_log( // We assume that the sender wants for the recipient to find the tagged note, and therefore that they will cooperate // and use the correct tag. Usage of a bad tag will result in the recipient not being able to find the note // automatically. - let tag_bytes = unsafe { get_app_tag_bytes(sender, recipient) }; - increment_app_tagging_secret(sender, recipient); + let tag_bytes = unsafe { get_app_tag_bytes_as_sender(sender, recipient) }; + increment_app_tagging_secret_index_as_sender(sender, recipient); for i in 0..32 { encrypted_bytes[offset + i] = tag_bytes[i]; @@ -331,13 +334,9 @@ mod test { 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, ); - let _ = OracleMock::mock("getAppTaggingSecret").returns([ - 69420, - 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, - 1337, - ]); + let _ = OracleMock::mock("getAppTaggingSecretAsSender").returns([69420, 1337]); - let _ = OracleMock::mock("incrementAppTaggingSecret"); + let _ = OracleMock::mock("incrementAppTaggingSecretIndexAsSender").returns(()); let log = compute_private_log_payload( contract_address, diff --git a/noir-projects/aztec-nr/aztec/src/macros/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/mod.nr index 9e30b327a2e..924c5bcf8e0 100644 --- 
a/noir-projects/aztec-nr/aztec/src/macros/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/mod.nr @@ -29,11 +29,13 @@ pub comptime fn aztec(m: Module) -> Quoted { generate_compute_note_hash_and_optionally_a_nullifier(); let note_exports = generate_note_exports(); let public_dispatch = generate_public_dispatch(m); + let sync_notes = generate_sync_notes(); quote { $note_exports $interface $compute_note_hash_and_optionally_a_nullifier $public_dispatch + $sync_notes } } @@ -173,3 +175,11 @@ comptime fn generate_note_exports() -> Quoted { }) .join(quote {}) } + +comptime fn generate_sync_notes() -> Quoted { + quote { + unconstrained fn sync_notes() { + aztec::oracle::notes::sync_notes(); + } + } +} diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr index 52e66b27667..4570d2ce571 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -730,7 +730,6 @@ comptime fn generate_finalization_payload( // We append the public value to the log and emit it as unencrypted log let mut finalization_log = [0; $finalization_log_byte_length]; - // Iterate over the partial log and copy it to the final log for i in 0..setup_log.len() { finalization_log[i + 1] = setup_log[i]; diff --git a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr index c94c8a3ef7a..4bbc97be9f9 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr @@ -155,6 +155,7 @@ pub unconstrained fn get_notes, { + sync_notes_oracle_wrapper(); let fields = get_notes_oracle_wrapper( storage_slot, num_selects, @@ -203,27 +204,30 @@ pub unconstrained fn check_nullifier_exists(inner_nullifier: Field) -> bool { #[oracle(checkNullifierExists)] unconstrained fn check_nullifier_exists_oracle(_inner_nullifier: Field) -> Field {} -/// Same as `get_app_tagging_secret`, except it returns 
the derived tag as an array of bytes, ready to be included in a +/// Same as `get_app_tagging_secret_as_sender`, except it returns the derived tag as an array of bytes, ready to be included in a /// log. -pub unconstrained fn get_app_tag_bytes(sender: AztecAddress, recipient: AztecAddress) -> [u8; 32] { - let tag = get_app_tagging_secret(sender, recipient).compute_tag(); +pub unconstrained fn get_app_tag_bytes_as_sender( + sender: AztecAddress, + recipient: AztecAddress, +) -> [u8; 32] { + let tag = get_app_tagging_secret_as_sender(sender, recipient).compute_tag(recipient); tag.to_be_bytes() } /// Returns the tagging secret for a given sender and recipient pair, siloed for the current contract address. -/// Includes the last known index used for tagging with this secret. +/// Includes the last known index used to send a note tagged with this secret. /// For this to work, PXE must know the ivpsk_m of the sender. /// For the recipient's side, only the address is needed. -pub unconstrained fn get_app_tagging_secret( +pub unconstrained fn get_app_tagging_secret_as_sender( sender: AztecAddress, recipient: AztecAddress, ) -> IndexedTaggingSecret { - let result = get_app_tagging_secret_oracle(sender, recipient); + let result = get_app_tagging_secret_as_sender_oracle(sender, recipient); IndexedTaggingSecret::deserialize(result) } -#[oracle(getAppTaggingSecret)] -unconstrained fn get_app_tagging_secret_oracle( +#[oracle(getAppTaggingSecretAsSender)] +unconstrained fn get_app_tagging_secret_as_sender_oracle( _sender: AztecAddress, _recipient: AztecAddress, ) -> [Field; INDEXED_TAGGING_SECRET_LENGTH] {} @@ -233,38 +237,38 @@ unconstrained fn get_app_tagging_secret_oracle( /// This change should only be persisted in a non-volatile database if the tagged log is found in an actual block - /// otherwise e.g. a reverting transaction can cause the sender to accidentally skip indices and later produce notes /// that are not found by the recipient. 
-pub fn increment_app_tagging_secret(sender: AztecAddress, recipient: AztecAddress) { +pub fn increment_app_tagging_secret_index_as_sender(sender: AztecAddress, recipient: AztecAddress) { // This oracle call returns nothing: we only call it for its side effects. It is therefore always safe to call. unsafe { - increment_app_tagging_secret_wrapper(sender, recipient); + increment_app_tagging_secret_index_as_sender_wrapper(sender, recipient); } } -unconstrained fn increment_app_tagging_secret_wrapper( +unconstrained fn increment_app_tagging_secret_index_as_sender_wrapper( sender: AztecAddress, recipient: AztecAddress, ) { - increment_app_tagging_secret_oracle(sender, recipient); + increment_app_tagging_secret_index_as_sender_oracle(sender, recipient); } -#[oracle(incrementAppTaggingSecret)] -unconstrained fn increment_app_tagging_secret_oracle( +#[oracle(incrementAppTaggingSecretIndexAsSender)] +unconstrained fn increment_app_tagging_secret_index_as_sender_oracle( _sender: AztecAddress, _recipient: AztecAddress, ) {} -/// Finds new notes that may have been sent to `recipient` in the current contract and makes them available +/// Finds new notes that may have been sent to all registered accounts in PXE in the current contract and makes them available /// for later querying via the `get_notes` oracle. -pub fn sync_notes(recipient: AztecAddress) { +pub fn sync_notes() { // This oracle call returns nothing: we only call it for its side effects. It is therefore always safe to call. 
unsafe { - sync_notes_oracle_wrapper(recipient); + sync_notes_oracle_wrapper(); } } -unconstrained fn sync_notes_oracle_wrapper(recipient: AztecAddress) { - sync_notes_oracle(recipient); +unconstrained fn sync_notes_oracle_wrapper() { + sync_notes_oracle(); } #[oracle(syncNotes)] -unconstrained fn sync_notes_oracle(_recipient: AztecAddress) {} +unconstrained fn sync_notes_oracle() {} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr index 3ea0310ef92..89837964cff 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr @@ -2,19 +2,16 @@ use crate::traits::{Deserialize, Serialize}; use super::{address::aztec_address::AztecAddress, hash::poseidon2_hash}; use std::meta::derive; -pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 3; +pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 2; #[derive(Serialize, Deserialize)] pub struct IndexedTaggingSecret { secret: Field, - recipient: AztecAddress, index: u32, } impl IndexedTaggingSecret { - pub fn compute_tag(self) -> Field { - poseidon2_hash( - [self.secret, self.recipient.to_field(), self.index as Field], - ) + pub fn compute_tag(self, recipient: AztecAddress) -> Field { + poseidon2_hash([self.secret, recipient.to_field(), self.index as Field]) } } diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index d26f4bced68..a2a10d8c7a2 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -15,7 +15,7 @@ import { type TxEffect, type TxHash, type TxReceipt, - type TxScopedEncryptedL2NoteLog, + type TxScopedL2Log, type UnencryptedL2Log, } from '@aztec/circuit-types'; import { @@ -639,7 +639,7 @@ export class Archiver implements ArchiveSource { * @returns For each received 
tag, an array of matching logs is returned. An empty array implies no logs match * that tag. */ - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { return this.store.getLogsByTags(tags); } @@ -955,7 +955,7 @@ class ArchiverStoreHelper ): Promise>[]> { return this.store.getLogs(from, limit, logType); } - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { return this.store.getLogsByTags(tags); } getUnencryptedLogs(filter: LogFilter): Promise { diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 308f6749ecd..4eb2c80ccc0 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -9,7 +9,7 @@ import { type TxEffect, type TxHash, type TxReceipt, - type TxScopedEncryptedL2NoteLog, + type TxScopedL2Log, } from '@aztec/circuit-types'; import { type ContractClassPublic, @@ -142,7 +142,7 @@ export interface ArchiverDataStore { * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match * that tag. */ - getLogsByTags(tags: Fr[]): Promise; + getLogsByTags(tags: Fr[]): Promise; /** * Gets unencrypted logs based on the provided filter. 
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 1311c6ec03e..cd30af56a78 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -344,14 +344,24 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch describe('getLogsByTags', () => { const txsPerBlock = 4; const numPrivateFunctionCalls = 3; - const numNoteEncryptedLogs = 2; + const numPublicFunctionCalls = 1; + const numEncryptedLogsPerFn = 2; + const numUnencryptedLogsPerFn = 1; const numBlocks = 10; let blocks: L1Published[]; - let tags: { [i: number]: { [j: number]: Buffer[] } } = {}; + let encryptedLogTags: { [i: number]: { [j: number]: Buffer[] } } = {}; + let unencryptedLogTags: { [i: number]: { [j: number]: Buffer[] } } = {}; beforeEach(async () => { blocks = times(numBlocks, (index: number) => ({ - data: L2Block.random(index + 1, txsPerBlock, numPrivateFunctionCalls, 2, numNoteEncryptedLogs, 2), + data: L2Block.random( + index + 1, + txsPerBlock, + numPrivateFunctionCalls, + numPublicFunctionCalls, + numEncryptedLogsPerFn, + numUnencryptedLogsPerFn, + ), l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) }, })); // Last block has the note encrypted log tags of the first tx copied from the previous block @@ -373,47 +383,94 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch await store.addBlocks(blocks); await store.addLogs(blocks.map(b => b.data)); - tags = {}; + encryptedLogTags = {}; + unencryptedLogTags = {}; blocks.forEach((b, blockIndex) => { - if (!tags[blockIndex]) { - tags[blockIndex] = {}; + if (!encryptedLogTags[blockIndex]) { + encryptedLogTags[blockIndex] = {}; + } + if (!unencryptedLogTags[blockIndex]) { + unencryptedLogTags[blockIndex] = {}; } b.data.body.noteEncryptedLogs.txLogs.forEach((txLogs, txIndex) => { 
- if (!tags[blockIndex][txIndex]) { - tags[blockIndex][txIndex] = []; + if (!encryptedLogTags[blockIndex][txIndex]) { + encryptedLogTags[blockIndex][txIndex] = []; + } + encryptedLogTags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32))); + }); + b.data.body.unencryptedLogs.txLogs.forEach((txLogs, txIndex) => { + if (!unencryptedLogTags[blockIndex][txIndex]) { + unencryptedLogTags[blockIndex][txIndex] = []; } - tags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32))); + unencryptedLogTags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32))); }); }); }); - it('is possible to batch request all logs of a tx via tags', async () => { + it('is possible to batch request encrypted logs of a tx via tags', async () => { // get random tx from any block that's not the last one const targetBlockIndex = randomInt(numBlocks - 2); const targetTxIndex = randomInt(txsPerBlock); const logsByTags = await store.getLogsByTags( - tags[targetBlockIndex][targetTxIndex].map(buffer => new Fr(buffer)), + encryptedLogTags[targetBlockIndex][targetTxIndex].map(buffer => new Fr(buffer)), ); - const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs; + const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn; expect(logsByTags.length).toEqual(expectedResponseSize); logsByTags.forEach((logsByTag, logIndex) => { expect(logsByTag).toHaveLength(1); const [scopedLog] = logsByTag; expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash); - expect(scopedLog.log).toEqual( - blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex], + expect(scopedLog.logData).toEqual( + blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data, ); }); }); - it('is possible to batch request all logs of different blocks via tags', async () => { + // TODO: 
Allow this test when #9835 is fixed and tags can be correctly decoded + it.skip('is possible to batch request all logs (encrypted and unencrypted) of a tx via tags', async () => { + // get random tx from any block that's not the last one + const targetBlockIndex = randomInt(numBlocks - 2); + const targetTxIndex = randomInt(txsPerBlock); + + const logsByTags = await store.getLogsByTags( + encryptedLogTags[targetBlockIndex][targetTxIndex] + .concat(unencryptedLogTags[targetBlockIndex][targetTxIndex]) + .map(buffer => new Fr(buffer)), + ); + + const expectedResponseSize = + numPrivateFunctionCalls * numEncryptedLogsPerFn + numPublicFunctionCalls * numUnencryptedLogsPerFn; + expect(logsByTags.length).toEqual(expectedResponseSize); + + const encryptedLogsByTags = logsByTags.slice(0, numPrivateFunctionCalls * numEncryptedLogsPerFn); + const unencryptedLogsByTags = logsByTags.slice(numPrivateFunctionCalls * numEncryptedLogsPerFn); + encryptedLogsByTags.forEach((logsByTag, logIndex) => { + expect(logsByTag).toHaveLength(1); + const [scopedLog] = logsByTag; + expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash); + expect(scopedLog.logData).toEqual( + blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data, + ); + }); + unencryptedLogsByTags.forEach((logsByTag, logIndex) => { + expect(logsByTag).toHaveLength(1); + const [scopedLog] = logsByTag; + expect(scopedLog.logData).toEqual( + blocks[targetBlockIndex].data.body.unencryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex].data, + ); + }); + }); + + it('is possible to batch request logs of different blocks via tags', async () => { // get first tx of first block and second tx of second block - const logsByTags = await store.getLogsByTags([...tags[0][0], ...tags[1][1]].map(buffer => new Fr(buffer))); + const logsByTags = await store.getLogsByTags( + [...encryptedLogTags[0][0], ...encryptedLogTags[1][1]].map(buffer => new 
Fr(buffer)), + ); - const expectedResponseSize = 2 * numPrivateFunctionCalls * numNoteEncryptedLogs; + const expectedResponseSize = 2 * numPrivateFunctionCalls * numEncryptedLogsPerFn; expect(logsByTags.length).toEqual(expectedResponseSize); logsByTags.forEach(logsByTag => expect(logsByTag).toHaveLength(1)); @@ -421,14 +478,14 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch it('is possible to batch request logs that have the same tag but different content', async () => { // get first tx of last block - const logsByTags = await store.getLogsByTags(tags[numBlocks - 1][0].map(buffer => new Fr(buffer))); + const logsByTags = await store.getLogsByTags(encryptedLogTags[numBlocks - 1][0].map(buffer => new Fr(buffer))); - const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs; + const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn; expect(logsByTags.length).toEqual(expectedResponseSize); logsByTags.forEach(logsByTag => { expect(logsByTag).toHaveLength(2); - const [tag0, tag1] = logsByTag.map(scopedLog => new Fr(scopedLog.log.data.subarray(0, 32))); + const [tag0, tag1] = logsByTag.map(scopedLog => new Fr(scopedLog.logData.subarray(0, 32))); expect(tag0).toEqual(tag1); }); }); @@ -440,10 +497,10 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch const logsByTags = await store.getLogsByTags([ Fr.random(), - ...tags[targetBlockIndex][targetTxIndex].slice(1).map(buffer => new Fr(buffer)), + ...encryptedLogTags[targetBlockIndex][targetTxIndex].slice(1).map(buffer => new Fr(buffer)), ]); - const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs; + const expectedResponseSize = numPrivateFunctionCalls * numEncryptedLogsPerFn; expect(logsByTags.length).toEqual(expectedResponseSize); const [emptyLogsByTag, ...populatedLogsByTags] = logsByTags; @@ -453,8 +510,8 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch 
expect(logsByTag).toHaveLength(1); const [scopedLog] = logsByTag; expect(scopedLog.txHash).toEqual(blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash); - expect(scopedLog.log).toEqual( - blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex + 1], + expect(scopedLog.logData).toEqual( + blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex + 1].data, ); }); }); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index 783baa404b4..8cbb627a5ce 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -9,7 +9,7 @@ import { type TxEffect, type TxHash, type TxReceipt, - type TxScopedEncryptedL2NoteLog, + type TxScopedL2Log, } from '@aztec/circuit-types'; import { type ContractClassPublic, @@ -245,7 +245,7 @@ export class KVArchiverDataStore implements ArchiverDataStore { * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match * that tag. 
*/ - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { try { return this.#logStore.getLogsByTags(tags); } catch (err) { diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index c33f0c70b28..f6c0abbc327 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -11,7 +11,7 @@ import { type LogFilter, LogId, LogType, - TxScopedEncryptedL2NoteLog, + TxScopedL2Log, UnencryptedL2BlockL2Logs, type UnencryptedL2Log, } from '@aztec/circuit-types'; @@ -84,7 +84,15 @@ export class LogStore { } this.#log.verbose(`Found tagged (${logType}) log with tag ${tag.toString()} in block ${block.number}`); const currentLogs = taggedLogs.get(tag.toString()) ?? []; - currentLogs.push(new TxScopedEncryptedL2NoteLog(txHash, dataStartIndexForTx, log).toBuffer()); + currentLogs.push( + new TxScopedL2Log( + txHash, + dataStartIndexForTx, + block.number, + logType === 'unencryptedLogs', + log.data, + ).toBuffer(), + ); taggedLogs.set(tag.toString(), currentLogs); } catch (err) { this.#log.warn(`Failed to add tagged log to store: ${err}`); @@ -103,8 +111,7 @@ export class LogStore { const taggedLogsToAdd = blocks .flatMap(block => [ this.#extractTaggedLogs(block, 'noteEncryptedLogs'), - // TODO: process unencrypted logs in #9794 - // this.#extractTaggedLogs(block, 'unencryptedLogs'), + this.#extractTaggedLogs(block, 'unencryptedLogs'), ]) .reduce((acc, val) => { for (const [tag, logs] of val.entries()) { @@ -209,14 +216,11 @@ export class LogStore { * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match * that tag. 
*/ - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { return this.db.transaction(() => tags .map(tag => this.#logsByTag.get(tag.toString())) - .map( - noteLogBuffers => - noteLogBuffers?.map(noteLogBuffer => TxScopedEncryptedL2NoteLog.fromBuffer(noteLogBuffer)) ?? [], - ), + .map(noteLogBuffers => noteLogBuffers?.map(noteLogBuffer => TxScopedL2Log.fromBuffer(noteLogBuffer)) ?? []), ); } diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index ccf9f0cc33b..e49ab8eccc2 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -1,4 +1,5 @@ import { + type Body, type ContractClass2BlockL2Logs, type EncryptedL2BlockL2Logs, type EncryptedNoteL2BlockL2Logs, @@ -14,7 +15,7 @@ import { type TxEffect, type TxHash, TxReceipt, - TxScopedEncryptedL2NoteLog, + TxScopedL2Log, type UnencryptedL2BlockL2Logs, } from '@aztec/circuit-types'; import { @@ -53,9 +54,9 @@ export class MemoryArchiverStore implements ArchiverDataStore { private noteEncryptedLogsPerBlock: Map = new Map(); - private taggedNoteEncryptedLogs: Map = new Map(); + private taggedLogs: Map = new Map(); - private noteEncryptedLogTagsPerBlock: Map = new Map(); + private logTagsPerBlock: Map = new Map(); private encryptedLogsPerBlock: Map = new Map(); @@ -210,6 +211,56 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(true); } + #storeTaggedLogs(block: L2Block, logType: keyof Pick): void { + const dataStartIndexForBlock = + block.header.state.partial.noteHashTree.nextAvailableLeafIndex - + block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX; + block.body[logType].txLogs.forEach((txLogs, txIndex) => { + const txHash = block.body.txEffects[txIndex].txHash; + const dataStartIndexForTx = 
dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; + const logs = txLogs.unrollLogs(); + logs.forEach(log => { + if ( + (logType == 'noteEncryptedLogs' && log.data.length < 32) || + // TODO remove when #9835 and #9836 are fixed + (logType === 'unencryptedLogs' && log.data.length < 32 * 33) + ) { + this.#log.warn(`Skipping log (${logType}) with invalid data length: ${log.data.length}`); + return; + } + try { + let tag = Fr.ZERO; + // TODO remove when #9835 and #9836 are fixed. The partial note logs are emitted as bytes, but encoded as Fields. + // This means that for every 32 bytes of payload, we only have 1 byte of data. + // Also, the tag is not stored in the first 32 bytes of the log, (that's the length of public fields now) but in the next 32. + if (logType === 'unencryptedLogs') { + const correctedBuffer = Buffer.alloc(32); + const initialOffset = 32; + for (let i = 0; i < 32; i++) { + const byte = Fr.fromBuffer( + log.data.subarray(i * 32 + initialOffset, i * 32 + 32 + initialOffset), + ).toNumber(); + correctedBuffer.writeUInt8(byte, i); + } + tag = new Fr(correctedBuffer); + } else { + tag = new Fr(log.data.subarray(0, 32)); + } + this.#log.verbose(`Storing tagged (${logType}) log with tag ${tag.toString()} in block ${block.number}`); + const currentLogs = this.taggedLogs.get(tag.toString()) || []; + this.taggedLogs.set(tag.toString(), [ + ...currentLogs, + new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, logType === 'unencryptedLogs', log.data), + ]); + const currentTagsInBlock = this.logTagsPerBlock.get(block.number) || []; + this.logTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]); + } catch (err) { + this.#log.warn(`Failed to add tagged log to store: ${err}`); + } + }); + }); + } + /** * Append new logs to the store's list. * @param block - The block for which to add the logs. 
@@ -217,33 +268,9 @@ export class MemoryArchiverStore implements ArchiverDataStore { */ addLogs(blocks: L2Block[]): Promise { blocks.forEach(block => { - const dataStartIndexForBlock = - block.header.state.partial.noteHashTree.nextAvailableLeafIndex - - block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX; + void this.#storeTaggedLogs(block, 'noteEncryptedLogs'); + void this.#storeTaggedLogs(block, 'unencryptedLogs'); this.noteEncryptedLogsPerBlock.set(block.number, block.body.noteEncryptedLogs); - block.body.noteEncryptedLogs.txLogs.forEach((txLogs, txIndex) => { - const txHash = block.body.txEffects[txIndex].txHash; - const dataStartIndexForTx = dataStartIndexForBlock + txIndex * MAX_NOTE_HASHES_PER_TX; - const noteLogs = txLogs.unrollLogs(); - noteLogs.forEach(noteLog => { - if (noteLog.data.length < 32) { - this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`); - return; - } - try { - const tag = new Fr(noteLog.data.subarray(0, 32)); - const currentNoteLogs = this.taggedNoteEncryptedLogs.get(tag.toString()) || []; - this.taggedNoteEncryptedLogs.set(tag.toString(), [ - ...currentNoteLogs, - new TxScopedEncryptedL2NoteLog(txHash, dataStartIndexForTx, noteLog), - ]); - const currentTagsInBlock = this.noteEncryptedLogTagsPerBlock.get(block.number) || []; - this.noteEncryptedLogTagsPerBlock.set(block.number, [...currentTagsInBlock, tag]); - } catch (err) { - this.#log.warn(`Failed to add tagged note log to store: ${err}`); - } - }); - }); this.encryptedLogsPerBlock.set(block.number, block.body.encryptedLogs); this.unencryptedLogsPerBlock.set(block.number, block.body.unencryptedLogs); this.contractClassLogsPerBlock.set(block.number, block.body.contractClassLogs); @@ -252,19 +279,19 @@ export class MemoryArchiverStore implements ArchiverDataStore { } deleteLogs(blocks: L2Block[]): Promise { - const noteTagsToDelete = blocks.flatMap(block => this.noteEncryptedLogTagsPerBlock.get(block.number)); - noteTagsToDelete + const 
tagsToDelete = blocks.flatMap(block => this.logTagsPerBlock.get(block.number)); + tagsToDelete .filter(tag => tag != undefined) .forEach(tag => { - this.taggedNoteEncryptedLogs.delete(tag!.toString()); + this.taggedLogs.delete(tag!.toString()); }); blocks.forEach(block => { this.encryptedLogsPerBlock.delete(block.number); this.noteEncryptedLogsPerBlock.delete(block.number); this.unencryptedLogsPerBlock.delete(block.number); + this.logTagsPerBlock.delete(block.number); this.contractClassLogsPerBlock.delete(block.number); - this.noteEncryptedLogTagsPerBlock.delete(block.number); }); return Promise.resolve(true); @@ -433,8 +460,8 @@ export class MemoryArchiverStore implements ArchiverDataStore { * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match * that tag. */ - getLogsByTags(tags: Fr[]): Promise { - const noteLogs = tags.map(tag => this.taggedNoteEncryptedLogs.get(tag.toString()) || []); + getLogsByTags(tags: Fr[]): Promise { + const noteLogs = tags.map(tag => this.taggedLogs.get(tag.toString()) || []); return Promise.resolve(noteLogs); } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 9a793fd3356..eec371b96fb 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -27,7 +27,7 @@ import { type TxEffect, type TxHash, TxReceipt, - type TxScopedEncryptedL2NoteLog, + type TxScopedL2Log, TxStatus, type TxValidator, type WorldStateSynchronizer, @@ -315,7 +315,7 @@ export class AztecNodeService implements AztecNode { * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match * that tag. 
*/ - public getLogsByTags(tags: Fr[]): Promise { + public getLogsByTags(tags: Fr[]): Promise { return this.encryptedLogsSource.getLogsByTags(tags); } @@ -411,19 +411,19 @@ export class AztecNodeService implements AztecNode { } /** - * Find the index of the given leaf in the given tree. - * @param blockNumber - The block number at which to get the data + * Find the indexes of the given leaves in the given tree. + * @param blockNumber - The block number at which to get the data or 'latest' for latest data * @param treeId - The tree to search in. - * @param leafValue - The value to search for - * @returns The index of the given leaf in the given tree or undefined if not found. + * @param leafValue - The values to search for + * @returns The indexes of the given leaves in the given tree or undefined if not found. */ - public async findLeafIndex( + public async findLeavesIndexes( blockNumber: L2BlockNumber, treeId: MerkleTreeId, - leafValue: Fr, - ): Promise { + leafValues: Fr[], + ): Promise<(bigint | undefined)[]> { const committedDb = await this.#getWorldState(blockNumber); - return committedDb.findLeafIndex(treeId, leafValue.toBuffer()); + return await Promise.all(leafValues.map(leafValue => committedDb.findLeafIndex(treeId, leafValue.toBuffer()))); } /** diff --git a/yarn-project/aztec.js/src/account_manager/deploy_account_sent_tx.ts b/yarn-project/aztec.js/src/account_manager/deploy_account_sent_tx.ts index fe96e478c38..3ffdfe4ebaa 100644 --- a/yarn-project/aztec.js/src/account_manager/deploy_account_sent_tx.ts +++ b/yarn-project/aztec.js/src/account_manager/deploy_account_sent_tx.ts @@ -3,7 +3,6 @@ import { type FieldsOf } from '@aztec/foundation/types'; import { type Wallet } from '../account/index.js'; import { DefaultWaitOpts, SentTx, type WaitOpts } from '../contract/sent_tx.js'; -import { waitForAccountSynch } from '../utils/account.js'; /** Extends a transaction receipt with a wallet instance for the newly deployed contract. 
*/ export type DeployAccountTxReceipt = FieldsOf & { @@ -37,7 +36,6 @@ export class DeployAccountSentTx extends SentTx { public override async wait(opts: WaitOpts = DefaultWaitOpts): Promise { const receipt = await super.wait(opts); const wallet = await this.getWalletPromise; - await waitForAccountSynch(this.pxe, wallet.getCompleteAddress(), opts); return { ...receipt, wallet }; } } diff --git a/yarn-project/aztec.js/src/account_manager/index.ts b/yarn-project/aztec.js/src/account_manager/index.ts index 4ea49a57103..e4e3316a6db 100644 --- a/yarn-project/aztec.js/src/account_manager/index.ts +++ b/yarn-project/aztec.js/src/account_manager/index.ts @@ -8,7 +8,6 @@ import { type AccountInterface } from '../account/interface.js'; import { type DeployOptions } from '../contract/deploy_method.js'; import { DefaultWaitOpts, type WaitOpts } from '../contract/sent_tx.js'; import { DefaultMultiCallEntrypoint } from '../entrypoint/default_multi_call_entrypoint.js'; -import { waitForAccountSynch } from '../utils/account.js'; import { AccountWalletWithSecretKey, SignerlessWallet } from '../wallet/index.js'; import { DeployAccountMethod } from './deploy_account_method.js'; import { DeployAccountSentTx } from './deploy_account_sent_tx.js'; @@ -105,7 +104,7 @@ export class AccountManager { * @param opts - Options to wait for the account to be synched. * @returns A Wallet instance. 
*/ - public async register(opts: WaitOpts = DefaultWaitOpts): Promise { + public async register(): Promise { await this.pxe.registerContract({ artifact: this.accountContract.getContractArtifact(), instance: this.getInstance(), @@ -113,7 +112,6 @@ export class AccountManager { await this.pxe.registerAccount(this.secretKey, this.getCompleteAddress().partialAddress); - await waitForAccountSynch(this.pxe, this.getCompleteAddress(), opts); return this.getWallet(); } diff --git a/yarn-project/aztec.js/src/contract/sent_tx.test.ts b/yarn-project/aztec.js/src/contract/sent_tx.test.ts index d7125dd4755..13a2def98cd 100644 --- a/yarn-project/aztec.js/src/contract/sent_tx.test.ts +++ b/yarn-project/aztec.js/src/contract/sent_tx.test.ts @@ -23,34 +23,27 @@ describe('SentTx', () => { pxe.getTxReceipt.mockResolvedValue(txReceipt); }); - it('waits for all notes accounts to be synced', async () => { - pxe.getSyncStatus - .mockResolvedValueOnce({ blocks: 25, notes: { '0x1': 19, '0x2': 20 } }) - .mockResolvedValueOnce({ blocks: 25, notes: { '0x1': 20, '0x2': 20 } }); + it('waits for all notes of the accounts to be available', async () => { + pxe.getSyncStatus.mockResolvedValueOnce({ blocks: 25 }).mockResolvedValueOnce({ blocks: 25 }); const actual = await sentTx.wait({ timeout: 1, interval: 0.4 }); expect(actual).toEqual(txReceipt); }); - it('fails if an account is not synced', async () => { - pxe.getSyncStatus.mockResolvedValue({ blocks: 25, notes: { '0x1': 19, '0x2': 20 } }); - await expect(sentTx.wait({ timeout: 1, interval: 0.4 })).rejects.toThrow(/timeout/i); - }); - it('does not wait for notes sync', async () => { - pxe.getSyncStatus.mockResolvedValue({ blocks: 19, notes: { '0x1': 19, '0x2': 19 } }); - const actual = await sentTx.wait({ timeout: 1, interval: 0.4, waitForNotesSync: false }); + pxe.getSyncStatus.mockResolvedValue({ blocks: 19 }); + const actual = await sentTx.wait({ timeout: 1, interval: 0.4, waitForNotesAvailable: false }); expect(actual).toEqual(txReceipt); 
}); it('throws if tx is dropped', async () => { pxe.getTxReceipt.mockResolvedValue({ ...txReceipt, status: TxStatus.DROPPED } as TxReceipt); - pxe.getSyncStatus.mockResolvedValue({ blocks: 19, notes: { '0x1': 19, '0x2': 19 } }); + pxe.getSyncStatus.mockResolvedValue({ blocks: 19 }); await expect(sentTx.wait({ timeout: 1, interval: 0.4 })).rejects.toThrow(/dropped/); }); it('waits for the tx to be proven', async () => { - const waitOpts = { timeout: 1, interval: 0.4, waitForNotesSync: false, proven: true, provenTimeout: 2 }; + const waitOpts = { timeout: 1, interval: 0.4, waitForNotesAvailable: false, proven: true, provenTimeout: 2 }; pxe.getProvenBlockNumber.mockResolvedValue(10); await expect(sentTx.wait(waitOpts)).rejects.toThrow(/timeout/i); diff --git a/yarn-project/aztec.js/src/contract/sent_tx.ts b/yarn-project/aztec.js/src/contract/sent_tx.ts index 91b0f5ea2c0..7b85b3fd853 100644 --- a/yarn-project/aztec.js/src/contract/sent_tx.ts +++ b/yarn-project/aztec.js/src/contract/sent_tx.ts @@ -1,11 +1,4 @@ -import { - type ExtendedNote, - type GetUnencryptedLogsResponse, - type PXE, - type TxHash, - type TxReceipt, - TxStatus, -} from '@aztec/circuit-types'; +import { type GetUnencryptedLogsResponse, type PXE, type TxHash, type TxReceipt, TxStatus } from '@aztec/circuit-types'; import { retryUntil } from '@aztec/foundation/retry'; import { type FieldsOf } from '@aztec/foundation/types'; @@ -20,10 +13,10 @@ export type WaitOpts = { /** Whether to wait for the tx to be proven. */ proven?: boolean; /** - * Whether to wait for the PXE Service to sync all notes up to the block in which this tx was mined. + * Whether to wait for the node to notify that the block in which this tx was mined is available to fetch notes from. * If false, then any queries that depend on state set by this transaction may return stale data. Defaults to true. 
**/ - waitForNotesSync?: boolean; + waitForNotesAvailable?: boolean; /** Whether to include information useful for debugging/testing in the receipt. */ debug?: boolean; /** Whether to accept a revert as a status code for the tx when waiting for it. If false, will throw if the tx reverts. */ @@ -34,8 +27,8 @@ export const DefaultWaitOpts: WaitOpts = { timeout: 60, provenTimeout: 600, interval: 1, - waitForNotesSync: true, debug: false, + waitForNotesAvailable: true, }; /** @@ -74,9 +67,6 @@ export class SentTx { * @returns The transaction receipt. */ public async wait(opts?: WaitOpts): Promise> { - if (opts?.debug && opts.waitForNotesSync === false) { - throw new Error('Cannot set debug to true if waitForNotesSync is false'); - } const receipt = await this.waitForReceipt(opts); if (receipt.status !== TxStatus.SUCCESS && !opts?.dontThrowOnRevert) { throw new Error( @@ -89,15 +79,11 @@ export class SentTx { if (opts?.debug) { const txHash = await this.getTxHash(); const tx = (await this.pxe.getTxEffect(txHash))!; - const visibleIncomingNotes = await this.pxe.getIncomingNotes({ txHash }); - const visibleOutgoingNotes = await this.pxe.getOutgoingNotes({ txHash }); receipt.debugInfo = { noteHashes: tx.noteHashes, nullifiers: tx.nullifiers, publicDataWrites: tx.publicDataWrites, l2ToL1Msgs: tx.l2ToL1Msgs, - visibleIncomingNotes, - visibleOutgoingNotes, }; } return receipt; @@ -113,16 +99,6 @@ export class SentTx { return this.pxe.getUnencryptedLogs({ txHash: await this.getTxHash() }); } - /** - * Get notes of accounts registered in the provided PXE/Wallet created in this tx. - * @remarks This function will wait for the tx to be mined if it hasn't been already. - * @returns The requested notes. 
- */ - public async getVisibleNotes(): Promise { - await this.wait(); - return this.pxe.getIncomingNotes({ txHash: await this.getTxHash() }); - } - protected async waitForReceipt(opts?: WaitOpts): Promise { const txHash = await this.getTxHash(); return await retryUntil( @@ -137,15 +113,15 @@ export class SentTx { return txReceipt; } // If we don't care about waiting for notes to be synced, return the receipt - const waitForNotesSync = opts?.waitForNotesSync ?? DefaultWaitOpts.waitForNotesSync; - if (!waitForNotesSync) { + const waitForNotesAvailable = opts?.waitForNotesAvailable ?? DefaultWaitOpts.waitForNotesAvailable; + if (!waitForNotesAvailable) { return txReceipt; } // Check if all sync blocks on the PXE Service are greater or equal than the block in which the tx was mined - const { blocks, notes } = await this.pxe.getSyncStatus(); + const { blocks } = await this.pxe.getSyncStatus(); const targetBlock = txReceipt.blockNumber!; - const areNotesSynced = blocks >= targetBlock && Object.values(notes).every(block => block >= targetBlock); - return areNotesSynced ? txReceipt : undefined; + const areNotesAvailable = blocks >= targetBlock; + return areNotesAvailable ? txReceipt : undefined; }, 'isMined', opts?.timeout ?? 
DefaultWaitOpts.timeout, diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 825126491fd..5adffcba01f 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -53,7 +53,6 @@ export { generateClaimSecret, generatePublicKey, readFieldCompressedString, - waitForAccountSynch, waitForPXE, type AztecAddressLike, type EthAddressLike, diff --git a/yarn-project/aztec.js/src/utils/account.ts b/yarn-project/aztec.js/src/utils/account.ts deleted file mode 100644 index 5f11c192ede..00000000000 --- a/yarn-project/aztec.js/src/utils/account.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { type PXE } from '@aztec/circuit-types'; -import { type CompleteAddress } from '@aztec/circuits.js'; -import { retryUntil } from '@aztec/foundation/retry'; - -import { DefaultWaitOpts, type WaitOpts } from '../contract/sent_tx.js'; - -/** - * Waits for the account to finish synchronizing with the PXE Service. - * @param pxe - PXE instance - * @param address - Address to wait for synch - * @param opts - Wait options - */ -export async function waitForAccountSynch( - pxe: PXE, - address: CompleteAddress, - { interval, timeout }: WaitOpts = DefaultWaitOpts, -): Promise { - const accountAddress = address.address.toString(); - await retryUntil( - async () => { - const status = await pxe.getSyncStatus(); - const accountSynchedToBlock = status.notes[accountAddress]; - if (typeof accountSynchedToBlock === 'undefined') { - return false; - } else { - return accountSynchedToBlock >= status.blocks; - } - }, - 'waitForAccountSynch', - timeout, - interval, - ); -} diff --git a/yarn-project/aztec.js/src/utils/index.ts b/yarn-project/aztec.js/src/utils/index.ts index 4d9c7dc3969..7a980b6ca68 100644 --- a/yarn-project/aztec.js/src/utils/index.ts +++ b/yarn-project/aztec.js/src/utils/index.ts @@ -4,7 +4,6 @@ export * from './abi_types.js'; export * from './cheat_codes.js'; export * from './authwit.js'; export * from './pxe.js'; -export * from 
'./account.js'; export * from './anvil_test_watcher.js'; export * from './field_compressed_string.js'; export * from './portal_manager.js'; diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 027e646f213..00cc9127a38 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -21,7 +21,6 @@ import { type TxSimulationResult, type UniqueNote, } from '@aztec/circuit-types'; -import { type NoteProcessorStats } from '@aztec/circuit-types/stats'; import { type AztecAddress, type CompleteAddress, @@ -173,15 +172,9 @@ export abstract class BaseWallet implements Wallet { isGlobalStateSynchronized() { return this.pxe.isGlobalStateSynchronized(); } - isAccountStateSynchronized(account: AztecAddress) { - return this.pxe.isAccountStateSynchronized(account); - } getSyncStatus(): Promise { return this.pxe.getSyncStatus(); } - getSyncStats(): Promise<{ [key: string]: NoteProcessorStats }> { - return this.pxe.getSyncStats(); - } addAuthWitness(authWitness: AuthWitness) { return this.pxe.addAuthWitness(authWitness); } diff --git a/yarn-project/bot/src/bot.ts b/yarn-project/bot/src/bot.ts index 08cf67bbb38..8f3f2d65942 100644 --- a/yarn-project/bot/src/bot.ts +++ b/yarn-project/bot/src/bot.ts @@ -72,10 +72,8 @@ export class Bot { const opts = this.getSendMethodOpts(); const batch = new BatchCall(wallet, calls); - this.log.verbose(`Creating batch execution request with ${calls.length} calls`, logCtx); - await batch.create(opts); - this.log.verbose(`Simulating transaction`, logCtx); + this.log.verbose(`Simulating transaction with ${calls.length}`, logCtx); await batch.simulate(); this.log.verbose(`Proving transaction`, logCtx); diff --git a/yarn-project/circuit-types/src/interfaces/archiver.test.ts b/yarn-project/circuit-types/src/interfaces/archiver.test.ts index 90c337c4158..b6b411c3177 100644 --- a/yarn-project/circuit-types/src/interfaces/archiver.test.ts +++ 
b/yarn-project/circuit-types/src/interfaces/archiver.test.ts @@ -23,7 +23,7 @@ import { resolve } from 'path'; import { L2Block } from '../l2_block.js'; import { type L2Tips } from '../l2_block_source.js'; import { ExtendedUnencryptedL2Log } from '../logs/extended_unencrypted_l2_log.js'; -import { type GetUnencryptedLogsResponse, TxScopedEncryptedL2NoteLog } from '../logs/get_logs_response.js'; +import { type GetUnencryptedLogsResponse, TxScopedL2Log } from '../logs/get_logs_response.js'; import { EncryptedL2BlockL2Logs, EncryptedNoteL2BlockL2Logs, @@ -160,7 +160,7 @@ describe('ArchiverApiSchema', () => { it('getLogsByTags', async () => { const result = await context.client.getLogsByTags([Fr.random()]); - expect(result).toEqual([[expect.any(TxScopedEncryptedL2NoteLog)]]); + expect(result).toEqual([[expect.any(TxScopedL2Log)]]); }); it('getUnencryptedLogs', async () => { @@ -306,9 +306,9 @@ class MockArchiver implements ArchiverApi { throw new Error(`Unexpected log type: ${logType}`); } } - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { expect(tags[0]).toBeInstanceOf(Fr); - return Promise.resolve([Array.from({ length: tags.length }, () => TxScopedEncryptedL2NoteLog.random())]); + return Promise.resolve([Array.from({ length: tags.length }, () => TxScopedL2Log.random())]); } getUnencryptedLogs(filter: LogFilter): Promise { expect(filter.txHash).toBeInstanceOf(TxHash); diff --git a/yarn-project/circuit-types/src/interfaces/archiver.ts b/yarn-project/circuit-types/src/interfaces/archiver.ts index 97818e1b079..0dc118875aa 100644 --- a/yarn-project/circuit-types/src/interfaces/archiver.ts +++ b/yarn-project/circuit-types/src/interfaces/archiver.ts @@ -12,7 +12,7 @@ import { z } from 'zod'; import { L2Block } from '../l2_block.js'; import { type L2BlockSource, L2TipsSchema } from '../l2_block_source.js'; -import { GetUnencryptedLogsResponseSchema, TxScopedEncryptedL2NoteLog } from '../logs/get_logs_response.js'; +import { 
GetUnencryptedLogsResponseSchema, TxScopedL2Log } from '../logs/get_logs_response.js'; import { L2BlockL2Logs } from '../logs/l2_block_l2_logs.js'; import { type L2LogsSource } from '../logs/l2_logs_source.js'; import { LogFilterSchema } from '../logs/log_filter.js'; @@ -56,7 +56,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getLogsByTags: z .function() .args(z.array(schemas.Fr)) - .returns(z.array(z.array(TxScopedEncryptedL2NoteLog.schema))), + .returns(z.array(z.array(TxScopedL2Log.schema))), getUnencryptedLogs: z.function().args(LogFilterSchema).returns(GetUnencryptedLogsResponseSchema), getContractClassLogs: z.function().args(LogFilterSchema).returns(GetUnencryptedLogsResponseSchema), getPublicFunction: z diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index ad0cfb730d2..d415249c3f4 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -30,7 +30,7 @@ import { resolve } from 'path'; import { L2Block } from '../l2_block.js'; import { ExtendedUnencryptedL2Log } from '../logs/extended_unencrypted_l2_log.js'; -import { type GetUnencryptedLogsResponse, TxScopedEncryptedL2NoteLog } from '../logs/get_logs_response.js'; +import { type GetUnencryptedLogsResponse, TxScopedL2Log } from '../logs/get_logs_response.js'; import { EncryptedL2BlockL2Logs, EncryptedNoteL2BlockL2Logs, @@ -80,9 +80,9 @@ describe('AztecNodeApiSchema', () => { expect([...tested].sort()).toEqual(all.sort()); }); - it('findLeafIndex', async () => { - const response = await context.client.findLeafIndex(1, MerkleTreeId.ARCHIVE, Fr.random()); - expect(response).toBe(1n); + it('findLeavesIndexes', async () => { + const response = await context.client.findLeavesIndexes(1, MerkleTreeId.ARCHIVE, [Fr.random(), Fr.random()]); + expect(response).toEqual([1n, undefined]); }); it('getNullifierSiblingPath', async () => { @@ -217,7 
+217,7 @@ describe('AztecNodeApiSchema', () => { it('getLogsByTags', async () => { const response = await context.client.getLogsByTags([Fr.random()]); - expect(response).toEqual([[expect.any(TxScopedEncryptedL2NoteLog)]]); + expect(response).toEqual([[expect.any(TxScopedL2Log)]]); }); it('sendTx', async () => { @@ -318,9 +318,15 @@ describe('AztecNodeApiSchema', () => { class MockAztecNode implements AztecNode { constructor(private artifact: ContractArtifact) {} - findLeafIndex(blockNumber: number | 'latest', treeId: MerkleTreeId, leafValue: Fr): Promise { - expect(leafValue).toBeInstanceOf(Fr); - return Promise.resolve(1n); + findLeavesIndexes( + blockNumber: number | 'latest', + treeId: MerkleTreeId, + leafValues: Fr[], + ): Promise<(bigint | undefined)[]> { + expect(leafValues).toHaveLength(2); + expect(leafValues[0]).toBeInstanceOf(Fr); + expect(leafValues[1]).toBeInstanceOf(Fr); + return Promise.resolve([1n, undefined]); } getNullifierSiblingPath( blockNumber: number | 'latest', @@ -453,10 +459,10 @@ class MockAztecNode implements AztecNode { expect(filter.contractAddress).toBeInstanceOf(AztecAddress); return Promise.resolve({ logs: [ExtendedUnencryptedL2Log.random()], maxLogsHit: true }); } - getLogsByTags(tags: Fr[]): Promise { + getLogsByTags(tags: Fr[]): Promise { expect(tags).toHaveLength(1); expect(tags[0]).toBeInstanceOf(Fr); - return Promise.resolve([[TxScopedEncryptedL2NoteLog.random()]]); + return Promise.resolve([[TxScopedL2Log.random()]]); } sendTx(tx: Tx): Promise { expect(tx).toBeInstanceOf(Tx); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index eb9030474a3..91b7dd8d0ca 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -30,7 +30,7 @@ import { type LogFilter, LogFilterSchema, LogType, - TxScopedEncryptedL2NoteLog, + TxScopedL2Log, } from '../logs/index.js'; import { MerkleTreeId } 
from '../merkle_tree_id.js'; import { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; @@ -50,13 +50,17 @@ import { type ProverCoordination, ProverCoordinationApiSchema } from './prover-c */ export interface AztecNode extends ProverCoordination { /** - * Find the index of the given leaf in the given tree. + * Find the indexes of the given leaves in the given tree. * @param blockNumber - The block number at which to get the data or 'latest' for latest data * @param treeId - The tree to search in. - * @param leafValue - The value to search for - * @returns The index of the given leaf in the given tree or undefined if not found. + * @param leafValue - The values to search for + * @returns The indexes of the given leaves in the given tree or undefined if not found. */ - findLeafIndex(blockNumber: L2BlockNumber, treeId: MerkleTreeId, leafValue: Fr): Promise; + findLeavesIndexes( + blockNumber: L2BlockNumber, + treeId: MerkleTreeId, + leafValues: Fr[], + ): Promise<(bigint | undefined)[]>; /** * Returns a sibling path for the given index in the nullifier tree. @@ -270,7 +274,7 @@ export interface AztecNode extends ProverCoordination { * @returns For each received tag, an array of matching logs and metadata (e.g. tx hash) is returned. An empty array implies no logs match that tag. */ - getLogsByTags(tags: Fr[]): Promise; + getLogsByTags(tags: Fr[]): Promise; /** * Method to submit a transaction to the p2p pool. 
@@ -392,10 +396,10 @@ export interface AztecNode extends ProverCoordination { export const AztecNodeApiSchema: ApiSchemaFor = { ...ProverCoordinationApiSchema, - findLeafIndex: z + findLeavesIndexes: z .function() - .args(L2BlockNumberSchema, z.nativeEnum(MerkleTreeId), schemas.Fr) - .returns(schemas.BigInt.optional()), + .args(L2BlockNumberSchema, z.nativeEnum(MerkleTreeId), z.array(schemas.Fr)) + .returns(z.array(optional(schemas.BigInt))), getNullifierSiblingPath: z .function() @@ -475,7 +479,7 @@ export const AztecNodeApiSchema: ApiSchemaFor = { getLogsByTags: z .function() .args(z.array(schemas.Fr)) - .returns(z.array(z.array(TxScopedEncryptedL2NoteLog.schema))), + .returns(z.array(z.array(TxScopedL2Log.schema))), sendTx: z.function().args(Tx.schema).returns(z.void()), diff --git a/yarn-project/circuit-types/src/interfaces/pxe.test.ts b/yarn-project/circuit-types/src/interfaces/pxe.test.ts index 35356a8a677..8aefa059edb 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.test.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.test.ts @@ -37,7 +37,6 @@ import { ExtendedNote, type OutgoingNotesFilter, UniqueNote } from '../notes/ind import { PrivateExecutionResult } from '../private_execution_result.js'; import { type EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; -import { type NoteProcessorStats } from '../stats/stats.js'; import { Tx, TxHash, TxProvingResult, TxReceipt, TxSimulationResult } from '../tx/index.js'; import { TxEffect } from '../tx_effect.js'; import { TxExecutionRequest } from '../tx_execution_request.js'; @@ -256,21 +255,11 @@ describe('PXESchema', () => { expect(result).toBe(true); }); - it('isAccountStateSynchronized', async () => { - const result = await context.client.isAccountStateSynchronized(address); - expect(result).toBe(true); - }); - it('getSyncStatus', async () => { const result = await context.client.getSyncStatus(); 
expect(result).toEqual(await handler.getSyncStatus()); }); - it('getSyncStats', async () => { - const result = await context.client.getSyncStats(); - expect(result).toEqual(await handler.getSyncStats()); - }); - it('getContractInstance', async () => { const result = await context.client.getContractInstance(address); expect(result).toEqual(instance); @@ -504,28 +493,9 @@ class MockPXE implements PXE { isGlobalStateSynchronized(): Promise { return Promise.resolve(true); } - isAccountStateSynchronized(account: AztecAddress): Promise { - expect(account).toEqual(this.address); - return Promise.resolve(true); - } getSyncStatus(): Promise { return Promise.resolve({ blocks: 1, - notes: { [this.address.toString()]: 1 }, - }); - } - getSyncStats(): Promise<{ [key: string]: NoteProcessorStats }> { - return Promise.resolve({ - [this.address.toString()]: { - seen: 1, - deferredIncoming: 1, - deferredOutgoing: 1, - decryptedIncoming: 1, - decryptedOutgoing: 1, - failed: 1, - blocks: 1, - txs: 1, - }, }); } getContractInstance(address: AztecAddress): Promise { diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 7926cfa6d04..4bfa62f74bb 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -38,7 +38,6 @@ import { type IncomingNotesFilter, IncomingNotesFilterSchema } from '../notes/in import { ExtendedNote, type OutgoingNotesFilter, OutgoingNotesFilterSchema, UniqueNote } from '../notes/index.js'; import { PrivateExecutionResult } from '../private_execution_result.js'; import { SiblingPath } from '../sibling_path/sibling_path.js'; -import { type NoteProcessorStats, NoteProcessorStatsSchema } from '../stats/stats.js'; import { Tx, TxHash, TxProvingResult, TxReceipt, TxSimulationResult } from '../tx/index.js'; import { TxEffect } from '../tx_effect.js'; import { TxExecutionRequest } from '../tx_execution_request.js'; @@ -352,17 +351,6 @@ export 
interface PXE { */ isGlobalStateSynchronized(): Promise; - /** - * Checks if the specified account is synchronized. - * @param account - The aztec address for which to query the sync status. - * @returns True if the account is fully synched, false otherwise. - * @deprecated Use `getSyncStatus` instead. - * @remarks Checks whether all the notes from all the blocks have been processed. If it is not the case, the - * retrieved information from contracts might be old/stale (e.g. old token balance). - * @throws If checking a sync status of account which is not registered. - */ - isAccountStateSynchronized(account: AztecAddress): Promise; - /** * Returns the latest block that has been synchronized globally and for each account. The global block number * indicates whether global state has been updated up to that block, whereas each address indicates up to which @@ -371,12 +359,6 @@ export interface PXE { */ getSyncStatus(): Promise; - /** - * Returns the note processor stats. - * @returns The note processor stats for notes for each public key being tracked. - */ - getSyncStats(): Promise<{ [key: string]: NoteProcessorStats }>; - /** * Returns a Contract Instance given its address, which includes the contract class identifier, * initialization hash, deployment salt, and public keys hash. 
@@ -549,9 +531,7 @@ export const PXESchema: ApiSchemaFor = { getNodeInfo: z.function().returns(NodeInfoSchema), getPXEInfo: z.function().returns(PXEInfoSchema), isGlobalStateSynchronized: z.function().returns(z.boolean()), - isAccountStateSynchronized: z.function().args(schemas.AztecAddress).returns(z.boolean()), getSyncStatus: z.function().returns(SyncStatusSchema), - getSyncStats: z.function().returns(z.record(NoteProcessorStatsSchema)), getContractInstance: z .function() .args(schemas.AztecAddress) diff --git a/yarn-project/circuit-types/src/interfaces/sync-status.ts b/yarn-project/circuit-types/src/interfaces/sync-status.ts index 59696232b5d..b85a13620d6 100644 --- a/yarn-project/circuit-types/src/interfaces/sync-status.ts +++ b/yarn-project/circuit-types/src/interfaces/sync-status.ts @@ -6,11 +6,8 @@ import { z } from 'zod'; export type SyncStatus = { /** Up to which block has been synched for blocks and txs. */ blocks: number; - /** Up to which block has been synched for notes, indexed by each account address being monitored. 
*/ - notes: Record; }; export const SyncStatusSchema = z.object({ blocks: z.number(), - notes: z.record(z.number()), }) satisfies ZodFor; diff --git a/yarn-project/circuit-types/src/logs/get_logs_response.test.ts b/yarn-project/circuit-types/src/logs/get_logs_response.test.ts new file mode 100644 index 00000000000..8ec54978a64 --- /dev/null +++ b/yarn-project/circuit-types/src/logs/get_logs_response.test.ts @@ -0,0 +1,10 @@ +import { jsonStringify } from '@aztec/foundation/json-rpc'; + +import { TxScopedL2Log } from './get_logs_response.js'; + +describe('TxScopedL2Log', () => { + it('serializes to JSON', () => { + const log = TxScopedL2Log.random(); + expect(TxScopedL2Log.schema.parse(JSON.parse(jsonStringify(log)))).toEqual(log); + }); +}); diff --git a/yarn-project/circuit-types/src/logs/get_logs_response.ts b/yarn-project/circuit-types/src/logs/get_logs_response.ts index 61df2d208bb..c62b5af965c 100644 --- a/yarn-project/circuit-types/src/logs/get_logs_response.ts +++ b/yarn-project/circuit-types/src/logs/get_logs_response.ts @@ -1,11 +1,10 @@ import { Fr } from '@aztec/circuits.js'; -import { type ZodFor } from '@aztec/foundation/schemas'; -import { BufferReader, numToUInt32BE } from '@aztec/foundation/serialize'; +import { type ZodFor, schemas } from '@aztec/foundation/schemas'; +import { BufferReader, boolToBuffer, numToUInt32BE } from '@aztec/foundation/serialize'; import { z } from 'zod'; import { TxHash } from '../tx/tx_hash.js'; -import { EncryptedL2NoteLog } from './encrypted_l2_note_log.js'; import { ExtendedUnencryptedL2Log } from './extended_unencrypted_l2_log.js'; /** Response for the getUnencryptedLogs archiver call. 
*/ @@ -21,7 +20,7 @@ export const GetUnencryptedLogsResponseSchema = z.object({ maxLogsHit: z.boolean(), }) satisfies ZodFor; -export class TxScopedEncryptedL2NoteLog { +export class TxScopedL2Log { constructor( /* * Hash of the tx where the log is included @@ -33,9 +32,17 @@ export class TxScopedEncryptedL2NoteLog { */ public dataStartIndexForTx: number, /* - * The encrypted note log + * The block this log is included in */ - public log: EncryptedL2NoteLog, + public blockNumber: number, + /* + * Indicates if the log comes from the unencrypted logs stream (partial note) + */ + public isFromPublic: boolean, + /* + * The log data + */ + public logData: Buffer, ) {} static get schema() { @@ -43,27 +50,38 @@ export class TxScopedEncryptedL2NoteLog { .object({ txHash: TxHash.schema, dataStartIndexForTx: z.number(), - log: EncryptedL2NoteLog.schema, + blockNumber: z.number(), + isFromPublic: z.boolean(), + logData: schemas.BufferB64, }) .transform( - ({ txHash, dataStartIndexForTx, log }) => new TxScopedEncryptedL2NoteLog(txHash, dataStartIndexForTx, log), + ({ txHash, dataStartIndexForTx, blockNumber, isFromPublic, logData }) => + new TxScopedL2Log(txHash, dataStartIndexForTx, blockNumber, isFromPublic, logData), ); } toBuffer() { - return Buffer.concat([this.txHash.toBuffer(), numToUInt32BE(this.dataStartIndexForTx), this.log.toBuffer()]); + return Buffer.concat([ + this.txHash.toBuffer(), + numToUInt32BE(this.dataStartIndexForTx), + numToUInt32BE(this.blockNumber), + boolToBuffer(this.isFromPublic), + this.logData, + ]); } static fromBuffer(buffer: Buffer) { const reader = BufferReader.asReader(buffer); - return new TxScopedEncryptedL2NoteLog( + return new TxScopedL2Log( TxHash.fromField(reader.readObject(Fr)), reader.readNumber(), - EncryptedL2NoteLog.fromBuffer(reader.readToEnd()), + reader.readNumber(), + reader.readBoolean(), + reader.readToEnd(), ); } static random() { - return new TxScopedEncryptedL2NoteLog(TxHash.random(), 1, EncryptedL2NoteLog.random()); + 
return new TxScopedL2Log(TxHash.random(), 1, 1, false, Fr.random().toBuffer()); } } diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index 7a34d206221..9560fb54ddd 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -115,17 +115,15 @@ describe('EncryptedLogPayload', () => { ); // We set a random secret, as it is simply the result of an oracle call, and we are not actually computing this in nr. - const logTag = new IndexedTaggingSecret( - new Fr(69420), + const logTag = new IndexedTaggingSecret(new Fr(69420), 1337).computeTag( AztecAddress.fromBigInt(0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70cn), - 1337, - ).computeTag(); + ); const tagString = logTag.toString().slice(2); let byteArrayString = `[${tagString.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; updateInlineTestData( 'noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr', - 'tag_from_typescript', + 'encrypted_log_from_typescript', byteArrayString, ); diff --git a/yarn-project/circuit-types/src/logs/l2_logs_source.ts b/yarn-project/circuit-types/src/logs/l2_logs_source.ts index d892037e459..804130711e4 100644 --- a/yarn-project/circuit-types/src/logs/l2_logs_source.ts +++ b/yarn-project/circuit-types/src/logs/l2_logs_source.ts @@ -1,6 +1,6 @@ import { type Fr } from '@aztec/circuits.js'; -import { type GetUnencryptedLogsResponse, type TxScopedEncryptedL2NoteLog } from './get_logs_response.js'; +import { type GetUnencryptedLogsResponse, type TxScopedL2Log } from './get_logs_response.js'; import { type L2BlockL2Logs } from './l2_block_l2_logs.js'; import { type LogFilter } from './log_filter.js'; import { type FromLogType, type LogType } from './log_type.js'; @@ -28,7 +28,7 @@ export interface L2LogsSource { * @returns For each received 
tag, an array of matching logs is returned. An empty array implies no logs match * that tag. */ - getLogsByTags(tags: Fr[]): Promise; + getLogsByTags(tags: Fr[]): Promise; /** * Gets unencrypted logs based on the provided filter. diff --git a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts index 6d8ada947b9..e68a73a83e8 100644 --- a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts +++ b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts @@ -87,7 +87,7 @@ export async function getNonNullifiedL1ToL2MessageWitness( const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); - const nullifierIndex = await node.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, messageNullifier); + const [nullifierIndex] = await node.findLeavesIndexes('latest', MerkleTreeId.NULLIFIER_TREE, [messageNullifier]); if (nullifierIndex !== undefined) { throw new Error(`No non-nullified L1 to L2 message found for message hash ${messageHash.toString()}`); } diff --git a/yarn-project/circuit-types/src/stats/metrics.ts b/yarn-project/circuit-types/src/stats/metrics.ts index 8960a9dd962..70d41f4a999 100644 --- a/yarn-project/circuit-types/src/stats/metrics.ts +++ b/yarn-project/circuit-types/src/stats/metrics.ts @@ -109,19 +109,6 @@ export const Metrics = [ description: 'Time for the state synchronizer to process an L2 block that was not built by its own sequencer.', events: ['l2-block-handled'], }, - { - name: 'note_successful_decrypting_time_in_ms', - groupBy: 'block-size', - description: 'Time for the PXE to decrypt all notes in a block where they belong to a registered account.', - events: ['note-processor-caught-up'], - }, - { - name: 'note_trial_decrypting_time_in_ms', - groupBy: 'block-size', - description: - 'Time for the PXE to try decrypting all notes in a block where they do not belong to a registered account.', - events: ['note-processor-caught-up'], - }, 
{ name: 'l2_block_building_time_in_ms', groupBy: 'block-size', @@ -146,31 +133,12 @@ export const Metrics = [ description: 'Time for a node without a sequencer to sync chain history', events: ['node-synced-chain-history'], }, - { - name: 'note_history_successful_decrypting_time_in_ms', - groupBy: 'chain-length', - description: 'Time for a PXE to decrypt all notes in the chain where they belong to a registered account.', - events: ['note-processor-caught-up'], - }, - { - name: 'note_history_trial_decrypting_time_in_ms', - groupBy: 'chain-length', - description: - 'Time for a PXE to try decrypting all notes in the chain where they do not belong to a registered account.', - events: ['note-processor-caught-up'], - }, { name: 'node_database_size_in_bytes', groupBy: 'chain-length', description: 'Size on disk of the leveldown database of a node after syncing all chain history.', events: ['node-synced-chain-history'], }, - { - name: 'pxe_database_size_in_bytes', - groupBy: 'chain-length', - description: 'Estimated size in memory of a PXE database after syncing all notes that belong to it in the chain.', - events: ['note-processor-caught-up'], - }, { name: 'protocol_circuit_simulation_time_in_ms', groupBy: 'protocol-circuit-name', diff --git a/yarn-project/circuit-types/src/stats/stats.ts b/yarn-project/circuit-types/src/stats/stats.ts index 4f63e0ad568..f9f94709845 100644 --- a/yarn-project/circuit-types/src/stats/stats.ts +++ b/yarn-project/circuit-types/src/stats/stats.ts @@ -1,7 +1,3 @@ -import { type ZodFor } from '@aztec/foundation/schemas'; - -import { z } from 'zod'; - /** Stats associated with an ACIR proof generation.*/ export type ProofConstructed = { /** Name of the event for metrics purposes */ @@ -207,49 +203,6 @@ export type L2BlockHandledStats = { oldestHistoricBlock: bigint; } & L2BlockStats; -/** Stats for a note processor that has caught up with the chain. */ -export type NoteProcessorCaughtUpStats = { - /** Name of the event. 
*/ - eventName: 'note-processor-caught-up'; - /** Account the note processor belongs to. */ - account: string; - /** Total time to catch up with the tip of the chain from scratch in ms. */ - duration: number; - /** Size of the notes db. */ - dbSize: number; -} & NoteProcessorStats; - -/** Accumulated rolling stats for a note processor. */ -export type NoteProcessorStats = { - /** How many notes have been seen and trial-decrypted. */ - seen: number; - /** How many notes had decryption deferred due to a missing contract */ - deferredIncoming: number; - /** How many notes had decryption deferred due to a missing contract */ - deferredOutgoing: number; - /** How many incoming notes were successfully decrypted. */ - decryptedIncoming: number; - /** How many outgoing notes were successfully decrypted. */ - decryptedOutgoing: number; - /** How many notes failed processing. */ - failed: number; - /** How many blocks were spanned. */ - blocks: number; - /** How many txs were spanned. */ - txs: number; -}; - -export const NoteProcessorStatsSchema = z.object({ - seen: z.number(), - deferredIncoming: z.number(), - deferredOutgoing: z.number(), - decryptedIncoming: z.number(), - decryptedOutgoing: z.number(), - failed: z.number(), - blocks: z.number(), - txs: z.number(), -}) satisfies ZodFor; - /** Stats for a tx. */ export type TxStats = { /** Hash of the tx. 
*/ @@ -322,7 +275,6 @@ export type Stats = | L2BlockBuiltStats | L2BlockHandledStats | NodeSyncedChainHistoryStats - | NoteProcessorCaughtUpStats | ProofConstructed | TreeInsertionStats | TxAddedToPoolStats; diff --git a/yarn-project/circuit-types/src/tx/tx_receipt.ts b/yarn-project/circuit-types/src/tx/tx_receipt.ts index 3d92c79adf6..280dae346b7 100644 --- a/yarn-project/circuit-types/src/tx/tx_receipt.ts +++ b/yarn-project/circuit-types/src/tx/tx_receipt.ts @@ -5,7 +5,6 @@ import { type FieldsOf } from '@aztec/foundation/types'; import { z } from 'zod'; -import { UniqueNote } from '../notes/extended_note.js'; import { TxHash } from './tx_hash.js'; /** @@ -141,18 +140,6 @@ interface DebugInfo { * New L2 to L1 messages created by the transaction. */ l2ToL1Msgs: Fr[]; - /** - * Notes created in this tx which were successfully decoded with the incoming keys of accounts which are registered - * in the PXE which was used to submit the tx. You will not get notes of accounts which are not registered in - * the PXE here even though they were created in this tx. - */ - visibleIncomingNotes: UniqueNote[]; - /** - * Notes created in this tx which were successfully decoded with the outgoing keys of accounts which are registered - * in the PXE which was used to submit the tx. You will not get notes of accounts which are not registered in - * the PXE here even though they were created in this tx. 
- */ - visibleOutgoingNotes: UniqueNote[]; } const DebugInfoSchema = z.object({ @@ -160,6 +147,4 @@ const DebugInfoSchema = z.object({ nullifiers: z.array(schemas.Fr), publicDataWrites: z.array(PublicDataWrite.schema), l2ToL1Msgs: z.array(schemas.Fr), - visibleIncomingNotes: z.array(UniqueNote.schema), - visibleOutgoingNotes: z.array(UniqueNote.schema), }); diff --git a/yarn-project/circuits.js/src/structs/tagging_secret.ts b/yarn-project/circuits.js/src/structs/tagging_secret.ts index 3275346cf1a..0c5c7175d7f 100644 --- a/yarn-project/circuits.js/src/structs/tagging_secret.ts +++ b/yarn-project/circuits.js/src/structs/tagging_secret.ts @@ -1,33 +1,19 @@ -import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; -export class TaggingSecret { - constructor(public secret: Fr, public recipient: AztecAddress) {} +export class IndexedTaggingSecret { + constructor(public secret: Fr, public index: number) {} toFields(): Fr[] { - return [this.secret, this.recipient.toField()]; - } -} - -export class IndexedTaggingSecret extends TaggingSecret { - constructor(secret: Fr, recipient: AztecAddress, public index: number) { - super(secret, recipient); - } - - override toFields(): Fr[] { - return [this.secret, this.recipient.toField(), new Fr(this.index)]; + return [this.secret, new Fr(this.index)]; } static fromFields(serialized: Fr[]) { - return new this(serialized[0], AztecAddress.fromField(serialized[1]), serialized[2].toNumber()); - } - - static fromTaggingSecret(directionalSecret: TaggingSecret, index: number) { - return new this(directionalSecret.secret, directionalSecret.recipient, index); + return new this(serialized[0], serialized[1].toNumber()); } - computeTag() { - return poseidon2Hash([this.secret, this.recipient, this.index]); + computeTag(recipient: AztecAddress) { + return 
poseidon2Hash([this.secret, recipient, this.index]); } } diff --git a/yarn-project/cli/src/cmds/pxe/get_block.ts b/yarn-project/cli/src/cmds/pxe/get_block.ts index c43d3633ef9..efe47f4148e 100644 --- a/yarn-project/cli/src/cmds/pxe/get_block.ts +++ b/yarn-project/cli/src/cmds/pxe/get_block.ts @@ -19,9 +19,8 @@ export async function getBlock( setInterval(async () => { const newBlock = await client.getBlockNumber(); if (newBlock > lastBlock) { - const { blocks, notes } = await client.getSyncStatus(); - const areNotesSynced = blocks >= newBlock && Object.values(notes).every(block => block >= newBlock); - if (areNotesSynced) { + const { blocks } = await client.getSyncStatus(); + if (blocks >= newBlock) { log(''); await inspectBlock(client, newBlock, log, { showTxs: true }); lastBlock = newBlock; diff --git a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts index 68524a7c743..adc29c28d63 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts @@ -9,14 +9,7 @@ import { type BenchmarkingContract } from '@aztec/noir-contracts.js/Benchmarking import { type SequencerClient } from '@aztec/sequencer-client'; import { type EndToEndContext } from '../fixtures/utils.js'; -import { - benchmarkSetup, - getFolderSize, - makeDataDirectory, - sendTxs, - waitNewPXESynced, - waitRegisteredAccountSynced, -} from './utils.js'; +import { benchmarkSetup, getFolderSize, makeDataDirectory, sendTxs, waitNewPXESynced } from './utils.js'; const BLOCK_SIZE = BENCHMARK_HISTORY_BLOCK_SIZE; const CHAIN_LENGTHS = BENCHMARK_HISTORY_CHAIN_LENGTHS; @@ -82,11 +75,11 @@ describe('benchmarks/process_history', () => { context.logger.info(`Registering owner account on new pxe`); const partialAddress = context.wallet.getCompleteAddress().partialAddress; const secretKey = context.wallet.getSecretKey(); - await 
waitRegisteredAccountSynced(pxe, secretKey, partialAddress); + await pxe.registerAccount(secretKey, partialAddress); // Repeat for another account that didn't receive any notes for them, so we measure trial-decrypts context.logger.info(`Registering fresh account on new pxe`); - await waitRegisteredAccountSynced(pxe, Fr.random(), Fr.random()); + await pxe.registerAccount(Fr.random(), Fr.random()); // Stop the external node and pxe await pxe.stop(); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index 0463c9c8ac9..21612ccbe85 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -5,7 +5,7 @@ import { type BenchmarkingContract } from '@aztec/noir-contracts.js/Benchmarking import { type SequencerClient } from '@aztec/sequencer-client'; import { type EndToEndContext } from '../fixtures/utils.js'; -import { benchmarkSetup, sendTxs, waitNewPXESynced, waitRegisteredAccountSynced } from './utils.js'; +import { benchmarkSetup, sendTxs, waitNewPXESynced } from './utils.js'; describe('benchmarks/publish_rollup', () => { let context: EndToEndContext; @@ -47,11 +47,11 @@ describe('benchmarks/publish_rollup', () => { context.logger.info(`Registering owner account on new pxe`); const partialAddress = context.wallet.getCompleteAddress().partialAddress; const secretKey = context.wallet.getSecretKey(); - await waitRegisteredAccountSynced(pxe, secretKey, partialAddress); + await pxe.registerAccount(secretKey, partialAddress); // Repeat for another account that didn't receive any notes for them, so we measure trial-decrypts context.logger.info(`Registering fresh account on new pxe`); - await waitRegisteredAccountSynced(pxe, Fr.random(), Fr.random()); + await pxe.registerAccount(Fr.random(), Fr.random()); // Stop the external node and pxe await pxe.stop(); diff --git 
a/yarn-project/end-to-end/src/benchmarks/utils.ts b/yarn-project/end-to-end/src/benchmarks/utils.ts index 36c5f1fed76..5432a7f6781 100644 --- a/yarn-project/end-to-end/src/benchmarks/utils.ts +++ b/yarn-project/end-to-end/src/benchmarks/utils.ts @@ -1,15 +1,5 @@ import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { - type AztecNode, - BatchCall, - type Fr, - INITIAL_L2_BLOCK_NUM, - type PXE, - type PartialAddress, - type SentTx, - retryUntil, - sleep, -} from '@aztec/aztec.js'; +import { type AztecNode, BatchCall, INITIAL_L2_BLOCK_NUM, type SentTx, retryUntil, sleep } from '@aztec/aztec.js'; import { times } from '@aztec/foundation/collection'; import { randomInt } from '@aztec/foundation/crypto'; import { BenchmarkingContract } from '@aztec/noir-contracts.js/Benchmarking'; @@ -120,16 +110,3 @@ export async function waitNewPXESynced( await retryUntil(() => pxe.isGlobalStateSynchronized(), 'pxe-global-sync'); return pxe; } - -/** - * Registers a new account in a pxe and waits until it's synced all its notes. - * @param pxe - PXE where to register the account. - * @param secretKey - Secret key of the account to register. - * @param partialAddress - Partial address of the account to register. 
- */ -export async function waitRegisteredAccountSynced(pxe: PXE, secretKey: Fr, partialAddress: PartialAddress) { - const l2Block = await pxe.getBlockNumber(); - const accountAddress = (await pxe.registerAccount(secretKey, partialAddress)).address; - const isAccountSynced = async () => (await pxe.getSyncStatus()).notes[accountAddress.toString()] === l2Block; - await retryUntil(isAccountSynced, 'pxe-notes-sync'); -} diff --git a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts index fbe36b3effc..df8cbcc47b0 100644 --- a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts @@ -6,7 +6,6 @@ import { Note, type TxHash, computeSecretHash, - waitForAccountSynch, } from '@aztec/aztec.js'; import { type Salt } from '@aztec/aztec.js/account'; import { type AztecAddress, type CompleteAddress, Fr, deriveSigningKey } from '@aztec/circuits.js'; @@ -243,8 +242,6 @@ describe('Aztec persistence', () => { const ownerWallet = await ownerAccount.getWallet(); const contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); - await waitForAccountSynch(context.pxe, ownerAddress, { interval: 1, timeout: 10 }); - // check that notes total more than 0 so that this test isn't dependent on run order await expect(contract.methods.balance_of_private(ownerAddress.address).simulate()).resolves.toBeGreaterThan(0n); }); @@ -297,8 +294,6 @@ describe('Aztec persistence', () => { const signingKey = deriveSigningKey(ownerSecretKey); ownerWallet = await getUnsafeSchnorrWallet(context.pxe, ownerAddress.address, signingKey); contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); - - await waitForAccountSynch(context.pxe, ownerAddress, { interval: 0.1, timeout: 5 }); }); afterEach(async () => { diff --git a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index 
d0c3c94695f..c95e16b9d7a 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -47,6 +47,9 @@ describe('e2e_2_pxes', () => { /*TODO(post-honk): We wait 5 seconds for a race condition in setting up two nodes. What is a more robust solution? */ await sleep(5000); + + await walletA.registerContact(walletB.getAddress()); + await walletB.registerContact(walletA.getAddress()); }); afterEach(async () => { @@ -152,30 +155,10 @@ describe('e2e_2_pxes', () => { await expectTokenBalance(walletB, token, walletB.getAddress(), userBBalance, logger); // CHECK THAT PRIVATE BALANCES ARE 0 WHEN ACCOUNT'S SECRET KEYS ARE NOT REGISTERED - // Note: Not checking if the account is synchronized because it is not registered as an account (it would throw). - const checkIfSynchronized = false; // Check that user A balance is 0 on server B - await expectTokenBalance(walletB, token, walletA.getAddress(), 0n, logger, checkIfSynchronized); + await expectTokenBalance(walletB, token, walletA.getAddress(), 0n, logger); // Check that user B balance is 0 on server A - await expectTokenBalance(walletA, token, walletB.getAddress(), 0n, logger, checkIfSynchronized); - }); - - it('permits migrating an account from one PXE to another', async () => { - const secretKey = Fr.random(); - const account = getUnsafeSchnorrAccount(pxeA, secretKey, Fr.random()); - const completeAddress = account.getCompleteAddress(); - const wallet = await account.waitSetup(); - - await expect(wallet.isAccountStateSynchronized(completeAddress.address)).resolves.toBe(true); - const accountOnB = getUnsafeSchnorrAccount(pxeB, secretKey, account.salt); - const walletOnB = await accountOnB.getWallet(); - - // need to register first otherwise the new PXE won't know about the account - await expect(walletOnB.isAccountStateSynchronized(completeAddress.address)).rejects.toThrow(); - - await accountOnB.register(); - // registering should wait for the account to be synchronized - 
await expect(walletOnB.isAccountStateSynchronized(completeAddress.address)).resolves.toBe(true); + await expectTokenBalance(walletA, token, walletB.getAddress(), 0n, logger); }); it('permits sending funds to a user before they have registered the contract', async () => { @@ -208,12 +191,15 @@ describe('e2e_2_pxes', () => { const sharedAccountOnA = getUnsafeSchnorrAccount(pxeA, sharedSecretKey, Fr.random()); const sharedAccountAddress = sharedAccountOnA.getCompleteAddress(); const sharedWalletOnA = await sharedAccountOnA.waitSetup(); - await expect(sharedWalletOnA.isAccountStateSynchronized(sharedAccountAddress.address)).resolves.toBe(true); + + await sharedWalletOnA.registerContact(walletA.getAddress()); const sharedAccountOnB = getUnsafeSchnorrAccount(pxeB, sharedSecretKey, sharedAccountOnA.salt); await sharedAccountOnB.register(); const sharedWalletOnB = await sharedAccountOnB.getWallet(); + await sharedWalletOnB.registerContact(sharedWalletOnA.getAddress()); + // deploy the contract on PXE A const token = await deployToken(walletA, initialBalance, logger); @@ -242,7 +228,6 @@ describe('e2e_2_pxes', () => { // PXE-B reprocesses the deferred notes, and sees the nullifier for A -> Shared await pxeB.registerContract(token); await expectTokenBalance(walletB, token, walletB.getAddress(), transferAmount2, logger); - await expect(sharedWalletOnB.isAccountStateSynchronized(sharedAccountAddress.address)).resolves.toBe(true); await expectTokenBalance( sharedWalletOnB, token, @@ -267,14 +252,16 @@ describe('e2e_2_pxes', () => { const receipt = await testContract.methods .call_create_note(noteValue, owner, outgoingViewer, noteStorageSlot) .send() - .wait({ debug: true }); - const { visibleIncomingNotes, visibleOutgoingNotes } = receipt.debugInfo!; - expect(visibleIncomingNotes).toHaveLength(1); - note = visibleIncomingNotes![0]; + .wait(); + await testContract.methods.sync_notes().simulate(); + const incomingNotes = await walletA.getIncomingNotes({ txHash: receipt.txHash }); 
+ const outgoingNotes = await walletA.getOutgoingNotes({ txHash: receipt.txHash }); + expect(incomingNotes).toHaveLength(1); + note = incomingNotes[0]; // Since owner is the same as outgoing viewer the incoming and outgoing notes should be the same - expect(visibleOutgoingNotes).toHaveLength(1); - expect(visibleOutgoingNotes![0]).toEqual(note); + expect(outgoingNotes).toHaveLength(1); + expect(outgoingNotes[0]).toEqual(note); } // 3. Nullify the note diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts index 5f5d8a12522..e2ca5a2fa64 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/blacklist_token_contract_test.ts @@ -96,7 +96,6 @@ export class BlacklistTokenContractTest { this.other = this.wallets[1]; this.blacklisted = this.wallets[2]; this.accounts = await pxe.getRegisteredAccounts(); - this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); }); await this.snapshotManager.snapshot( diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts index 62a1d0af5ca..03aa384b685 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/minting.test.ts @@ -95,8 +95,10 @@ describe('e2e_blacklist_token_contract mint', () => { .wait({ debug: true }); tokenSim.mintPrivate(wallets[0].getAddress(), amount); - // 1 note should be created containing `amount` of tokens - const { visibleIncomingNotes } = receiptClaim.debugInfo!; + // Trigger a note sync + await asset.methods.sync_notes().simulate(); + // 1 note should have been created containing `amount` of tokens + const visibleIncomingNotes = await 
wallets[0].getIncomingNotes({ txHash: receiptClaim.txHash }); expect(visibleIncomingNotes.length).toBe(1); expect(visibleIncomingNotes[0].note.items[0].toBigInt()).toBe(amount); }); diff --git a/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts b/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts index ceae5ad7c6b..6dc08e64aee 100644 --- a/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts +++ b/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts @@ -180,6 +180,7 @@ describe('e2e_cheat_codes', () => { const mintAmount = 100n; await mintTokensToPrivate(token, wallet, admin, mintAmount); + await token.methods.sync_notes().simulate(); const balancesAdminSlot = cc.aztec.computeSlotInMap(TokenContract.storage.balances.slot, admin); diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts index eeb91f00f72..5c8d1470d4d 100644 --- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts +++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts @@ -112,6 +112,13 @@ describe('e2e_crowdfunding_and_claim', () => { await rewardToken.methods.set_minter(claimContract.address, true).send().wait(); + // Add the operator address + // as a contact to all donor wallets, so they can receive notes + await Promise.all( + donorWallets.map(async wallet => { + await wallet.registerContact(operatorWallet.getAddress()); + }), + ); // Now we mint DNT to donors await mintTokensToPrivate(donationToken, operatorWallet, donorWallets[0].getAddress(), 1234n); await mintTokensToPrivate(donationToken, operatorWallet, donorWallets[1].getAddress(), 2345n); @@ -164,9 +171,9 @@ describe('e2e_crowdfunding_and_claim', () => { }); // Get the notes emitted by the Crowdfunding contract and check that only 1 was emitted (the value note) - const notes = donateTxReceipt.debugInfo?.visibleIncomingNotes.filter(x => - x.contractAddress.equals(crowdfundingContract.address), - ); + await 
crowdfundingContract.withWallet(donorWallets[0]).methods.sync_notes().simulate(); + const incomingNotes = await donorWallets[0].getIncomingNotes({ txHash: donateTxReceipt.txHash }); + const notes = incomingNotes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); expect(notes!.length).toEqual(1); // Set the value note in a format which can be passed to claim function @@ -235,9 +242,9 @@ describe('e2e_crowdfunding_and_claim', () => { }); // Get the notes emitted by the Crowdfunding contract and check that only 1 was emitted (the value note) - const notes = donateTxReceipt.debugInfo?.visibleIncomingNotes.filter(x => - x.contractAddress.equals(crowdfundingContract.address), - ); + await crowdfundingContract.withWallet(donorWallets[0]).methods.sync_notes().simulate(); + const incomingNotes = await donorWallets[0].getIncomingNotes({ txHash: donateTxReceipt.txHash }); + const notes = incomingNotes.filter(x => x.contractAddress.equals(crowdfundingContract.address)); expect(notes!.length).toEqual(1); // Set the value note in a format which can be passed to claim function @@ -291,9 +298,10 @@ describe('e2e_crowdfunding_and_claim', () => { let note: any; { const receipt = await inclusionsProofsContract.methods.create_note(owner, 5n).send().wait({ debug: true }); - const { visibleIncomingNotes } = receipt.debugInfo!; - expect(visibleIncomingNotes.length).toEqual(1); - note = processUniqueNote(visibleIncomingNotes![0]); + await inclusionsProofsContract.methods.sync_notes().simulate(); + const incomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); + expect(incomingNotes.length).toEqual(1); + note = processUniqueNote(incomingNotes[0]); } // 3) Test the note was included diff --git a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts index c379ae088aa..fa9230fa294 100644 --- a/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_pending_note_hashes_contract.test.ts @@ -301,12 +301,12 @@ describe('e2e_pending_note_hashes_contract', () => { const outgoingViewer = owner; // Add a note of value 10, with a note log // Then emit another note log with the same counter as the one above, but with value 5 - await deployedContract.methods.test_emit_bad_note_log(owner, outgoingViewer).send().wait(); + const txReceipt = await deployedContract.methods.test_emit_bad_note_log(owner, outgoingViewer).send().wait(); - const syncStats = await wallet.getSyncStats(); - // Expect two incoming decryptable note logs to be emitted - expect(syncStats[owner.toString()].decryptedIncoming).toEqual(2); - // Expect one note log to be dropped - expect(syncStats[owner.toString()].failed).toEqual(1); + await deployedContract.methods.sync_notes().simulate(); + + const incomingNotes = await wallet.getIncomingNotes({ txHash: txReceipt.txHash }); + + expect(incomingNotes.length).toBe(1); }); }); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index 9007b2e0f7a..15e77e4e775 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -34,7 +34,6 @@ import solc from 'solc'; import { type Hex, getContract } from 'viem'; import { privateKeyToAddress } from 'viem/accounts'; -import { waitRegisteredAccountSynced } from '../benchmarks/utils.js'; import { getACVMConfig } from '../fixtures/get_acvm_config.js'; import { getBBConfig } from '../fixtures/get_bb_config.js'; import { @@ -219,17 +218,8 @@ export class FullProverTest { await result.pxe.registerContract(this.fakeProofsAsset); for (let i = 0; i < 2; i++) { - await waitRegisteredAccountSynced( - result.pxe, - this.keys[i][0], - this.wallets[i].getCompleteAddress().partialAddress, - ); - - await waitRegisteredAccountSynced( - this.pxe, - this.keys[i][0], - 
this.wallets[i].getCompleteAddress().partialAddress, - ); + await result.pxe.registerAccount(this.keys[i][0], this.wallets[i].getCompleteAddress().partialAddress); + await this.pxe.registerAccount(this.keys[i][0], this.wallets[i].getCompleteAddress().partialAddress); } const account = getSchnorrAccount(result.pxe, this.keys[0][0], this.keys[0][1], SALT); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts index 3748f1b8986..ccaea95bc3f 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts @@ -46,7 +46,14 @@ export class TokenContractTest { const accountManagers = accountKeys.map(ak => getSchnorrAccount(pxe, ak[0], ak[1], 1)); this.wallets = await Promise.all(accountManagers.map(a => a.getWallet())); this.accounts = await pxe.getRegisteredAccounts(); - this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); + // // Add every wallet the contacts of every other wallet. 
This way, they can send notes to each other and discover them + // await Promise.all( + // this.wallets.map(w => { + // const otherWallets = this.wallets.filter(ow => ow.getAddress() !== w.getAddress()); + // return Promise.all(otherWallets.map(ow => w.registerContact(ow.getAddress()))); + // }), + // ); + // this.wallets.forEach((w, i) => this.logger.verbose(`Wallet ${i} address: ${w.getAddress()}`)); }); await this.snapshotManager.snapshot( diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts index 9834024f24c..d2cf242aff0 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_in_private.test.ts @@ -55,9 +55,6 @@ describe('e2e_token_contract transfer private', () => { .methods.transfer_in_private(accounts[0].address, accounts[1].address, amount, nonce) .send(); await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); - - // We let wallets[0] see wallets[1]'s notes because the check uses wallets[0]'s wallet to interact with the contracts to "get" state. 
- wallets[0].setScopes([wallets[0].getAddress(), wallets[1].getAddress()]); }); describe('failure cases', () => { diff --git a/yarn-project/end-to-end/src/fixtures/token_utils.ts b/yarn-project/end-to-end/src/fixtures/token_utils.ts index 834438f686f..6769aae9daa 100644 --- a/yarn-project/end-to-end/src/fixtures/token_utils.ts +++ b/yarn-project/end-to-end/src/fixtures/token_utils.ts @@ -1,4 +1,4 @@ -import { type AztecAddress, type DebugLogger, type Wallet, retryUntil } from '@aztec/aztec.js'; +import { type AztecAddress, type DebugLogger, type Wallet } from '@aztec/aztec.js'; import { TokenContract } from '@aztec/noir-contracts.js'; export async function deployToken(adminWallet: Wallet, initialAdminBalance: bigint, logger: DebugLogger) { @@ -28,26 +28,13 @@ export async function mintTokensToPrivate( await tokenAsMinter.methods.mint_to_private(from, recipient, amount).send().wait(); } -const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { - const isUserSynchronized = async () => { - return await wallet.isAccountStateSynchronized(owner); - }; - await retryUntil(isUserSynchronized, `synch of user ${owner.toString()}`, 10); -}; - export async function expectTokenBalance( wallet: Wallet, token: TokenContract, owner: AztecAddress, expectedBalance: bigint, logger: DebugLogger, - checkIfSynchronized = true, ) { - if (checkIfSynchronized) { - // First wait until the corresponding PXE has synchronized the account - await awaitUserSynchronized(wallet, owner); - } - // Then check the balance const contractWithWallet = await TokenContract.at(token.address, wallet); const balance = await contractWithWallet.methods.balance_of_private(owner).simulate({ from: owner }); diff --git a/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts b/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts index 5fb6a8e6113..534300c633a 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts +++ 
b/yarn-project/end-to-end/src/flakey_e2e_inclusion_proofs_contract.test.ts @@ -62,7 +62,10 @@ describe('e2e_inclusion_proofs_contract', () => { const receipt = await contract.methods.create_note(owner, value).send().wait({ debug: true }); noteCreationBlockNumber = receipt.blockNumber!; - ({ noteHashes, visibleIncomingNotes } = receipt.debugInfo!); + ({ noteHashes } = receipt.debugInfo!); + + await contract.methods.sync_notes().simulate(); + visibleIncomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); }); it('should return the correct values for creating a note', () => { @@ -155,7 +158,10 @@ describe('e2e_inclusion_proofs_contract', () => { const receipt = await contract.methods.create_note(owner, value).send().wait({ debug: true }); noteCreationBlockNumber = receipt.blockNumber!; - const { noteHashes, visibleIncomingNotes } = receipt.debugInfo!; + const { noteHashes } = receipt.debugInfo!; + + await contract.methods.sync_notes().simulate(); + const visibleIncomingNotes = await wallets[0].getIncomingNotes({ txHash: receipt.txHash }); expect(noteHashes.length).toBe(1); expect(visibleIncomingNotes.length).toBe(1); diff --git a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts index b5f187d9e26..001ecca4712 100644 --- a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts +++ b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts @@ -106,6 +106,7 @@ describe('guides/dapp/testing', () => { it('checks private storage', async () => { // docs:start:private-storage + await token.methods.sync_notes().simulate(); const notes = await pxe.getIncomingNotes({ owner: owner.getAddress(), contractAddress: token.address, diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index 5767a678460..d68750db230 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ 
b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -1,5 +1,6 @@ // docs:start:cross_chain_test_harness import { + type AccountWallet, type AztecAddress, type AztecNode, type DebugLogger, @@ -138,7 +139,7 @@ export class CrossChainTestHarness { pxeService: PXE, publicClient: PublicClient, walletClient: WalletClient, - wallet: Wallet, + wallet: AccountWallet, logger: DebugLogger, underlyingERC20Address?: EthAddress, ): Promise { @@ -207,7 +208,7 @@ export class CrossChainTestHarness { public readonly l1ContractAddresses: L1ContractAddresses, /** Wallet of the owner. */ - public readonly ownerWallet: Wallet, + public readonly ownerWallet: AccountWallet, ) { this.l1TokenPortalManager = new L1TokenPortalManager( this.tokenPortalAddress, diff --git a/yarn-project/pxe/src/contract_data_oracle/index.ts b/yarn-project/pxe/src/contract_data_oracle/index.ts index e29c95675cd..d5f6f9e5c0d 100644 --- a/yarn-project/pxe/src/contract_data_oracle/index.ts +++ b/yarn-project/pxe/src/contract_data_oracle/index.ts @@ -130,6 +130,11 @@ export class ContractDataOracle { return tree.getFunctionMembershipWitness(selector); } + public async getDebugContractName(contractAddress: AztecAddress) { + const tree = await this.getTreeForAddress(contractAddress); + return tree.getArtifact().name; + } + public async getDebugFunctionName(contractAddress: AztecAddress, selector: FunctionSelector) { const tree = await this.getTreeForAddress(contractAddress); const { name: contractName } = tree.getArtifact(); diff --git a/yarn-project/pxe/src/database/deferred_note_dao.test.ts b/yarn-project/pxe/src/database/deferred_note_dao.test.ts deleted file mode 100644 index 79250c687b8..00000000000 --- a/yarn-project/pxe/src/database/deferred_note_dao.test.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { L1NotePayload, UnencryptedTxL2Logs, randomTxHash } from '@aztec/circuit-types'; -import { Fr, Point } from '@aztec/circuits.js'; -import { randomInt } from '@aztec/foundation/crypto'; - -import { 
DeferredNoteDao } from './deferred_note_dao.js'; - -export const randomDeferredNoteDao = ({ - publicKey = Point.random(), - payload = L1NotePayload.random(), - txHash = randomTxHash(), - noteHashes = [Fr.random(), Fr.random()], - dataStartIndexForTx = randomInt(100), - unencryptedLogs = UnencryptedTxL2Logs.random(1, 1), -}: Partial = {}) => { - return new DeferredNoteDao(publicKey, payload, txHash, noteHashes, dataStartIndexForTx, unencryptedLogs); -}; - -describe('Deferred Note DAO', () => { - it('convert to and from buffer', () => { - const deferredNote = randomDeferredNoteDao(); - const buf = deferredNote.toBuffer(); - expect(DeferredNoteDao.fromBuffer(buf)).toEqual(deferredNote); - }); -}); diff --git a/yarn-project/pxe/src/database/deferred_note_dao.ts b/yarn-project/pxe/src/database/deferred_note_dao.ts deleted file mode 100644 index 16044d7441e..00000000000 --- a/yarn-project/pxe/src/database/deferred_note_dao.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { L1NotePayload, TxHash, UnencryptedTxL2Logs } from '@aztec/circuit-types'; -import { Fr, Point, type PublicKey, Vector } from '@aztec/circuits.js'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; - -/** - * A note that is intended for us, but we cannot decode it yet because the contract is not yet in our database. - * - * So keep the state that we need to decode it later. - */ -export class DeferredNoteDao { - constructor( - /** Address Point or OvpkM (depending on if incoming or outgoing) the note was encrypted with. */ - public publicKey: PublicKey, - /** The note payload delivered via L1. */ - public payload: L1NotePayload, - /** The hash of the tx the note was created in. 
Equal to the first nullifier */ - public txHash: TxHash, - /** New note hashes in this transaction, one of which belongs to this note */ - public noteHashes: Fr[], - /** The next available leaf index for the note hash tree for this transaction */ - public dataStartIndexForTx: number, - /** Unencrypted logs for the transaction (used to complete partial notes) */ - public unencryptedLogs: UnencryptedTxL2Logs, - ) {} - - toBuffer(): Buffer { - return serializeToBuffer( - this.publicKey, - this.payload, - this.txHash, - new Vector(this.noteHashes), - this.dataStartIndexForTx, - this.unencryptedLogs, - ); - } - static fromBuffer(buffer: Buffer | BufferReader) { - const reader = BufferReader.asReader(buffer); - return new DeferredNoteDao( - reader.readObject(Point), - reader.readObject(L1NotePayload), - reader.readObject(TxHash), - reader.readVector(Fr), - reader.readNumber(), - reader.readObject(UnencryptedTxL2Logs), - ); - } -} diff --git a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/incoming_note_dao.ts index 8a07c9d39a5..cbd344b135c 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.ts @@ -5,7 +5,7 @@ import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type NoteData } from '@aztec/simulator'; -import { type NoteInfo } from '../note_processor/utils/index.js'; +import { type NoteInfo } from '../note_decryption_utils/index.js'; /** * A note with contextual data which was decrypted as incoming. 
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 9c6f25092d0..2fea8a0452b 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -4,9 +4,9 @@ import { CompleteAddress, type ContractInstanceWithAddress, Header, + type IndexedTaggingSecret, type PublicKey, SerializableContractInstance, - type TaggingSecret, computePoint, } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; @@ -22,7 +22,6 @@ import { } from '@aztec/kv-store'; import { contractArtifactFromBuffer, contractArtifactToBuffer } from '@aztec/types/abi'; -import { DeferredNoteDao } from './deferred_note_dao.js'; import { IncomingNoteDao } from './incoming_note_dao.js'; import { OutgoingNoteDao } from './outgoing_note_dao.js'; import { type PxeDatabase } from './pxe_database.js'; @@ -45,8 +44,6 @@ export class KVPxeDatabase implements PxeDatabase { #nullifiedNotesByStorageSlot: AztecMultiMap; #nullifiedNotesByTxHash: AztecMultiMap; #nullifiedNotesByAddressPoint: AztecMultiMap; - #deferredNotes: AztecArray; - #deferredNotesByContract: AztecMultiMap; #syncedBlockPerPublicKey: AztecMap; #contractArtifacts: AztecMap; #contractInstances: AztecMap; @@ -64,8 +61,11 @@ export class KVPxeDatabase implements PxeDatabase { #notesByTxHashAndScope: Map>; #notesByAddressPointAndScope: Map>; - // Stores the last index used for each tagging secret - #taggingSecretIndexes: AztecMap; + // Stores the last index used for each tagging secret, taking direction into account + // This is necessary to avoid reusing the same index for the same secret, which happens if + // sender and recipient are the same + #taggingSecretIndexesForSenders: AztecMap; + #taggingSecretIndexesForRecipients: AztecMap; constructor(private db: AztecKVStore) { this.#db = db; @@ -93,9 +93,6 @@ export class KVPxeDatabase implements PxeDatabase { this.#nullifiedNotesByTxHash = 
db.openMultiMap('nullified_notes_by_tx_hash'); this.#nullifiedNotesByAddressPoint = db.openMultiMap('nullified_notes_by_address_point'); - this.#deferredNotes = db.openArray('deferred_notes'); - this.#deferredNotesByContract = db.openMultiMap('deferred_notes_by_contract'); - this.#outgoingNotes = db.openMap('outgoing_notes'); this.#outgoingNotesByContract = db.openMultiMap('outgoing_notes_by_contract'); this.#outgoingNotesByStorageSlot = db.openMultiMap('outgoing_notes_by_storage_slot'); @@ -115,7 +112,8 @@ export class KVPxeDatabase implements PxeDatabase { this.#notesByAddressPointAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_address_point`)); } - this.#taggingSecretIndexes = db.openMap('tagging_secret_indices'); + this.#taggingSecretIndexesForSenders = db.openMap('tagging_secret_indexes_for_senders'); + this.#taggingSecretIndexesForRecipients = db.openMap('tagging_secret_indexes_for_recipients'); } public async getContract( @@ -216,56 +214,6 @@ export class KVPxeDatabase implements PxeDatabase { }); } - async addDeferredNotes(deferredNotes: DeferredNoteDao[]): Promise { - const newLength = await this.#deferredNotes.push(...deferredNotes.map(note => note.toBuffer())); - for (const [index, note] of deferredNotes.entries()) { - const noteId = newLength - deferredNotes.length + index; - await this.#deferredNotesByContract.set(note.payload.contractAddress.toString(), noteId); - } - } - - getDeferredNotesByContract(contractAddress: AztecAddress): Promise { - const noteIds = this.#deferredNotesByContract.getValues(contractAddress.toString()); - const notes: DeferredNoteDao[] = []; - for (const noteId of noteIds) { - const serializedNote = this.#deferredNotes.at(noteId); - if (!serializedNote) { - continue; - } - - const note = DeferredNoteDao.fromBuffer(serializedNote); - notes.push(note); - } - - return Promise.resolve(notes); - } - - /** - * Removes all deferred notes for a given contract address. 
- * @param contractAddress - the contract address to remove deferred notes for - * @returns an array of the removed deferred notes - */ - removeDeferredNotesByContract(contractAddress: AztecAddress): Promise { - return this.#db.transaction(() => { - const deferredNotes: DeferredNoteDao[] = []; - const indices = Array.from(this.#deferredNotesByContract.getValues(contractAddress.toString())); - - for (const index of indices) { - const deferredNoteBuffer = this.#deferredNotes.at(index); - if (!deferredNoteBuffer) { - continue; - } else { - deferredNotes.push(DeferredNoteDao.fromBuffer(deferredNoteBuffer)); - } - - void this.#deferredNotesByContract.deleteValue(contractAddress.toString(), index); - void this.#deferredNotes.setAt(index, null); - } - - return deferredNotes; - }); - } - getIncomingNotes(filter: IncomingNotesFilter): Promise { const publicKey: PublicKey | undefined = filter.owner ? computePoint(filter.owner) : undefined; @@ -600,23 +548,37 @@ export class KVPxeDatabase implements PxeDatabase { return incomingNotesSize + outgoingNotesSize + treeRootsSize + authWitsSize + addressesSize; } - async incrementTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise { - const indexes = await this.getTaggingSecretsIndexes(appTaggingSecretsWithRecipient); + async incrementTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]): Promise { + await this.#incrementTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForSenders); + } + + async #incrementTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecMap): Promise { + const indexes = await this.#getTaggingSecretsIndexes(appTaggingSecrets, storageMap); await this.db.transaction(() => { indexes.forEach((taggingSecretIndex, listIndex) => { const nextIndex = taggingSecretIndex + 1; - const { secret, recipient } = appTaggingSecretsWithRecipient[listIndex]; - const key = `${secret.toString()}-${recipient.toString()}`; - void this.#taggingSecretIndexes.set(key, nextIndex); + 
void storageMap.set(appTaggingSecrets[listIndex].toString(), nextIndex); }); }); } - getTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise { - return this.db.transaction(() => - appTaggingSecretsWithRecipient.map( - ({ secret, recipient }) => this.#taggingSecretIndexes.get(`${secret.toString()}-${recipient.toString()}`) ?? 0, - ), - ); + async setTaggingSecretsIndexesAsRecipient(indexedSecrets: IndexedTaggingSecret[]): Promise { + await this.db.transaction(() => { + indexedSecrets.forEach(indexedSecret => { + void this.#taggingSecretIndexesForRecipients.set(indexedSecret.secret.toString(), indexedSecret.index); + }); + }); + } + + async getTaggingSecretsIndexesAsRecipient(appTaggingSecrets: Fr[]) { + return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForRecipients); + } + + async getTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]) { + return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForSenders); + } + + #getTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecMap): Promise { + return this.db.transaction(() => appTaggingSecrets.map(secret => storageMap.get(`${secret.toString()}`) ?? 0)); } } diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.ts b/yarn-project/pxe/src/database/outgoing_note_dao.ts index 30385bb684e..04bb7d4835c 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.ts @@ -4,7 +4,7 @@ import { NoteSelector } from '@aztec/foundation/abi'; import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { type NoteInfo } from '../note_processor/utils/index.js'; +import { type NoteInfo } from '../note_decryption_utils/index.js'; /** * A note with contextual data which was decrypted as outgoing. 
diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts index d6758e233d8..d4cdb12bcec 100644 --- a/yarn-project/pxe/src/database/pxe_database.ts +++ b/yarn-project/pxe/src/database/pxe_database.ts @@ -3,8 +3,8 @@ import { type CompleteAddress, type ContractInstanceWithAddress, type Header, + type IndexedTaggingSecret, type PublicKey, - type TaggingSecret, } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; @@ -12,7 +12,6 @@ import { type Fr } from '@aztec/foundation/fields'; import { type ContractArtifactDatabase } from './contracts/contract_artifact_db.js'; import { type ContractInstanceDatabase } from './contracts/contract_instance_db.js'; -import { type DeferredNoteDao } from './deferred_note_dao.js'; import { type IncomingNoteDao } from './incoming_note_dao.js'; import { type OutgoingNoteDao } from './outgoing_note_dao.js'; @@ -90,25 +89,6 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD */ addNotes(incomingNotes: IncomingNoteDao[], outgoingNotes: OutgoingNoteDao[], scope?: AztecAddress): Promise; - /** - * Add notes to the database that are intended for us, but we don't yet have the contract. - * @param deferredNotes - An array of deferred notes. - */ - addDeferredNotes(deferredNotes: DeferredNoteDao[]): Promise; - - /** - * Get deferred notes for a given contract address. - * @param contractAddress - The contract address to get the deferred notes for. - */ - getDeferredNotesByContract(contractAddress: AztecAddress): Promise; - - /** - * Remove deferred notes for a given contract address. - * @param contractAddress - The contract address to remove the deferred notes for. 
- * @returns an array of the removed deferred notes - */ - removeDeferredNotesByContract(contractAddress: AztecAddress): Promise; - /** * Remove nullified notes associated with the given account and nullifiers. * @@ -209,18 +189,29 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD /** * Returns the last seen indexes for the provided app siloed tagging secrets or 0 if they've never been seen. - * The recipient must also be provided to convey "directionality" of the secret and index pair, or in other words - * whether the index was used to tag a sent or received note. * @param appTaggingSecrets - The app siloed tagging secrets. * @returns The indexes for the provided secrets, 0 if they've never been seen. */ - getTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise; + getTaggingSecretsIndexesAsRecipient(appTaggingSecrets: Fr[]): Promise; + + /** + * Returns the last seen indexes for the provided app siloed tagging secrets or 0 if they've never been used + * @param appTaggingSecrets - The app siloed tagging secrets. + * @returns The indexes for the provided secrets, 0 if they've never been seen. + */ + getTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]): Promise; + + /** + * Increments the index for the provided app siloed tagging secrets in the senders database + * To be used when the generated tags have been used as sender + * @param appTaggingSecrets - The app siloed tagging secrets. + */ + incrementTaggingSecretsIndexesAsSender(appTaggingSecrets: Fr[]): Promise; /** - * Increments the index for the provided app siloed tagging secrets. - * The recipient must also be provided to convey "directionality" of the secret and index pair, or in other words - * whether the index was used to tag a sent or received note. 
+ * Sets the index for the provided app siloed tagging secrets + * To be used when the generated tags have been "seen" as a recipient * @param appTaggingSecrets - The app siloed tagging secrets. */ - incrementTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise; + setTaggingSecretsIndexesAsRecipient(indexedTaggingSecrets: IndexedTaggingSecret[]): Promise; } diff --git a/yarn-project/pxe/src/note_processor/utils/add_public_values_to_payload.ts b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts similarity index 97% rename from yarn-project/pxe/src/note_processor/utils/add_public_values_to_payload.ts rename to yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts index 8a249ceab6d..1d9c3806eea 100644 --- a/yarn-project/pxe/src/note_processor/utils/add_public_values_to_payload.ts +++ b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts @@ -1,7 +1,7 @@ import { type L1NotePayload, Note } from '@aztec/circuit-types'; import { ContractNotFoundError } from '@aztec/simulator'; -import { type PxeDatabase } from '../../database/pxe_database.js'; +import { type PxeDatabase } from '../database/pxe_database.js'; /** * Merges privately and publicly delivered note values. 
diff --git a/yarn-project/pxe/src/note_processor/utils/brute_force_note_info.ts b/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts similarity index 100% rename from yarn-project/pxe/src/note_processor/utils/brute_force_note_info.ts rename to yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts diff --git a/yarn-project/pxe/src/note_processor/utils/index.ts b/yarn-project/pxe/src/note_decryption_utils/index.ts similarity index 100% rename from yarn-project/pxe/src/note_processor/utils/index.ts rename to yarn-project/pxe/src/note_decryption_utils/index.ts diff --git a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts similarity index 79% rename from yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts rename to yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts index 07a1a8a8433..8e857d51330 100644 --- a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts +++ b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts @@ -1,12 +1,11 @@ -import { type L1NotePayload, type PublicKey, type TxHash, type UnencryptedTxL2Logs } from '@aztec/circuit-types'; +import { type L1NotePayload, type PublicKey, type TxHash } from '@aztec/circuit-types'; import { type Fr } from '@aztec/foundation/fields'; import { type Logger } from '@aztec/foundation/log'; import { type AcirSimulator } from '@aztec/simulator'; -import { type DeferredNoteDao } from '../../database/deferred_note_dao.js'; -import { IncomingNoteDao } from '../../database/incoming_note_dao.js'; -import { OutgoingNoteDao } from '../../database/outgoing_note_dao.js'; -import { type PxeDatabase } from '../../database/pxe_database.js'; +import { IncomingNoteDao } from '../database/incoming_note_dao.js'; +import { OutgoingNoteDao } from '../database/outgoing_note_dao.js'; +import { type PxeDatabase } from '../database/pxe_database.js'; import { produceNoteDaosForKey } from 
'./produce_note_daos_for_key.js'; /** @@ -39,12 +38,9 @@ export async function produceNoteDaos( dataStartIndexForTx: number, excludedIndices: Set, logger: Logger, - unencryptedLogs: UnencryptedTxL2Logs, ): Promise<{ incomingNote: IncomingNoteDao | undefined; outgoingNote: OutgoingNoteDao | undefined; - incomingDeferredNote: DeferredNoteDao | undefined; - outgoingDeferredNote: DeferredNoteDao | undefined; }> { if (!addressPoint && !ovpkM) { throw new Error('Both addressPoint and ovpkM are undefined. Cannot create note.'); @@ -52,11 +48,9 @@ export async function produceNoteDaos( let incomingNote: IncomingNoteDao | undefined; let outgoingNote: OutgoingNoteDao | undefined; - let incomingDeferredNote: DeferredNoteDao | undefined; - let outgoingDeferredNote: DeferredNoteDao | undefined; if (addressPoint) { - [incomingNote, incomingDeferredNote] = await produceNoteDaosForKey( + incomingNote = await produceNoteDaosForKey( simulator, db, addressPoint, @@ -66,7 +60,6 @@ export async function produceNoteDaos( dataStartIndexForTx, excludedIndices, logger, - unencryptedLogs, IncomingNoteDao.fromPayloadAndNoteInfo, ); } @@ -87,7 +80,7 @@ export async function produceNoteDaos( ovpkM, ); } else { - [outgoingNote, outgoingDeferredNote] = await produceNoteDaosForKey( + outgoingNote = await produceNoteDaosForKey( simulator, db, ovpkM, @@ -97,7 +90,6 @@ export async function produceNoteDaos( dataStartIndexForTx, excludedIndices, logger, - unencryptedLogs, OutgoingNoteDao.fromPayloadAndNoteInfo, ); } @@ -106,7 +98,5 @@ export async function produceNoteDaos( return { incomingNote, outgoingNote, - incomingDeferredNote, - outgoingDeferredNote, }; } diff --git a/yarn-project/pxe/src/note_processor/utils/produce_note_daos_for_key.ts b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts similarity index 62% rename from yarn-project/pxe/src/note_processor/utils/produce_note_daos_for_key.ts rename to yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts 
index 42b04fc3c13..9e530b387d1 100644 --- a/yarn-project/pxe/src/note_processor/utils/produce_note_daos_for_key.ts +++ b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts @@ -1,10 +1,9 @@ -import { type L1NotePayload, type Note, type TxHash, type UnencryptedTxL2Logs } from '@aztec/circuit-types'; +import { type L1NotePayload, type Note, type TxHash } from '@aztec/circuit-types'; import { type Fr, type PublicKey } from '@aztec/circuits.js'; import { type Logger } from '@aztec/foundation/log'; -import { type AcirSimulator, ContractNotFoundError } from '@aztec/simulator'; +import { type AcirSimulator } from '@aztec/simulator'; -import { DeferredNoteDao } from '../../database/deferred_note_dao.js'; -import { type PxeDatabase } from '../../database/pxe_database.js'; +import { type PxeDatabase } from '../database/pxe_database.js'; import { getOrderedNoteItems } from './add_public_values_to_payload.js'; import { type NoteInfo, bruteForceNoteInfo } from './brute_force_note_info.js'; @@ -18,7 +17,6 @@ export async function produceNoteDaosForKey( dataStartIndexForTx: number, excludedIndices: Set, logger: Logger, - unencryptedLogs: UnencryptedTxL2Logs, daoConstructor: ( note: Note, payload: L1NotePayload, @@ -26,9 +24,8 @@ export async function produceNoteDaosForKey( dataStartIndexForTx: number, pkM: PublicKey, ) => T, -): Promise<[T | undefined, DeferredNoteDao | undefined]> { +): Promise { let noteDao: T | undefined; - let deferredNoteDao: DeferredNoteDao | undefined; try { // We get the note by merging publicly and privately delivered note values. @@ -49,14 +46,8 @@ export async function produceNoteDaosForKey( noteDao = daoConstructor(note, payload, noteInfo, dataStartIndexForTx, pkM); } catch (e) { - if (e instanceof ContractNotFoundError) { - logger.warn(e.message); - - deferredNoteDao = new DeferredNoteDao(pkM, payload, txHash, noteHashes, dataStartIndexForTx, unencryptedLogs); - } else { - logger.error(`Could not process note because of "${e}". 
Discarding note...`); - } + logger.error(`Could not process note because of "${e}". Discarding note...`); } - return [noteDao, deferredNoteDao]; + return noteDao; } diff --git a/yarn-project/pxe/src/note_processor/index.ts b/yarn-project/pxe/src/note_processor/index.ts deleted file mode 100644 index 3190e8eb40e..00000000000 --- a/yarn-project/pxe/src/note_processor/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './note_processor.js'; diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts deleted file mode 100644 index a5d7c77f0d8..00000000000 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ /dev/null @@ -1,391 +0,0 @@ -import { - type AztecNode, - EncryptedL2NoteLog, - EncryptedLogPayload, - L1NotePayload, - L2Block, - Note, -} from '@aztec/circuit-types'; -import { - AztecAddress, - CompleteAddress, - Fr, - INITIAL_L2_BLOCK_NUM, - KeyValidationRequest, - MAX_NOTE_HASHES_PER_TX, - type PublicKey, - computeOvskApp, - deriveKeys, -} from '@aztec/circuits.js'; -import { pedersenHash } from '@aztec/foundation/crypto'; -import { GrumpkinScalar } from '@aztec/foundation/fields'; -import { type KeyStore } from '@aztec/key-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { type AcirSimulator } from '@aztec/simulator'; - -import { jest } from '@jest/globals'; -import { type MockProxy, mock } from 'jest-mock-extended'; - -import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; -import { type PxeDatabase } from '../database/index.js'; -import { KVPxeDatabase } from '../database/kv_pxe_database.js'; -import { type OutgoingNoteDao } from '../database/outgoing_note_dao.js'; -import { NoteProcessor } from './note_processor.js'; - -const TXS_PER_BLOCK = 4; -const NUM_NOTE_HASHES_PER_BLOCK = TXS_PER_BLOCK * MAX_NOTE_HASHES_PER_TX; - -/** A wrapper containing info about a note we want to mock and insert into a block. 
*/ -class MockNoteRequest { - constructor( - /** Log payload corresponding to a note we want to insert into a block. */ - public readonly logPayload: EncryptedLogPayload, - /** Block number this note corresponds to. */ - public readonly blockNumber: number, - /** Index of a tx within a block this note corresponds to. */ - public readonly txIndex: number, - /** Index of a note hash within a list of note hashes for 1 tx. */ - public readonly noteHashIndex: number, - /** Address point we use when encrypting a note. */ - public readonly recipient: AztecAddress, - /** ovKeys we use when encrypting a note. */ - public readonly ovKeys: KeyValidationRequest, - ) { - if (blockNumber < INITIAL_L2_BLOCK_NUM) { - throw new Error(`Block number should be greater than or equal to ${INITIAL_L2_BLOCK_NUM}.`); - } - if (noteHashIndex >= MAX_NOTE_HASHES_PER_TX) { - throw new Error(`Data index should be less than ${MAX_NOTE_HASHES_PER_TX}.`); - } - if (txIndex >= TXS_PER_BLOCK) { - throw new Error(`Tx index should be less than ${TXS_PER_BLOCK}.`); - } - } - - encrypt(): EncryptedL2NoteLog { - const ephSk = GrumpkinScalar.random(); - const log = this.logPayload.encrypt(ephSk, this.recipient, this.ovKeys); - return new EncryptedL2NoteLog(log); - } - - get indexWithinNoteHashTree(): bigint { - return BigInt( - (this.blockNumber - 1) * NUM_NOTE_HASHES_PER_BLOCK + this.txIndex * MAX_NOTE_HASHES_PER_TX + this.noteHashIndex, - ); - } - - get snippetOfNoteDao() { - const payload = L1NotePayload.fromIncomingBodyPlaintextContractAndPublicValues( - this.logPayload.incomingBodyPlaintext, - this.logPayload.contractAddress, - [], - )!; - return { - note: new Note(payload.privateNoteValues), - contractAddress: payload.contractAddress, - storageSlot: payload.storageSlot, - noteTypeId: payload.noteTypeId, - }; - } -} - -describe('Note Processor', () => { - let database: PxeDatabase; - let aztecNode: ReturnType>; - let addNotesSpy: any; - let noteProcessor: NoteProcessor; - let keyStore: MockProxy; - 
let simulator: MockProxy; - - const app = AztecAddress.random(); - - let ownerIvskM: GrumpkinScalar; - let ownerOvskM: GrumpkinScalar; - let ownerOvKeys: KeyValidationRequest; - let account: CompleteAddress; - - function mockBlocks(requests: MockNoteRequest[]) { - const blocks = []; - - // The number of blocks we create starts from INITIAL_L2_BLOCK_NUM and ends at the highest block number in requests - const numBlocks = requests.reduce((maxBlockNum, request) => Math.max(maxBlockNum, request.blockNumber), 0); - - for (let i = 0; i < numBlocks; i++) { - // First we get a random block with correct block number - const block = L2Block.random(INITIAL_L2_BLOCK_NUM + i, TXS_PER_BLOCK, 1, 0, 4); - - // We have to update the next available leaf index in note hash tree to match the block number - block.header.state.partial.noteHashTree.nextAvailableLeafIndex = block.number * NUM_NOTE_HASHES_PER_BLOCK; - - // Then we get all the note requests for the block - const noteRequestsForBlock = requests.filter(request => request.blockNumber === block.number); - - // Then we update the relevant note hashes to match the note requests - for (const request of noteRequestsForBlock) { - const note = request.snippetOfNoteDao.note; - const noteHash = pedersenHash(note.items); - block.body.txEffects[request.txIndex].noteHashes[request.noteHashIndex] = noteHash; - - // Now we populate the log - to simplify we say that there is only 1 function invocation in each tx - block.body.txEffects[request.txIndex].noteEncryptedLogs.functionLogs[0].logs[request.noteHashIndex] = - request.encrypt(); - } - - // The block is finished so we add it to the list of blocks - blocks.push(block); - } - - return blocks; - } - - beforeAll(() => { - const ownerSk = Fr.random(); - const partialAddress = Fr.random(); - - account = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, partialAddress); - - ({ masterIncomingViewingSecretKey: ownerIvskM, masterOutgoingViewingSecretKey: ownerOvskM } = deriveKeys(ownerSk)); 
- - ownerOvKeys = new KeyValidationRequest( - account.publicKeys.masterOutgoingViewingPublicKey, - computeOvskApp(ownerOvskM, app), - ); - }); - - beforeEach(() => { - database = new KVPxeDatabase(openTmpStore()); - addNotesSpy = jest.spyOn(database, 'addNotes'); - - aztecNode = mock(); - keyStore = mock(); - simulator = mock(); - - keyStore.getMasterSecretKey.mockImplementation((pkM: PublicKey) => { - if (pkM.equals(account.publicKeys.masterIncomingViewingPublicKey)) { - return Promise.resolve(ownerIvskM); - } - if (pkM.equals(ownerOvKeys.pkM)) { - return Promise.resolve(ownerOvskM); - } - throw new Error(`Unknown public key: ${pkM}`); - }); - - keyStore.getMasterIncomingViewingPublicKey.mockResolvedValue(account.publicKeys.masterIncomingViewingPublicKey); - keyStore.getMasterOutgoingViewingPublicKey.mockResolvedValue(account.publicKeys.masterOutgoingViewingPublicKey); - - noteProcessor = NoteProcessor.create(account, keyStore, database, aztecNode, INITIAL_L2_BLOCK_NUM, simulator); - - simulator.computeNoteHashAndOptionallyANullifier.mockImplementation((...args) => - Promise.resolve({ - noteHash: Fr.random(), - uniqueNoteHash: Fr.random(), - siloedNoteHash: pedersenHash(args[5].items), // args[5] is note - innerNullifier: Fr.random(), - }), - ); - }); - - afterEach(() => { - addNotesSpy.mockReset(); - }); - - it('should store an incoming note that belongs to us', async () => { - const request = new MockNoteRequest( - getRandomNoteLogPayload(app), - 4, - 0, - 2, - account.address, - KeyValidationRequest.random(), - ); - - const blocks = mockBlocks([request]); - await noteProcessor.process(blocks); - - expect(addNotesSpy).toHaveBeenCalledTimes(1); - expect(addNotesSpy).toHaveBeenCalledWith( - [ - expect.objectContaining({ - ...request.snippetOfNoteDao, - index: request.indexWithinNoteHashTree, - }), - ], - [], - account.address, - ); - }, 25_000); - - it('should store an outgoing note that belongs to us', async () => { - const request = new MockNoteRequest( - 
getRandomNoteLogPayload(app), - 4, - 0, - 2, - CompleteAddress.random().address, - ownerOvKeys, - ); - - const blocks = mockBlocks([request]); - await noteProcessor.process(blocks); - - expect(addNotesSpy).toHaveBeenCalledTimes(1); - // For outgoing notes, the resulting DAO does not contain index. - expect(addNotesSpy).toHaveBeenCalledWith([], [expect.objectContaining(request.snippetOfNoteDao)], account.address); - }, 25_000); - - it('should store multiple notes that belong to us', async () => { - const requests = [ - new MockNoteRequest(getRandomNoteLogPayload(app), 1, 1, 1, account.address, ownerOvKeys), - new MockNoteRequest(getRandomNoteLogPayload(app), 2, 3, 0, CompleteAddress.random().address, ownerOvKeys), - new MockNoteRequest(getRandomNoteLogPayload(app), 6, 3, 2, account.address, KeyValidationRequest.random()), - new MockNoteRequest( - getRandomNoteLogPayload(app), - 9, - 3, - 2, - CompleteAddress.random().address, - KeyValidationRequest.random(), - ), - new MockNoteRequest(getRandomNoteLogPayload(app), 12, 3, 2, account.address, ownerOvKeys), - ]; - - const blocks = mockBlocks(requests); - await noteProcessor.process(blocks); - - expect(addNotesSpy).toHaveBeenCalledTimes(1); - expect(addNotesSpy).toHaveBeenCalledWith( - // Incoming should contain notes from requests 0, 2, 4 because in those requests we set owner address point. - [ - expect.objectContaining({ - ...requests[0].snippetOfNoteDao, - index: requests[0].indexWithinNoteHashTree, - }), - expect.objectContaining({ - ...requests[2].snippetOfNoteDao, - index: requests[2].indexWithinNoteHashTree, - }), - expect.objectContaining({ - ...requests[4].snippetOfNoteDao, - index: requests[4].indexWithinNoteHashTree, - }), - ], - // Outgoing should contain notes from requests 0, 1, 4 because in those requests we set owner ovKeys. 
- [ - expect.objectContaining(requests[0].snippetOfNoteDao), - expect.objectContaining(requests[1].snippetOfNoteDao), - expect.objectContaining(requests[4].snippetOfNoteDao), - ], - account.address, - ); - }, 30_000); - - it('should not store notes that do not belong to us', async () => { - // Both notes should be ignored because the encryption keys do not belong to owner (they are random). - const blocks = mockBlocks([ - new MockNoteRequest( - getRandomNoteLogPayload(), - 2, - 1, - 1, - CompleteAddress.random().address, - KeyValidationRequest.random(), - ), - new MockNoteRequest( - getRandomNoteLogPayload(), - 2, - 3, - 0, - CompleteAddress.random().address, - KeyValidationRequest.random(), - ), - ]); - await noteProcessor.process(blocks); - - expect(addNotesSpy).toHaveBeenCalledTimes(0); - }); - - it('should be able to recover two note payloads containing the same note', async () => { - const note = getRandomNoteLogPayload(app); - const note2 = getRandomNoteLogPayload(app); - // All note payloads except one have the same contract address, storage slot, and the actual note. 
- const requests = [ - new MockNoteRequest(note, 3, 0, 0, account.address, ownerOvKeys), - new MockNoteRequest(note, 4, 0, 2, account.address, ownerOvKeys), - new MockNoteRequest(note, 4, 2, 0, account.address, ownerOvKeys), - new MockNoteRequest(note2, 5, 2, 1, account.address, ownerOvKeys), - new MockNoteRequest(note, 6, 2, 3, account.address, ownerOvKeys), - ]; - - const blocks = mockBlocks(requests); - await noteProcessor.process(blocks); - - // First we check incoming - { - const addedIncoming: IncomingNoteDao[] = addNotesSpy.mock.calls[0][0]; - expect(addedIncoming.map(dao => dao)).toEqual([ - expect.objectContaining({ ...requests[0].snippetOfNoteDao, index: requests[0].indexWithinNoteHashTree }), - expect.objectContaining({ ...requests[1].snippetOfNoteDao, index: requests[1].indexWithinNoteHashTree }), - expect.objectContaining({ ...requests[2].snippetOfNoteDao, index: requests[2].indexWithinNoteHashTree }), - expect.objectContaining({ ...requests[3].snippetOfNoteDao, index: requests[3].indexWithinNoteHashTree }), - expect.objectContaining({ ...requests[4].snippetOfNoteDao, index: requests[4].indexWithinNoteHashTree }), - ]); - - // Check that every note has a different nonce. - const nonceSet = new Set(); - addedIncoming.forEach(info => nonceSet.add(info.nonce.value)); - expect(nonceSet.size).toBe(requests.length); - } - - // Then we check outgoing - { - const addedOutgoing: OutgoingNoteDao[] = addNotesSpy.mock.calls[0][1]; - expect(addedOutgoing.map(dao => dao)).toEqual([ - expect.objectContaining(requests[0].snippetOfNoteDao), - expect.objectContaining(requests[1].snippetOfNoteDao), - expect.objectContaining(requests[2].snippetOfNoteDao), - expect.objectContaining(requests[3].snippetOfNoteDao), - expect.objectContaining(requests[4].snippetOfNoteDao), - ]); - - // Outgoing note daos do not have a nonce so we don't check it. 
- } - }); - - it('advances the block number', async () => { - const request = new MockNoteRequest(getRandomNoteLogPayload(), 6, 0, 2, account.address, ownerOvKeys); - - const blocks = mockBlocks([request]); - await noteProcessor.process(blocks); - - expect(noteProcessor.status.syncedToBlock).toEqual(blocks.at(-1)?.number); - }); - - it('should restore the last block number processed and ignore the starting block', async () => { - const request = new MockNoteRequest( - getRandomNoteLogPayload(), - 6, - 0, - 2, - CompleteAddress.random().address, - KeyValidationRequest.random(), - ); - - const blocks = mockBlocks([request]); - await noteProcessor.process(blocks); - - const newNoteProcessor = NoteProcessor.create( - account, - keyStore, - database, - aztecNode, - INITIAL_L2_BLOCK_NUM, - simulator, - ); - - expect(newNoteProcessor.status).toEqual(noteProcessor.status); - }); - - function getRandomNoteLogPayload(app = AztecAddress.random()): EncryptedLogPayload { - return new EncryptedLogPayload(Fr.random(), app, L1NotePayload.random(app).toIncomingBodyPlaintext()); - } -}); diff --git a/yarn-project/pxe/src/note_processor/note_processor.ts b/yarn-project/pxe/src/note_processor/note_processor.ts deleted file mode 100644 index 128b58b8104..00000000000 --- a/yarn-project/pxe/src/note_processor/note_processor.ts +++ /dev/null @@ -1,358 +0,0 @@ -import { type AztecNode, L1NotePayload, type L2Block } from '@aztec/circuit-types'; -import { type NoteProcessorStats } from '@aztec/circuit-types/stats'; -import { - type CompleteAddress, - INITIAL_L2_BLOCK_NUM, - MAX_NOTE_HASHES_PER_TX, - computeAddressSecret, - computePoint, -} from '@aztec/circuits.js'; -import { type Fr } from '@aztec/foundation/fields'; -import { type Logger, createDebugLogger } from '@aztec/foundation/log'; -import { Timer } from '@aztec/foundation/timer'; -import { type KeyStore } from '@aztec/key-store'; -import { type AcirSimulator } from '@aztec/simulator'; - -import { type DeferredNoteDao } from 
'../database/deferred_note_dao.js'; -import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; -import { type PxeDatabase } from '../database/index.js'; -import { type OutgoingNoteDao } from '../database/outgoing_note_dao.js'; -import { getAcirSimulator } from '../simulator/index.js'; -import { produceNoteDaos } from './utils/produce_note_daos.js'; - -/** - * Contains all the decrypted data in this array so that we can later batch insert it all into the database. - */ -interface ProcessedData { - /** Holds L2 block. */ - block: L2Block; - /** DAOs of processed incoming notes. */ - incomingNotes: IncomingNoteDao[]; - /** DAOs of processed outgoing notes. */ - outgoingNotes: OutgoingNoteDao[]; -} - -/** - * NoteProcessor is responsible for decrypting logs and converting them to notes via their originating contracts - * before storing them against their owner. - */ -export class NoteProcessor { - /** Keeps track of processing time since an instance is created. */ - public readonly timer: Timer = new Timer(); - - /** Stats accumulated for this processor. */ - public readonly stats: NoteProcessorStats = { - seen: 0, - decryptedIncoming: 0, - decryptedOutgoing: 0, - deferredIncoming: 0, - deferredOutgoing: 0, - failed: 0, - blocks: 0, - txs: 0, - }; - - private constructor( - public readonly account: CompleteAddress, - private keyStore: KeyStore, - private db: PxeDatabase, - private node: AztecNode, - private startingBlock: number, - private simulator: AcirSimulator, - private log: Logger, - ) {} - - public static create( - account: CompleteAddress, - keyStore: KeyStore, - db: PxeDatabase, - node: AztecNode, - startingBlock: number = INITIAL_L2_BLOCK_NUM, - simulator = getAcirSimulator(db, node, keyStore), - log = createDebugLogger('aztec:note_processor'), - ) { - return new NoteProcessor(account, keyStore, db, node, startingBlock, simulator, log); - } - - /** - * Check if the NoteProcessor is synchronized with the remote block number. 
- * The function queries the remote block number from the AztecNode and compares it with the syncedToBlock value in the NoteProcessor. - * If the values are equal, then the NoteProcessor is considered to be synchronized, otherwise not. - * - * @returns A boolean indicating whether the NoteProcessor is synchronized with the remote block number or not. - */ - public async isSynchronized() { - const remoteBlockNumber = await this.node.getBlockNumber(); - return this.getSyncedToBlock() === remoteBlockNumber; - } - - /** - * Returns synchronization status (ie up to which block has been synced ) for this note processor. - */ - public get status() { - return { syncedToBlock: this.getSyncedToBlock() }; - } - - private getSyncedToBlock(): number { - return this.db.getSynchedBlockNumberForAccount(this.account.address) ?? this.startingBlock - 1; - } - - /** - * Extracts new user-relevant notes from the information contained in the provided L2 blocks and encrypted logs. - * - * @param blocks - L2 blocks to be processed. - * @returns A promise that resolves once the processing is completed. - */ - public async process(blocks: L2Block[]): Promise { - if (blocks.length === 0) { - return; - } - - const blocksAndNotes: ProcessedData[] = []; - // Keep track of notes that we couldn't process because the contract was not found. - const deferredIncomingNotes: DeferredNoteDao[] = []; - const deferredOutgoingNotes: DeferredNoteDao[] = []; - - const ivskM = await this.keyStore.getMasterSecretKey(this.account.publicKeys.masterIncomingViewingPublicKey); - const addressSecret = computeAddressSecret(this.account.getPreaddress(), ivskM); - - const ovskM = await this.keyStore.getMasterSecretKey(this.account.publicKeys.masterOutgoingViewingPublicKey); - - // Iterate over both blocks and encrypted logs. 
- for (const block of blocks) { - this.stats.blocks++; - const { txLogs: encryptedTxLogs } = block.body.noteEncryptedLogs; - const { txLogs: unencryptedTxLogs } = block.body.unencryptedLogs; - - const dataStartIndexForBlock = - block.header.state.partial.noteHashTree.nextAvailableLeafIndex - - block.body.numberOfTxsIncludingPadded * MAX_NOTE_HASHES_PER_TX; - - // We are using set for `userPertainingTxIndices` to avoid duplicates. This would happen in case there were - // multiple encrypted logs in a tx pertaining to a user. - const incomingNotes: IncomingNoteDao[] = []; - const outgoingNotes: OutgoingNoteDao[] = []; - - // Iterate over all the encrypted logs and try decrypting them. If successful, store the note. - for (let indexOfTxInABlock = 0; indexOfTxInABlock < encryptedTxLogs.length; ++indexOfTxInABlock) { - this.stats.txs++; - const dataStartIndexForTx = dataStartIndexForBlock + indexOfTxInABlock * MAX_NOTE_HASHES_PER_TX; - const noteHashes = block.body.txEffects[indexOfTxInABlock].noteHashes; - // Note: Each tx generates a `TxL2Logs` object and for this reason we can rely on its index corresponding - // to the index of a tx in a block. - const encryptedTxFunctionLogs = encryptedTxLogs[indexOfTxInABlock].functionLogs; - const unencryptedTxFunctionLogs = unencryptedTxLogs[indexOfTxInABlock].functionLogs; - const excludedIndices: Set = new Set(); - - // We iterate over both encrypted and unencrypted logs to decrypt the notes since partial notes are passed - // via the unencrypted logs stream. 
- for (const txFunctionLogs of [encryptedTxFunctionLogs, unencryptedTxFunctionLogs]) { - const isFromPublic = txFunctionLogs === unencryptedTxFunctionLogs; - for (const functionLogs of txFunctionLogs) { - for (const unprocessedLog of functionLogs.logs) { - this.stats.seen++; - const incomingNotePayload = L1NotePayload.decryptAsIncoming( - unprocessedLog.data, - addressSecret, - isFromPublic, - ); - const outgoingNotePayload = L1NotePayload.decryptAsOutgoing(unprocessedLog.data, ovskM, isFromPublic); - - if (incomingNotePayload || outgoingNotePayload) { - if (incomingNotePayload && outgoingNotePayload && !incomingNotePayload.equals(outgoingNotePayload)) { - throw new Error( - `Incoming and outgoing note payloads do not match. Incoming: ${JSON.stringify( - incomingNotePayload, - )}, Outgoing: ${JSON.stringify(outgoingNotePayload)}`, - ); - } - - const payload = incomingNotePayload || outgoingNotePayload; - - const txEffect = block.body.txEffects[indexOfTxInABlock]; - const { incomingNote, outgoingNote, incomingDeferredNote, outgoingDeferredNote } = - await produceNoteDaos( - this.simulator, - this.db, - incomingNotePayload ? computePoint(this.account.address) : undefined, - outgoingNotePayload ? 
this.account.publicKeys.masterOutgoingViewingPublicKey : undefined, - payload!, - txEffect.txHash, - noteHashes, - dataStartIndexForTx, - excludedIndices, - this.log, - txEffect.unencryptedLogs, - ); - - if (incomingNote) { - incomingNotes.push(incomingNote); - this.stats.decryptedIncoming++; - } - if (outgoingNote) { - outgoingNotes.push(outgoingNote); - this.stats.decryptedOutgoing++; - } - if (incomingDeferredNote) { - deferredIncomingNotes.push(incomingDeferredNote); - this.stats.deferredIncoming++; - } - if (outgoingDeferredNote) { - deferredOutgoingNotes.push(outgoingDeferredNote); - this.stats.deferredOutgoing++; - } - - if (incomingNote == undefined && outgoingNote == undefined && incomingDeferredNote == undefined) { - this.stats.failed++; - } - } - } - } - } - } - - blocksAndNotes.push({ - block, - incomingNotes, - outgoingNotes, - }); - } - - await this.processBlocksAndNotes(blocksAndNotes); - await this.processDeferredNotes(deferredIncomingNotes, deferredOutgoingNotes); - - const syncedToBlock = blocks[blocks.length - 1].number; - await this.db.setSynchedBlockNumberForAccount(this.account.address, syncedToBlock); - - this.log.debug(`Synched block ${syncedToBlock}`); - } - - /** - * Process the given blocks and their associated transaction auxiliary data. - * This function updates the database with information about new transactions, - * user-pertaining transaction indices, and auxiliary data. It also removes nullified - * transaction auxiliary data from the database. This function keeps track of new nullifiers - * and ensures all other transactions are updated with newly settled block information. - * - * @param blocksAndNotes - Array of objects containing L2 blocks, user-pertaining transaction indices, and NoteDaos. 
- */ - private async processBlocksAndNotes(blocksAndNotes: ProcessedData[]) { - const incomingNotes = blocksAndNotes.flatMap(b => b.incomingNotes); - const outgoingNotes = blocksAndNotes.flatMap(b => b.outgoingNotes); - if (incomingNotes.length || outgoingNotes.length) { - await this.db.addNotes(incomingNotes, outgoingNotes, this.account.address); - incomingNotes.forEach(noteDao => { - this.log.verbose( - `Added incoming note for contract ${noteDao.contractAddress} at slot ${ - noteDao.storageSlot - } with nullifier ${noteDao.siloedNullifier.toString()}`, - ); - }); - outgoingNotes.forEach(noteDao => { - this.log.verbose(`Added outgoing note for contract ${noteDao.contractAddress} at slot ${noteDao.storageSlot}`); - }); - } - - const nullifiers: Fr[] = blocksAndNotes.flatMap(b => - b.block.body.txEffects.flatMap(txEffect => txEffect.nullifiers), - ); - const removedNotes = await this.db.removeNullifiedNotes(nullifiers, computePoint(this.account.address)); - removedNotes.forEach(noteDao => { - this.log.verbose( - `Removed note for contract ${noteDao.contractAddress} at slot ${ - noteDao.storageSlot - } with nullifier ${noteDao.siloedNullifier.toString()}`, - ); - }); - } - - /** - * Store the given deferred notes in the database for later decoding. - * - * @param deferredIncomingNotes - incoming notes that are intended for us but we couldn't process because the contract was not found. - * @param deferredOutgoingNotes - outgoing notes that we couldn't process because the contract was not found. 
- */ - private async processDeferredNotes( - deferredIncomingNotes: DeferredNoteDao[], - deferredOutgoingNotes: DeferredNoteDao[], - ) { - if (deferredIncomingNotes.length || deferredOutgoingNotes.length) { - await this.db.addDeferredNotes([...deferredIncomingNotes, ...deferredOutgoingNotes]); - deferredIncomingNotes.forEach(noteDao => { - this.log.verbose( - `Deferred incoming note for contract ${noteDao.payload.contractAddress} at slot ${ - noteDao.payload.storageSlot - } in tx ${noteDao.txHash.toString()}`, - ); - }); - deferredOutgoingNotes.forEach(noteDao => { - this.log.verbose( - `Deferred outgoing note for contract ${noteDao.payload.contractAddress} at slot ${ - noteDao.payload.storageSlot - } in tx ${noteDao.txHash.toString()}`, - ); - }); - } - } - - /** - * Retry decoding the given deferred notes because we now have the contract code. - * - * @param deferredNoteDaos - notes that we have previously deferred because the contract was not found - * @returns An object containing arrays of incoming and outgoing notes that were successfully decoded. - * - * @remarks Caller is responsible for making sure that we have the contract for the - * deferred notes provided: we will not retry notes that fail again. 
- */ - public async decodeDeferredNotes(deferredNoteDaos: DeferredNoteDao[]): Promise<{ - incomingNotes: IncomingNoteDao[]; - outgoingNotes: OutgoingNoteDao[]; - }> { - const excludedIndices: Set = new Set(); - const incomingNotes: IncomingNoteDao[] = []; - const outgoingNotes: OutgoingNoteDao[] = []; - - for (const deferredNote of deferredNoteDaos) { - const { publicKey, payload, txHash, noteHashes, dataStartIndexForTx, unencryptedLogs } = deferredNote; - - const isIncoming = publicKey.equals(computePoint(this.account.address)); - const isOutgoing = publicKey.equals(this.account.publicKeys.masterOutgoingViewingPublicKey); - - if (!isIncoming && !isOutgoing) { - // The note does not belong to this note processor - continue; - } - - const { incomingNote, outgoingNote } = await produceNoteDaos( - this.simulator, - this.db, - isIncoming ? computePoint(this.account.address) : undefined, - isOutgoing ? this.account.publicKeys.masterOutgoingViewingPublicKey : undefined, - payload, - txHash, - noteHashes, - dataStartIndexForTx, - excludedIndices, - this.log, - unencryptedLogs, - ); - - if (isIncoming) { - if (!incomingNote) { - throw new Error('Deferred incoming note could not be decoded'); - } - incomingNotes.push(incomingNote); - this.stats.decryptedIncoming++; - } - if (outgoingNote) { - if (!outgoingNote) { - throw new Error('Deferred outgoing note could not be decoded'); - } - outgoingNotes.push(outgoingNote); - this.stats.decryptedOutgoing++; - } - } - - return { incomingNotes, outgoingNotes }; - } -} diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index c6451ace7d6..62e96081780 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -31,7 +31,6 @@ import { UniqueNote, getNonNullifiedL1ToL2MessageWitness, } from '@aztec/circuit-types'; -import { type NoteProcessorStats } from '@aztec/circuit-types/stats'; import { type AztecAddress, type 
CompleteAddress, @@ -115,33 +114,11 @@ export class PXEService implements PXE { public async start() { const { l2BlockPollingIntervalMS } = this.config; await this.synchronizer.start(1, l2BlockPollingIntervalMS); - await this.restoreNoteProcessors(); await this.#registerProtocolContracts(); const info = await this.getNodeInfo(); this.log.info(`Started PXE connected to chain ${info.l1ChainId} version ${info.protocolVersion}`); } - private async restoreNoteProcessors() { - const accounts = await this.keyStore.getAccounts(); - const accountsSet = new Set(accounts.map(k => k.toString())); - - const registeredAddresses = await this.db.getCompleteAddresses(); - - let count = 0; - for (const completeAddress of registeredAddresses) { - if (!accountsSet.has(completeAddress.address.toString())) { - continue; - } - - count++; - this.synchronizer.addAccount(completeAddress, this.keyStore, this.config.l2StartingBlock); - } - - if (count > 0) { - this.log.info(`Restored ${count} accounts`); - } - } - /** * Stops the PXE Service, halting processing of new transactions and shutting down the synchronizer. * This function ensures that all ongoing tasks are completed before stopping the server. 
@@ -193,7 +170,6 @@ export class PXEService implements PXE { this.log.info(`Account:\n "${accountCompleteAddress.address.toString()}"\n already registered.`); return accountCompleteAddress; } else { - this.synchronizer.addAccount(accountCompleteAddress, this.keyStore, this.config.l2StartingBlock); this.log.info(`Registered account ${accountCompleteAddress.address.toString()}`); this.log.debug(`Registered account\n ${accountCompleteAddress.toReadableString()}`); } @@ -293,7 +269,6 @@ export class PXEService implements PXE { this.log.info(`Added contract ${artifact.name} at ${instance.address.toString()}`); await this.db.addContractInstance(instance); - await this.synchronizer.reprocessDeferredNotesForContract(instance.address); } public getContracts(): Promise { @@ -390,13 +365,15 @@ export class PXEService implements PXE { note.note, ); - const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, siloedNoteHash); + const [index] = await this.node.findLeavesIndexes('latest', MerkleTreeId.NOTE_HASH_TREE, [siloedNoteHash]); if (index === undefined) { throw new Error('Note does not exist.'); } const siloedNullifier = siloNullifier(note.contractAddress, innerNullifier!); - const nullifierIndex = await this.node.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, siloedNullifier); + const [nullifierIndex] = await this.node.findLeavesIndexes('latest', MerkleTreeId.NULLIFIER_TREE, [ + siloedNullifier, + ]); if (nullifierIndex !== undefined) { throw new Error('The note has been destroyed.'); } @@ -439,7 +416,7 @@ export class PXEService implements PXE { throw new Error('Unexpectedly received non-zero nullifier.'); } - const index = await this.node.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, siloedNoteHash); + const [index] = await this.node.findLeavesIndexes('latest', MerkleTreeId.NOTE_HASH_TREE, [siloedNoteHash]); if (index === undefined) { throw new Error('Note does not exist.'); } @@ -704,7 +681,6 @@ export class PXEService implements 
PXE { const { address, contractClass, instance, artifact } = getCanonicalProtocolContract(name); await this.db.addContractArtifact(contractClass.id, artifact); await this.db.addContractInstance(instance); - await this.synchronizer.reprocessDeferredNotesForContract(address); this.log.info(`Added protocol contract ${name} at ${address.toString()}`); } } @@ -849,18 +825,10 @@ export class PXEService implements PXE { return await this.synchronizer.isGlobalStateSynchronized(); } - public async isAccountStateSynchronized(account: AztecAddress) { - return await this.synchronizer.isAccountStateSynchronized(account); - } - public getSyncStatus() { return Promise.resolve(this.synchronizer.getSyncStatus()); } - public getSyncStats(): Promise<{ [address: string]: NoteProcessorStats }> { - return Promise.resolve(this.synchronizer.getSyncStats()); - } - public async isContractClassPubliclyRegistered(id: Fr): Promise { return !!(await this.node.getContractClass(id)); } diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 425603d6516..73cf685b016 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -2,24 +2,25 @@ import { type AztecNode, L1NotePayload, type L2Block, + type L2BlockNumber, MerkleTreeId, type NoteStatus, type NullifierMembershipWitness, type PublicDataWitness, - type TxScopedEncryptedL2NoteLog, + type TxEffect, + type TxScopedL2Log, getNonNullifiedL1ToL2MessageWitness, } from '@aztec/circuit-types'; import { type AztecAddress, type CompleteAddress, type ContractInstance, - type Fr, + Fr, type FunctionSelector, type Header, IndexedTaggingSecret, type KeyValidationRequest, type L1_TO_L2_MSG_TREE_HEIGHT, - TaggingSecret, computeAddressSecret, computePoint, computeTaggingSecret, @@ -31,11 +32,10 @@ import { type KeyStore } from '@aztec/key-store'; import { type AcirSimulator, type DBOracle, MessageLoadOracleInputs } from '@aztec/simulator'; import 
{ type ContractDataOracle } from '../contract_data_oracle/index.js'; -import { type DeferredNoteDao } from '../database/deferred_note_dao.js'; import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; import { type OutgoingNoteDao } from '../database/outgoing_note_dao.js'; -import { produceNoteDaos } from '../note_processor/utils/produce_note_daos.js'; +import { produceNoteDaos } from '../note_decryption_utils/produce_note_daos.js'; import { getAcirSimulator } from '../simulator/index.js'; /** @@ -161,7 +161,7 @@ export class SimulatorOracle implements DBOracle { * @returns - The index of the commitment. Undefined if it does not exist in the tree. */ async getCommitmentIndex(commitment: Fr) { - return await this.aztecNode.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, commitment); + return await this.findLeafIndex('latest', MerkleTreeId.NOTE_HASH_TREE, commitment); } // We need this in public as part of the EXISTS calls - but isn't used in private @@ -170,11 +170,16 @@ export class SimulatorOracle implements DBOracle { } async getNullifierIndex(nullifier: Fr) { - return await this.aztecNode.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, nullifier); + return await this.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, nullifier); } - public async findLeafIndex(blockNumber: number, treeId: MerkleTreeId, leafValue: Fr): Promise { - return await this.aztecNode.findLeafIndex(blockNumber, treeId, leafValue); + public async findLeafIndex( + blockNumber: L2BlockNumber, + treeId: MerkleTreeId, + leafValue: Fr, + ): Promise { + const [leafIndex] = await this.aztecNode.findLeavesIndexes(blockNumber, treeId, [leafValue]); + return leafIndex; } public async getSiblingPath(blockNumber: number, treeId: MerkleTreeId, leafIndex: bigint): Promise { @@ -258,14 +263,14 @@ export class SimulatorOracle implements DBOracle { * @param recipient - The address receiving the note * @returns A siloed 
tagging secret that can be used to tag notes. */ - public async getAppTaggingSecret( + public async getAppTaggingSecretAsSender( contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress, ): Promise { - const directionalSecret = await this.#calculateDirectionalSecret(contractAddress, sender, recipient); - const [index] = await this.db.getTaggingSecretsIndexes([directionalSecret]); - return IndexedTaggingSecret.fromTaggingSecret(directionalSecret, index); + const secret = await this.#calculateTaggingSecret(contractAddress, sender, recipient); + const [index] = await this.db.getTaggingSecretsIndexesAsSender([secret]); + return new IndexedTaggingSecret(secret, index); } /** @@ -274,24 +279,26 @@ export class SimulatorOracle implements DBOracle { * @param sender - The address sending the note * @param recipient - The address receiving the note */ - public async incrementAppTaggingSecret( + public async incrementAppTaggingSecretIndexAsSender( contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress, ): Promise { - const directionalSecret = await this.#calculateDirectionalSecret(contractAddress, sender, recipient); - await this.db.incrementTaggingSecretsIndexes([directionalSecret]); + const secret = await this.#calculateTaggingSecret(contractAddress, sender, recipient); + const contractName = await this.contractDataOracle.getDebugContractName(contractAddress); + this.log.verbose( + `Incrementing secret ${secret} as sender ${sender} for recipient: ${recipient} at contract: ${contractName}(${contractAddress})`, + ); + await this.db.incrementTaggingSecretsIndexesAsSender([secret]); } - async #calculateDirectionalSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { + async #calculateTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await 
this.keyStore.getMasterIncomingViewingSecretKey(sender); const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); - // Get the index of the secret, ensuring the directionality (sender -> recipient) - const directionalSecret = new TaggingSecret(siloedSecret, recipient); - return directionalSecret; + return siloedSecret; } /** @@ -303,29 +310,27 @@ export class SimulatorOracle implements DBOracle { * @param recipient - The address receiving the notes * @returns A list of siloed tagging secrets */ - async #getAppTaggingSecretsForSenders( + async #getAppTaggingSecretsForContacts( contractAddress: AztecAddress, recipient: AztecAddress, ): Promise { const recipientCompleteAddress = await this.getCompleteAddress(recipient); const recipientIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(recipient); - // We implicitly add the recipient as a contact, this helps us decrypt tags on notes that we send to ourselves (recipient = us, sender = us) - const contacts = [...this.db.getContactAddresses(), recipient]; + // We implicitly add all PXE accounts as contacts, this helps us decrypt tags on notes that we send to ourselves (recipient = us, sender = us) + const contacts = [...this.db.getContactAddresses(), ...(await this.keyStore.getAccounts())].filter( + (address, index, self) => index === self.findIndex(otherAddress => otherAddress.equals(address)), + ); const appTaggingSecrets = contacts.map(contact => { const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, contact); return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); }); - // Ensure the directionality (sender -> recipient) - const directionalSecrets = appTaggingSecrets.map(secret => new TaggingSecret(secret, recipient)); - const indexes = await 
this.db.getTaggingSecretsIndexes(directionalSecrets); - return directionalSecrets.map((directionalSecret, i) => - IndexedTaggingSecret.fromTaggingSecret(directionalSecret, indexes[i]), - ); + const indexes = await this.db.getTaggingSecretsIndexesAsRecipient(appTaggingSecrets); + return appTaggingSecrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); } /** - * Synchronizes the logs tagged with the recipient's address and all the senders in the addressbook. + * Synchronizes the logs tagged with scoped addresses and all the senders in the addressbook. * Returns the unsynched logs and updates the indexes of the secrets used to tag them until there are no more logs to sync. * @param contractAddress - The address of the contract that the logs are tagged for * @param recipient - The address of the recipient @@ -333,40 +338,113 @@ export class SimulatorOracle implements DBOracle { */ public async syncTaggedLogs( contractAddress: AztecAddress, - recipient: AztecAddress, - ): Promise { - // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles. - // However it is impossible at the moment due to the language not supporting nested slices. - // This nesting is necessary because for a given set of tags we don't - // know how many logs we will get back. Furthermore, these logs are of undetermined - // length, since we don't really know the note they correspond to until we decrypt them. - - // 1. Get all the secrets for the recipient and sender pairs (#9365) - let appTaggingSecrets = await this.#getAppTaggingSecretsForSenders(contractAddress, recipient); - - const logs: TxScopedEncryptedL2NoteLog[] = []; - while (appTaggingSecrets.length > 0) { - // 2. Compute tags using the secrets, recipient and index. 
Obtain logs for each tag (#9380) - const currentTags = appTaggingSecrets.map(taggingSecret => taggingSecret.computeTag()); - const logsByTags = await this.aztecNode.getLogsByTags(currentTags); - const newTaggingSecrets: IndexedTaggingSecret[] = []; - logsByTags.forEach((logsByTag, index) => { - // 3.1. Append logs to the list and increment the index for the tags that have logs (#9380) - if (logsByTag.length > 0) { - logs.push(...logsByTag); - // 3.2. Increment the index for the tags that have logs (#9380) - newTaggingSecrets.push( - new IndexedTaggingSecret(appTaggingSecrets[index].secret, recipient, appTaggingSecrets[index].index + 1), + maxBlockNumber: number, + scopes?: AztecAddress[], + ): Promise> { + const recipients = scopes ? scopes : await this.keyStore.getAccounts(); + const result = new Map(); + const contractName = await this.contractDataOracle.getDebugContractName(contractAddress); + for (const recipient of recipients) { + const logs: TxScopedL2Log[] = []; + // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles. + // However it is impossible at the moment due to the language not supporting nested slices. + // This nesting is necessary because for a given set of tags we don't + // know how many logs we will get back. Furthermore, these logs are of undetermined + // length, since we don't really know the note they correspond to until we decrypt them. + + // 1. Get all the secrets for the recipient and sender pairs (#9365) + const appTaggingSecrets = await this.#getAppTaggingSecretsForContacts(contractAddress, recipient); + + // 1.1 Set up a sliding window with an offset. Chances are the sender might have messed up + // and inadvertedly incremented their index without use getting any logs (for example, in case + // of a revert). If we stopped looking for logs the first time + // we receive 0 for a tag, we might never receive anything from that sender again. 
+ // Also there's a possibility that we have advanced our index, but the sender has reused it, so + // we might have missed some logs. For these reasons, we have to look both back and ahead of the + // stored index + const INDEX_OFFSET = 10; + type SearchState = { + currentTagggingSecrets: IndexedTaggingSecret[]; + maxIndexesToCheck: { [k: string]: number }; + initialSecretIndexes: { [k: string]: number }; + secretsToIncrement: { [k: string]: number }; + }; + const searchState = appTaggingSecrets.reduce( + (acc, appTaggingSecret) => ({ + // Start looking for logs before the stored index + currentTagggingSecrets: acc.currentTagggingSecrets.concat([ + new IndexedTaggingSecret(appTaggingSecret.secret, Math.max(0, appTaggingSecret.index - INDEX_OFFSET)), + ]), + // Keep looking for logs beyond the stored index + maxIndexesToCheck: { + ...acc.maxIndexesToCheck, + ...{ [appTaggingSecret.secret.toString()]: appTaggingSecret.index + INDEX_OFFSET }, + }, + // Keeps track of the secrets we have to increment in the database + secretsToIncrement: {}, + // Store the initial set of indexes for the secrets + initialSecretIndexes: { + ...acc.initialSecretIndexes, + ...{ [appTaggingSecret.secret.toString()]: appTaggingSecret.index }, + }, + }), + { currentTagggingSecrets: [], maxIndexesToCheck: {}, secretsToIncrement: {}, initialSecretIndexes: {} }, + ); + + let { currentTagggingSecrets } = searchState; + const { maxIndexesToCheck, secretsToIncrement, initialSecretIndexes } = searchState; + + while (currentTagggingSecrets.length > 0) { + // 2. Compute tags using the secrets, recipient and index. 
Obtain logs for each tag (#9380) + const currentTags = currentTagggingSecrets.map(taggingSecret => taggingSecret.computeTag(recipient)); + const logsByTags = await this.aztecNode.getLogsByTags(currentTags); + const newTaggingSecrets: IndexedTaggingSecret[] = []; + logsByTags.forEach((logsByTag, logIndex) => { + const { secret: currentSecret, index: currentIndex } = currentTagggingSecrets[logIndex]; + const currentSecretAsStr = currentSecret.toString(); + this.log.debug( + `Syncing logs for recipient ${recipient}, secret ${currentSecretAsStr}:${currentIndex} at contract: ${contractName}(${contractAddress})`, ); - } - }); - // 4. Consolidate in db and replace initial appTaggingSecrets with the new ones (updated indexes) - await this.db.incrementTaggingSecretsIndexes( - newTaggingSecrets.map(secret => new TaggingSecret(secret.secret, recipient)), + // 3.1. Append logs to the list and increment the index for the tags that have logs (#9380) + if (logsByTag.length > 0) { + this.log.verbose( + `Found ${ + logsByTag.length + } logs for secret ${currentSecretAsStr} as recipient ${recipient}. Incrementing index to ${ + currentIndex + 1 + } at contract: ${contractName}(${contractAddress})`, + ); + logs.push(...logsByTag); + + if (currentIndex >= initialSecretIndexes[currentSecretAsStr]) { + // 3.2. Increment the index for the tags that have logs, provided they're higher than the one + // we have stored in the db (#9380) + secretsToIncrement[currentSecretAsStr] = currentIndex + 1; + // 3.3. 
Slide the window forwards if we have found logs beyond the initial index + maxIndexesToCheck[currentSecretAsStr] = currentIndex + INDEX_OFFSET; + } + } + // 3.4 Keep increasing the index (inside the window) temporarily for the tags that have no logs + // There's a chance the sender missed some and we want to catch up + if (currentIndex < maxIndexesToCheck[currentSecretAsStr]) { + const newTaggingSecret = new IndexedTaggingSecret(currentSecret, currentIndex + 1); + newTaggingSecrets.push(newTaggingSecret); + } + }); + await this.db.setTaggingSecretsIndexesAsRecipient( + Object.keys(secretsToIncrement).map( + secret => new IndexedTaggingSecret(Fr.fromString(secret), secretsToIncrement[secret]), + ), + ); + currentTagggingSecrets = newTaggingSecrets; + } + + result.set( + recipient.toString(), + logs.filter(log => log.blockNumber <= maxBlockNumber), ); - appTaggingSecrets = newTaggingSecrets; } - return logs; + return result; } /** @@ -376,11 +454,7 @@ export class SimulatorOracle implements DBOracle { * @param simulator - The simulator to use for decryption. * @returns The decrypted notes. 
*/ - async #decryptTaggedLogs( - scopedLogs: TxScopedEncryptedL2NoteLog[], - recipient: AztecAddress, - simulator: AcirSimulator, - ) { + async #decryptTaggedLogs(scopedLogs: TxScopedL2Log[], recipient: AztecAddress, simulator?: AcirSimulator) { const recipientCompleteAddress = await this.getCompleteAddress(recipient); const ivskM = await this.keyStore.getMasterSecretKey( recipientCompleteAddress.publicKeys.masterIncomingViewingPublicKey, @@ -394,11 +468,16 @@ export class SimulatorOracle implements DBOracle { const excludedIndices: Map> = new Map(); const incomingNotes: IncomingNoteDao[] = []; const outgoingNotes: OutgoingNoteDao[] = []; - const deferredIncomingNotes: DeferredNoteDao[] = []; - const deferredOutgoingNotes: DeferredNoteDao[] = []; + + const txEffectsCache = new Map(); + for (const scopedLog of scopedLogs) { - const incomingNotePayload = L1NotePayload.decryptAsIncoming(scopedLog.log.data, addressSecret); - const outgoingNotePayload = L1NotePayload.decryptAsOutgoing(scopedLog.log.data, ovskM); + const incomingNotePayload = L1NotePayload.decryptAsIncoming( + scopedLog.logData, + addressSecret, + scopedLog.isFromPublic, + ); + const outgoingNotePayload = L1NotePayload.decryptAsOutgoing(scopedLog.logData, ovskM, scopedLog.isFromPublic); if (incomingNotePayload || outgoingNotePayload) { if (incomingNotePayload && outgoingNotePayload && !incomingNotePayload.equals(outgoingNotePayload)) { @@ -407,19 +486,25 @@ export class SimulatorOracle implements DBOracle { incomingNotePayload, )}, Outgoing: ${JSON.stringify(outgoingNotePayload)}`, ); + continue; } const payload = incomingNotePayload || outgoingNotePayload; - const txEffect = await this.aztecNode.getTxEffect(scopedLog.txHash); + + const txEffect = + txEffectsCache.get(scopedLog.txHash.toString()) ?? 
(await this.aztecNode.getTxEffect(scopedLog.txHash)); if (!txEffect) { this.log.warn(`No tx effect found for ${scopedLog.txHash} while decrypting tagged logs`); continue; } + + txEffectsCache.set(scopedLog.txHash.toString(), txEffect); + if (!excludedIndices.has(scopedLog.txHash.toString())) { excludedIndices.set(scopedLog.txHash.toString(), new Set()); } - const { incomingNote, outgoingNote, incomingDeferredNote, outgoingDeferredNote } = await produceNoteDaos( + const { incomingNote, outgoingNote } = await produceNoteDaos( // I don't like this at all, but we need a simulator to run `computeNoteHashAndOptionallyANullifier`. This generates // a chicken-and-egg problem due to this oracle requiring a simulator, which in turn requires this oracle. Furthermore, since jest doesn't allow // mocking ESM exports, we have to pollute the method even more by providing a simulator parameter so tests can inject a fake one. @@ -433,7 +518,6 @@ export class SimulatorOracle implements DBOracle { scopedLog.dataStartIndexForTx, excludedIndices.get(scopedLog.txHash.toString())!, this.log, - txEffect.unencryptedLogs, ); if (incomingNote) { @@ -442,18 +526,8 @@ export class SimulatorOracle implements DBOracle { if (outgoingNote) { outgoingNotes.push(outgoingNote); } - if (incomingDeferredNote) { - deferredIncomingNotes.push(incomingDeferredNote); - } - if (outgoingDeferredNote) { - deferredOutgoingNotes.push(outgoingDeferredNote); - } } } - if (deferredIncomingNotes.length || deferredOutgoingNotes.length) { - this.log.warn('Found deferred notes when processing tagged logs. This should not happen.'); - } - return { incomingNotes, outgoingNotes }; } @@ -463,15 +537,11 @@ export class SimulatorOracle implements DBOracle { * @param recipient - The recipient of the logs. 
*/ public async processTaggedLogs( - logs: TxScopedEncryptedL2NoteLog[], + logs: TxScopedL2Log[], recipient: AztecAddress, simulator?: AcirSimulator, ): Promise { - const { incomingNotes, outgoingNotes } = await this.#decryptTaggedLogs( - logs, - recipient, - simulator ?? getAcirSimulator(this.db, this.aztecNode, this.keyStore, this.contractDataOracle), - ); + const { incomingNotes, outgoingNotes } = await this.#decryptTaggedLogs(logs, recipient, simulator); if (incomingNotes.length || outgoingNotes.length) { await this.db.addNotes(incomingNotes, outgoingNotes, recipient); incomingNotes.forEach(noteDao => { @@ -486,21 +556,18 @@ export class SimulatorOracle implements DBOracle { }); } const nullifiedNotes: IncomingNoteDao[] = []; - for (const incomingNote of incomingNotes) { - // NOTE: this leaks information about the nullifiers I'm interested in to the node. - const found = await this.aztecNode.findLeafIndex( - 'latest', - MerkleTreeId.NULLIFIER_TREE, - incomingNote.siloedNullifier, - ); - if (found) { - nullifiedNotes.push(incomingNote); - } - } - await this.db.removeNullifiedNotes( - nullifiedNotes.map(note => note.siloedNullifier), - computePoint(recipient), + const currentNotesForRecipient = await this.db.getIncomingNotes({ owner: recipient }); + const nullifierIndexes = await this.aztecNode.findLeavesIndexes( + 'latest', + MerkleTreeId.NULLIFIER_TREE, + currentNotesForRecipient.map(note => note.siloedNullifier), ); + + const foundNullifiers = currentNotesForRecipient + .filter((_, i) => nullifierIndexes[i] !== undefined) + .map(note => note.siloedNullifier); + + await this.db.removeNullifiedNotes(foundNullifiers, computePoint(recipient)); nullifiedNotes.forEach(noteDao => { this.log.verbose( `Removed note for contract ${noteDao.contractAddress} at slot ${ diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts index a10c21271e0..f4630610b6e 100644 --- 
a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -6,7 +6,7 @@ import { Note, type TxEffect, TxHash, - TxScopedEncryptedL2NoteLog, + TxScopedL2Log, } from '@aztec/circuit-types'; import { AztecAddress, @@ -15,9 +15,9 @@ import { Fr, GrumpkinScalar, INITIAL_L2_BLOCK_NUM, + IndexedTaggingSecret, KeyValidationRequest, MAX_NOTE_HASHES_PER_TX, - TaggingSecret, computeAddress, computeOvskApp, computeTaggingSecret, @@ -127,6 +127,7 @@ describe('Simulator oracle', () => { aztecNode = mock(); database = new KVPxeDatabase(db); contractDataOracle = new ContractDataOracle(database); + jest.spyOn(contractDataOracle, 'getDebugContractName').mockImplementation(() => Promise.resolve('TestContract')); keyStore = new KeyStore(db); const simulatorOracleModule = await import('../simulator_oracle/index.js'); simulatorOracle = new simulatorOracleModule.SimulatorOracle(contractDataOracle, database, keyStore, aztecNode); @@ -141,36 +142,26 @@ describe('Simulator oracle', () => { describe('sync tagged logs', () => { const NUM_SENDERS = 10; + const SENDER_OFFSET_WINDOW_SIZE = 10; let senders: { completeAddress: CompleteAddress; ivsk: Fq }[]; - beforeEach(async () => { - // Set up the address book - senders = times(NUM_SENDERS).map((_, index) => { - const keys = deriveKeys(new Fr(index)); - const partialAddress = Fr.random(); - const address = computeAddress(keys.publicKeys, partialAddress); - const completeAddress = new CompleteAddress(address, keys.publicKeys, partialAddress); - return { completeAddress, ivsk: keys.masterIncomingViewingSecretKey }; - }); - for (const sender of senders) { - await database.addContactAddress(sender.completeAddress.address); - } + function generateMockLogs(senderOffset: number) { + const logs: { [k: string]: TxScopedL2Log[] } = {}; - const logs: { [k: string]: TxScopedEncryptedL2NoteLog[] } = {}; - - // Add a random note from every address in the address book for our account 
with index 0 + // Add a random note from every address in the address book for our account with index senderOffset // Compute the tag as sender (knowledge of preaddress and ivsk) for (const sender of senders) { - const tag = computeTagForIndex(sender, recipient.address, contractAddress, 0); + const tag = computeTagForIndex(sender, recipient.address, contractAddress, senderOffset); + const blockNumber = 1; const randomNote = new MockNoteRequest( getRandomNoteLogPayload(tag, contractAddress), - 1, + blockNumber, 1, 1, recipient.address, recipientOvKeys, ); - const log = new TxScopedEncryptedL2NoteLog(TxHash.random(), 0, randomNote.encrypt()); + const log = new TxScopedL2Log(TxHash.random(), 0, blockNumber, false, randomNote.encrypt().data); logs[tag.toString()] = [log]; } // Accumulated logs intended for recipient: NUM_SENDERS @@ -178,39 +169,41 @@ describe('Simulator oracle', () => { // Add a random note from the first sender in the address book, repeating the tag // Compute the tag as sender (knowledge of preaddress and ivsk) const firstSender = senders[0]; - const tag = computeTagForIndex(firstSender, recipient.address, contractAddress, 0); - const log = new TxScopedEncryptedL2NoteLog(TxHash.random(), 0, EncryptedL2NoteLog.random(tag)); + const tag = computeTagForIndex(firstSender, recipient.address, contractAddress, senderOffset); + const log = new TxScopedL2Log(TxHash.random(), 1, 0, false, EncryptedL2NoteLog.random(tag).data); logs[tag.toString()].push(log); // Accumulated logs intended for recipient: NUM_SENDERS + 1 - // Add a random note from half the address book for our account with index 1 + // Add a random note from half the address book for our account with index senderOffset + 1 // Compute the tag as sender (knowledge of preaddress and ivsk) for (let i = NUM_SENDERS / 2; i < NUM_SENDERS; i++) { const sender = senders[i]; - const tag = computeTagForIndex(sender, recipient.address, contractAddress, 1); + const tag = computeTagForIndex(sender, 
recipient.address, contractAddress, senderOffset + 1); + const blockNumber = 2; const randomNote = new MockNoteRequest( getRandomNoteLogPayload(tag, contractAddress), - 1, + blockNumber, 1, 1, recipient.address, recipientOvKeys, ); - const log = new TxScopedEncryptedL2NoteLog(TxHash.random(), 0, randomNote.encrypt()); + const log = new TxScopedL2Log(TxHash.random(), 0, blockNumber, false, randomNote.encrypt().data); logs[tag.toString()] = [log]; } // Accumulated logs intended for recipient: NUM_SENDERS + 1 + NUM_SENDERS / 2 - // Add a random note from every address in the address book for a random recipient with index 0 + // Add a random note from every address in the address book for a random recipient with index senderOffset // Compute the tag as sender (knowledge of preaddress and ivsk) for (const sender of senders) { const keys = deriveKeys(Fr.random()); const partialAddress = Fr.random(); const randomRecipient = computeAddress(keys.publicKeys, partialAddress); - const tag = computeTagForIndex(sender, randomRecipient, contractAddress, 0); + const tag = computeTagForIndex(sender, randomRecipient, contractAddress, senderOffset); + const blockNumber = 3; const randomNote = new MockNoteRequest( getRandomNoteLogPayload(tag, contractAddress), - 1, + blockNumber, 1, 1, randomRecipient, @@ -219,7 +212,7 @@ describe('Simulator oracle', () => { computeOvskApp(keys.masterOutgoingViewingSecretKey, contractAddress), ), ); - const log = new TxScopedEncryptedL2NoteLog(TxHash.random(), 0, randomNote.encrypt()); + const log = new TxScopedL2Log(TxHash.random(), 0, blockNumber, false, randomNote.encrypt().data); logs[tag.toString()] = [log]; } // Accumulated logs intended for recipient: NUM_SENDERS + 1 + NUM_SENDERS / 2 @@ -229,59 +222,147 @@ describe('Simulator oracle', () => { aztecNode.getLogsByTags.mockImplementation(tags => { return Promise.resolve(tags.map(tag => logs[tag.toString()] ?? 
[])); }); + } + + beforeEach(async () => { + // Set up the address book + senders = times(NUM_SENDERS).map((_, index) => { + const keys = deriveKeys(new Fr(index)); + const partialAddress = Fr.random(); + const address = computeAddress(keys.publicKeys, partialAddress); + const completeAddress = new CompleteAddress(address, keys.publicKeys, partialAddress); + return { completeAddress, ivsk: keys.masterIncomingViewingSecretKey }; + }); + for (const sender of senders) { + await database.addContactAddress(sender.completeAddress.address); + } + aztecNode.getLogsByTags.mockReset(); }); it('should sync tagged logs', async () => { - const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, recipient.address); + const senderOffset = 0; + generateMockLogs(senderOffset); + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3); // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first one + half of the logs for the second index - expect(syncedLogs).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2); + expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2); // Recompute the secrets (as recipient) to ensure indexes are updated const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const directionalSecrets = senders.map(sender => { + const secrets = senders.map(sender => { const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - const siloedSecret = poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - return new TaggingSecret(siloedSecret, recipient.address); + return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); }); // First sender should have 2 logs, but keep index 1 since they were built using the same tag - // Next 4 senders hould also have index 1 - // Last 5 senders should have index 2 - 
const indexes = await database.getTaggingSecretsIndexes(directionalSecrets); + // Next 4 senders should also have index 1 = offset + 1 + // Last 5 senders should have index 2 = offset + 2 + const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets); expect(indexes).toHaveLength(NUM_SENDERS); expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]); + + // We should have called the node 12 times: + // 2 times with logs (sliding the window) + 10 times with no results (window size) + expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2 + SENDER_OFFSET_WINDOW_SIZE); + }); + + it('should sync tagged logs with a sender index offset', async () => { + const senderOffset = 5; + generateMockLogs(senderOffset); + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3); + // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first one + half of the logs for the second index + expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2); + + // Recompute the secrets (as recipient) to ensure indexes are updated + const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); + const secrets = senders.map(sender => { + const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }); + + // First sender should have 2 logs, but keep index 1 since they were built using the same tag + // Next 4 senders should also have index 6 = offset + 1 + // Last 5 senders should have index 7 = offset + 2 + const indexes = await database.getTaggingSecretsIndexesAsRecipient(secrets); + + expect(indexes).toHaveLength(NUM_SENDERS); + expect(indexes).toEqual([6, 6, 6, 6, 6, 7, 7, 7, 7, 7]); + + // We should have called the node 17 times: + // 5 times with no results (sender offset) + 2 times with logs (slide the
window) + 10 times with no results (window size) + expect(aztecNode.getLogsByTags.mock.calls.length).toBe(5 + 2 + SENDER_OFFSET_WINDOW_SIZE); }); - it('should only sync tagged logs for which indexes are not updated', async () => { + it("should sync tagged logs for which indexes are not updated if they're inside the window", async () => { + const senderOffset = 1; + generateMockLogs(senderOffset); + // Recompute the secrets (as recipient) to update indexes + const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); + const secrets = senders.map(sender => { + const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + }); + + await database.setTaggingSecretsIndexesAsRecipient(secrets.map(secret => new IndexedTaggingSecret(secret, 2))); + + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3); + + // Even if our index as recipient is higher than what the sender sent, we should be able to find the logs + expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2); + // We should have called the node 13 times: + // 1 time without logs + 2 times with logs (sliding the window) + 10 times with no results (window size) + expect(aztecNode.getLogsByTags.mock.calls.length).toBe(3 + SENDER_OFFSET_WINDOW_SIZE); + }); + + it("should not sync tagged logs for which indexes are not updated if they're outside the window", async () => { + const senderOffset = 0; + generateMockLogs(senderOffset); + + // Recompute the secrets (as recipient) to update indexes const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); - const directionalSecrets = senders.map(sender => { + const secrets = senders.map(sender => { const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); - const siloedSecret =
poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); - return new TaggingSecret(siloedSecret, recipient.address); + return poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); }); - await database.incrementTaggingSecretsIndexes(directionalSecrets); + await database.setTaggingSecretsIndexesAsRecipient( + secrets.map(secret => new IndexedTaggingSecret(secret, SENDER_OFFSET_WINDOW_SIZE + 1)), + ); + + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 3); + + // Only half of the logs should be synced since we start from index 1 = offset + 1, the other half should be skipped + expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS / 2); - const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, recipient.address); + // We should have called the node SENDER_OFFSET_WINDOW_SIZE + 1 (with logs) + SENDER_OFFSET_WINDOW_SIZE: + // Once for index 1 (NUM_SENDERS/2 logs) + 2 times the sliding window (no logs each time) + expect(aztecNode.getLogsByTags.mock.calls.length).toBe(1 + 2 * SENDER_OFFSET_WINDOW_SIZE); + }); - // Only half of the logs should be synced since we start from index 1, the other half should be skipped - expect(syncedLogs).toHaveLength(NUM_SENDERS / 2); + it('should not sync tagged logs with a blockNumber > maxBlockNumber', async () => { + const senderOffset = 0; + generateMockLogs(senderOffset); + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, 1); - // We should have called the node twice, once for index 1 and once for index 2 (which should return no logs) - expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2); + // Only NUM_SENDERS + 1 logs should be synched, since the rest have blockNumber > 1 + expect(syncedLogs.get(recipient.address.toString())).toHaveLength(NUM_SENDERS + 1); }); }); describe('Process notes', () => { let addNotesSpy: any; + let getIncomingNotesSpy: any; + let 
removeNullifiedNotesSpy: any; let simulator: MockProxy; beforeEach(() => { addNotesSpy = jest.spyOn(database, 'addNotes'); + getIncomingNotesSpy = jest.spyOn(database, 'getIncomingNotes'); + removeNullifiedNotesSpy = jest.spyOn(database, 'removeNullifiedNotes'); + removeNullifiedNotesSpy.mockImplementation(() => Promise.resolve([])); simulator = mock(); simulator.computeNoteHashAndOptionallyANullifier.mockImplementation((...args: any) => Promise.resolve({ @@ -295,13 +376,15 @@ describe('Simulator oracle', () => { afterEach(() => { addNotesSpy.mockReset(); + getIncomingNotesSpy.mockReset(); + removeNullifiedNotesSpy.mockReset(); simulator.computeNoteHashAndOptionallyANullifier.mockReset(); aztecNode.getTxEffect.mockReset(); }); - function mockTaggedLogs(requests: MockNoteRequest[]) { + function mockTaggedLogs(requests: MockNoteRequest[], nullifiers: number = 0) { const txEffectsMap: { [k: string]: { noteHashes: Fr[]; txHash: TxHash } } = {}; - const taggedLogs: TxScopedEncryptedL2NoteLog[] = []; + const taggedLogs: TxScopedL2Log[] = []; const groupedByTx = requests.reduce<{ [i: number]: { [j: number]: MockNoteRequest[] } }>((acc, request) => { if (!acc[request.blockNumber]) { acc[request.blockNumber] = {}; @@ -330,7 +413,7 @@ describe('Simulator oracle', () => { } const dataStartIndex = (request.blockNumber - 1) * NUM_NOTE_HASHES_PER_BLOCK + request.txIndex * MAX_NOTE_HASHES_PER_TX; - const taggedLog = new TxScopedEncryptedL2NoteLog(txHash, dataStartIndex, request.encrypt()); + const taggedLog = new TxScopedL2Log(txHash, dataStartIndex, blockNumber, false, request.encrypt().data); const note = request.snippetOfNoteDao.note; const noteHash = pedersenHash(note.items); txEffectsMap[txHash.toString()].noteHashes[request.noteHashIndex] = noteHash; @@ -342,6 +425,13 @@ describe('Simulator oracle', () => { aztecNode.getTxEffect.mockImplementation(txHash => { return Promise.resolve(txEffectsMap[txHash.toString()] as TxEffect); }); + 
aztecNode.findLeavesIndexes.mockImplementation((_blockNumber, _treeId, leafValues) => + Promise.resolve( + Array(leafValues.length - nullifiers) + .fill(undefined) + .concat(Array(nullifiers).fill(1n)), + ), + ); return taggedLogs; } @@ -544,5 +634,68 @@ describe('Simulator oracle', () => { // Outgoing note daos do not have a nonce so we don't check it. } }); + + it('should not store nullified notes', async () => { + const requests = [ + new MockNoteRequest( + getRandomNoteLogPayload(Fr.random(), contractAddress), + 1, + 1, + 1, + recipient.address, + recipientOvKeys, + ), + new MockNoteRequest( + getRandomNoteLogPayload(Fr.random(), contractAddress), + 6, + 3, + 2, + recipient.address, + recipientOvKeys, + ), + new MockNoteRequest( + getRandomNoteLogPayload(Fr.random(), contractAddress), + 12, + 3, + 2, + recipient.address, + recipientOvKeys, + ), + ]; + + const taggedLogs = mockTaggedLogs(requests, 2); + + getIncomingNotesSpy.mockResolvedValueOnce(Promise.resolve(requests.map(request => request.snippetOfNoteDao))); + + await simulatorOracle.processTaggedLogs(taggedLogs, recipient.address, simulator); + + expect(addNotesSpy).toHaveBeenCalledTimes(1); + expect(addNotesSpy).toHaveBeenCalledWith( + // Incoming should contain notes from requests 0, 1, 2 because in those requests we set owner address point. + [ + expect.objectContaining({ + ...requests[0].snippetOfNoteDao, + index: requests[0].indexWithinNoteHashTree, + }), + expect.objectContaining({ + ...requests[1].snippetOfNoteDao, + index: requests[1].indexWithinNoteHashTree, + }), + expect.objectContaining({ + ...requests[2].snippetOfNoteDao, + index: requests[2].indexWithinNoteHashTree, + }), + ], + // Outgoing should contain notes from requests 0, 1, 2 because in those requests we set owner ovKeys. 
+ [ + expect.objectContaining(requests[0].snippetOfNoteDao), + expect.objectContaining(requests[1].snippetOfNoteDao), + expect.objectContaining(requests[2].snippetOfNoteDao), + ], + recipient.address, + ); + + expect(removeNullifiedNotesSpy).toHaveBeenCalledTimes(1); + }, 30_000); }); }); diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index 22c1b4a9c60..8b0c6810eda 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -1,9 +1,8 @@ import { type AztecNode, L2Block } from '@aztec/circuit-types'; -import { Fr, type Header, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js'; +import { type Header } from '@aztec/circuits.js'; import { makeHeader } from '@aztec/circuits.js/testing'; import { randomInt } from '@aztec/foundation/crypto'; import { SerialQueue } from '@aztec/foundation/queue'; -import { KeyStore } from '@aztec/key-store'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -82,66 +81,6 @@ describe('Synchronizer', () => { expect(header5).not.toEqual(headerBlock3); expect(header5).toEqual(block5.header); }); - - it('note processor successfully catches up', async () => { - const blocks = [L2Block.random(1, 4), L2Block.random(2, 4)]; - - aztecNode.getLogs - // called by synchronizer.work - .mockResolvedValueOnce([blocks[0].body.encryptedLogs]) - .mockResolvedValueOnce([blocks[0].body.unencryptedLogs]) - .mockResolvedValueOnce([blocks[1].body.encryptedLogs]) - .mockResolvedValueOnce([blocks[1].body.encryptedLogs]) - // called by synchronizer.workNoteProcessorCatchUp - .mockResolvedValueOnce([blocks[0].body.encryptedLogs]) - .mockResolvedValueOnce([blocks[1].body.encryptedLogs]); - - aztecNode.getBlocks - // called by synchronizer.work, - .mockResolvedValueOnce([blocks[0]]) - .mockResolvedValueOnce([blocks[1]]) - // called by 
synchronizer.workNoteProcessorCatchUp - .mockResolvedValueOnce([blocks[0]]) - .mockResolvedValueOnce([blocks[1]]); - - aztecNode.getBlockNumber.mockResolvedValue(INITIAL_L2_BLOCK_NUM + 1); - - // Sync the synchronizer so that note processor has something to catch up to - // There are two blocks, and we have a limit of 1 block per work call - await synchronizer.work(1); - expect(await synchronizer.isGlobalStateSynchronized()).toBe(false); - await synchronizer.work(1); - expect(await synchronizer.isGlobalStateSynchronized()).toBe(true); - - // Manually adding account to database so that we can call synchronizer.isAccountStateSynchronized - const keyStore = new KeyStore(openTmpStore()); - const addAddress = async (startingBlockNum: number) => { - const secretKey = Fr.random(); - const partialAddress = Fr.random(); - const completeAddress = await keyStore.addAccount(secretKey, partialAddress); - await database.addCompleteAddress(completeAddress); - synchronizer.addAccount(completeAddress, keyStore, startingBlockNum); - return completeAddress; - }; - - const [completeAddressA, completeAddressB, completeAddressC] = await Promise.all([ - addAddress(INITIAL_L2_BLOCK_NUM), - addAddress(INITIAL_L2_BLOCK_NUM), - addAddress(INITIAL_L2_BLOCK_NUM + 1), - ]); - - await synchronizer.workNoteProcessorCatchUp(); - - expect(await synchronizer.isAccountStateSynchronized(completeAddressA.address)).toBe(false); - expect(await synchronizer.isAccountStateSynchronized(completeAddressB.address)).toBe(false); - expect(await synchronizer.isAccountStateSynchronized(completeAddressC.address)).toBe(false); - - await synchronizer.workNoteProcessorCatchUp(); - - expect(await synchronizer.isAccountStateSynchronized(completeAddressA.address)).toBe(true); - expect(await synchronizer.isAccountStateSynchronized(completeAddressB.address)).toBe(true); - expect(await synchronizer.isAccountStateSynchronized(completeAddressC.address)).toBe(true); - }); }); class TestSynchronizer extends Synchronizer { @@ 
-152,8 +91,4 @@ class TestSynchronizer extends Synchronizer { public override initialSync(): Promise { return super.initialSync(); } - - public override workNoteProcessorCatchUp(limit = 1): Promise { - return super.workNoteProcessorCatchUp(limit); - } } diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index dfef4ccd49e..3ed458b2db1 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -1,21 +1,10 @@ -import { type AztecNode, type L2Block, MerkleTreeId, type TxHash } from '@aztec/circuit-types'; -import { type NoteProcessorCaughtUpStats } from '@aztec/circuit-types/stats'; -import { - type AztecAddress, - type CompleteAddress, - type Fr, - INITIAL_L2_BLOCK_NUM, - type PublicKey, -} from '@aztec/circuits.js'; +import { type AztecNode, type L2Block } from '@aztec/circuit-types'; +import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type SerialQueue } from '@aztec/foundation/queue'; import { RunningPromise } from '@aztec/foundation/running-promise'; -import { type KeyStore } from '@aztec/key-store'; -import { type DeferredNoteDao } from '../database/deferred_note_dao.js'; -import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; -import { NoteProcessor } from '../note_processor/index.js'; /** * The Synchronizer class manages the synchronization of note processors and interacts with the Aztec node @@ -26,11 +15,9 @@ import { NoteProcessor } from '../note_processor/index.js'; */ export class Synchronizer { private runningPromise?: RunningPromise; - private noteProcessors: NoteProcessor[] = []; private running = false; private initialSyncBlockNumber = INITIAL_L2_BLOCK_NUM - 1; private log: DebugLogger; - private noteProcessorsToCatchUp: NoteProcessor[] = []; constructor(private 
node: AztecNode, private db: PxeDatabase, private jobQueue: SerialQueue, logSuffix = '') { this.log = createDebugLogger(logSuffix ? `aztec:pxe_synchronizer_${logSuffix}` : 'aztec:pxe_synchronizer'); @@ -80,13 +67,7 @@ export class Synchronizer { let moreWork = true; // keep external this.running flag to interrupt greedy sync while (moreWork && this.running) { - if (this.noteProcessorsToCatchUp.length > 0) { - // There is a note processor that needs to catch up. We hijack the main loop to catch up the note processor. - moreWork = await this.workNoteProcessorCatchUp(limit); - } else { - // No note processor needs to catch up. We continue with the normal flow. - moreWork = await this.work(limit); - } + moreWork = await this.work(limit); } }); } @@ -108,11 +89,6 @@ export class Synchronizer { // Update latest tree roots from the most recent block const latestBlock = blocks[blocks.length - 1]; await this.setHeaderFromBlock(latestBlock); - - this.log.debug(`Forwarding ${blocks.length} blocks to ${this.noteProcessors.length} note processors`); - for (const noteProcessor of this.noteProcessors) { - await noteProcessor.process(blocks); - } return true; } catch (err) { this.log.error(`Error in synchronizer work`, err); @@ -120,104 +96,6 @@ export class Synchronizer { } } - /** - * Catch up note processors that are lagging behind the main sync. - * e.g. because we just added a new account. - * - * @param limit - the maximum number of encrypted, unencrypted logs and blocks to fetch in each iteration. - * @returns true if there could be more work, false if there was an error which allows a retry with delay. 
- */ - protected async workNoteProcessorCatchUp(limit = 1): Promise { - const toBlockNumber = this.getSynchedBlockNumber(); - - // filter out note processors that are already caught up - // and sort them by the block number they are lagging behind in ascending order - const noteProcessorsToCatchUp: NoteProcessor[] = []; - - this.noteProcessorsToCatchUp.forEach(noteProcessor => { - if (noteProcessor.status.syncedToBlock >= toBlockNumber) { - // Note processor is ahead of main sync, nothing to do - this.noteProcessors.push(noteProcessor); - } else { - noteProcessorsToCatchUp.push(noteProcessor); - } - }); - - this.noteProcessorsToCatchUp = noteProcessorsToCatchUp; - - if (!this.noteProcessorsToCatchUp.length) { - // No note processors to catch up, nothing to do here, - // but we return true to continue with the normal flow. - return true; - } - - // create a copy so that: - // 1. we can modify the original array while iterating over it - // 2. we don't need to serialize insertions into the array - const catchUpGroup = this.noteProcessorsToCatchUp - .slice() - // sort by the block number they are lagging behind - .sort((a, b) => a.status.syncedToBlock - b.status.syncedToBlock); - - // grab the note processor that is lagging behind the most - const from = catchUpGroup[0].status.syncedToBlock + 1; - // Ensuring that the note processor does not sync further than the main sync. - limit = Math.min(limit, toBlockNumber - from + 1); - // this.log(`Catching up ${catchUpGroup.length} note processors by up to ${limit} blocks starting at block ${from}`); - - if (limit < 1) { - throw new Error(`Unexpected limit ${limit} for note processor catch up`); - } - - try { - const blocks = await this.node.getBlocks(from, limit); - if (!blocks.length) { - // This should never happen because this function should only be called when the note processor is lagging - // behind main sync. 
- throw new Error('No blocks in processor catch up mode'); - } - - for (const noteProcessor of catchUpGroup) { - // find the index of the first block that the note processor is not yet synced to - const index = blocks.findIndex(block => block.number > noteProcessor.status.syncedToBlock); - if (index === -1) { - // Due to the limit, we might not have fetched a new enough block for the note processor. - // And since the group is sorted, we break as soon as we find a note processor - // that needs blocks newer than the newest block we fetched. - break; - } - - this.log.debug( - `Catching up note processor ${noteProcessor.account.toString()} by processing ${ - blocks.length - index - } blocks`, - ); - await noteProcessor.process(blocks.slice(index)); - - if (noteProcessor.status.syncedToBlock === toBlockNumber) { - // Note processor caught up, move it to `noteProcessors` from `noteProcessorsToCatchUp`. - this.log.debug(`Note processor for ${noteProcessor.account.toString()} has caught up`, { - eventName: 'note-processor-caught-up', - account: noteProcessor.account.toString(), - duration: noteProcessor.timer.ms(), - dbSize: await this.db.estimateSize(), - ...noteProcessor.stats, - } satisfies NoteProcessorCaughtUpStats); - - this.noteProcessorsToCatchUp = this.noteProcessorsToCatchUp.filter( - np => !np.account.equals(noteProcessor.account), - ); - this.noteProcessors.push(noteProcessor); - } - } - - return true; // could be more work, immediately continue syncing - } catch (err) { - this.log.error(`Error in synchronizer workNoteProcessorCatchUp`, err); - return false; - } - } - private async setHeaderFromBlock(latestBlock: L2Block) { if (latestBlock.number < this.initialSyncBlockNumber) { return; @@ -239,46 +117,6 @@ export class Synchronizer { this.log.info('Stopped'); } - /** - * Add a new account to the Synchronizer with the specified private key. - * Creates a NoteProcessor instance for the account and pushes it into the noteProcessors array. 
- * The method resolves immediately after pushing the new note processor. - * - * @param publicKey - The public key for the account. - * @param keyStore - The key store. - * @param startingBlock - The block where to start scanning for notes for this accounts. - * @returns A promise that resolves once the account is added to the Synchronizer. - */ - public addAccount(account: CompleteAddress, keyStore: KeyStore, startingBlock: number) { - const predicate = (x: NoteProcessor) => x.account.equals(account); - const processor = this.noteProcessors.find(predicate) ?? this.noteProcessorsToCatchUp.find(predicate); - if (processor) { - return; - } - - this.noteProcessorsToCatchUp.push(NoteProcessor.create(account, keyStore, this.db, this.node, startingBlock)); - } - - /** - * Checks if the specified account is synchronized. - * @param account - The aztec address for which to query the sync status. - * @returns True if the account is fully synched, false otherwise. - * @remarks Checks whether all the notes from all the blocks have been processed. If it is not the case, the - * retrieved information from contracts might be old/stale (e.g. old token balance). - * @throws If checking a sync status of account which is not registered. - */ - public async isAccountStateSynchronized(account: AztecAddress) { - const findByAccountAddress = (x: NoteProcessor) => x.account.address.equals(account); - const processor = - this.noteProcessors.find(findByAccountAddress) ?? this.noteProcessorsToCatchUp.find(findByAccountAddress); - if (!processor) { - throw new Error( - `Checking if account is synched is not possible for ${account} because it is only registered as a recipient.`, - ); - } - return await processor.isSynchronized(); - } - private getSynchedBlockNumber() { return this.db.getBlockNumber() ?? 
this.initialSyncBlockNumber; } @@ -302,94 +140,6 @@ export class Synchronizer { const lastBlockNumber = this.getSynchedBlockNumber(); return { blocks: lastBlockNumber, - notes: Object.fromEntries(this.noteProcessors.map(n => [n.account.address.toString(), n.status.syncedToBlock])), }; } - - /** - * Returns the note processor stats. - * @returns The note processor stats for notes for each public key being tracked. - */ - public getSyncStats() { - return Object.fromEntries(this.noteProcessors.map(n => [n.account.address.toString(), n.stats])); - } - - /** - * Retry decoding any deferred notes for the specified contract address. - * @param contractAddress - the contract address that has just been added - */ - public reprocessDeferredNotesForContract(contractAddress: AztecAddress): Promise { - return this.jobQueue.put(() => this.#reprocessDeferredNotesForContract(contractAddress)); - } - - async #reprocessDeferredNotesForContract(contractAddress: AztecAddress): Promise { - const deferredNotes = await this.db.getDeferredNotesByContract(contractAddress); - - // group deferred notes by txHash to properly deal with possible duplicates - const txHashToDeferredNotes: Map = new Map(); - for (const note of deferredNotes) { - const notesForTx = txHashToDeferredNotes.get(note.txHash) ?? []; - notesForTx.push(note); - txHashToDeferredNotes.set(note.txHash, notesForTx); - } - - // keep track of decoded notes - const incomingNotes: IncomingNoteDao[] = []; - - // now process each txHash - for (const deferredNotes of txHashToDeferredNotes.values()) { - // to be safe, try each note processor in case the deferred notes are for different accounts. 
- for (const processor of this.noteProcessors) { - const { incomingNotes: inNotes, outgoingNotes: outNotes } = await processor.decodeDeferredNotes(deferredNotes); - incomingNotes.push(...inNotes); - - await this.db.addNotes(inNotes, outNotes, processor.account.address); - - inNotes.forEach(noteDao => { - this.log.debug( - `Decoded deferred incoming note under account ${processor.account.toString()} for contract ${ - noteDao.contractAddress - } at slot ${noteDao.storageSlot} with nullifier ${noteDao.siloedNullifier.toString()}`, - ); - }); - - outNotes.forEach(noteDao => { - this.log.debug( - `Decoded deferred outgoing note under account ${processor.account.toString()} for contract ${ - noteDao.contractAddress - } at slot ${noteDao.storageSlot}`, - ); - }); - } - } - - // now drop the deferred notes, and add the decoded notes - await this.db.removeDeferredNotesByContract(contractAddress); - - await this.#removeNullifiedNotes(incomingNotes); - } - - async #removeNullifiedNotes(notes: IncomingNoteDao[]) { - // now group the decoded incoming notes by public key - const addressPointToIncomingNotes: Map = new Map(); - for (const noteDao of notes) { - const notesForAddressPoint = addressPointToIncomingNotes.get(noteDao.addressPoint) ?? []; - notesForAddressPoint.push(noteDao); - addressPointToIncomingNotes.set(noteDao.addressPoint, notesForAddressPoint); - } - - // now for each group, look for the nullifiers in the nullifier tree - for (const [publicKey, notes] of addressPointToIncomingNotes.entries()) { - const nullifiers = notes.map(n => n.siloedNullifier); - const relevantNullifiers: Fr[] = []; - for (const nullifier of nullifiers) { - // NOTE: this leaks information about the nullifiers I'm interested in to the node. 
- const found = await this.node.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, nullifier); - if (found) { - relevantNullifiers.push(nullifier); - } - } - await this.db.removeNullifiedNotes(relevantNullifiers, publicKey); - } - } } diff --git a/yarn-project/scripts/src/benchmarks/aggregate.ts b/yarn-project/scripts/src/benchmarks/aggregate.ts index 4e8222bd11a..5b82bd208ce 100644 --- a/yarn-project/scripts/src/benchmarks/aggregate.ts +++ b/yarn-project/scripts/src/benchmarks/aggregate.ts @@ -24,7 +24,6 @@ import { type L2BlockHandledStats, type MetricName, type NodeSyncedChainHistoryStats, - type NoteProcessorCaughtUpStats, type ProofConstructed, type PublicDBAccessStats, type Stats, @@ -186,15 +185,6 @@ function processCircuitWitnessGeneration(entry: CircuitWitnessGenerationStats, r append(results, 'protocol_circuit_witness_generation_time_in_ms', bucket, entry.duration); } } -/** - * Processes an entry with event name 'note-processor-caught-up' and updates results - */ -function processNoteProcessorCaughtUp(entry: NoteProcessorCaughtUpStats, results: BenchmarkCollectedResults) { - const { decryptedIncoming, decryptedOutgoing, blocks, dbSize } = entry; - if (BENCHMARK_HISTORY_CHAIN_LENGTHS.includes(blocks) && (decryptedIncoming > 0 || decryptedOutgoing > 0)) { - append(results, 'pxe_database_size_in_bytes', blocks, dbSize); - } -} /** Processes an entry with event name 'l2-block-built' and updates results where buckets are rollup sizes */ function processL2BlockBuilt(entry: L2BlockBuiltStats, results: BenchmarkCollectedResults) { @@ -267,8 +257,6 @@ function processEntry(entry: Stats, results: BenchmarkCollectedResults) { return processCircuitWitnessGeneration(entry, results); case 'circuit-proving': return processCircuitProving(entry, results); - case 'note-processor-caught-up': - return processNoteProcessorCaughtUp(entry, results); case 'l2-block-built': return processL2BlockBuilt(entry, results); case 'node-synced-chain-history': diff --git 
a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index e4149d094bf..f52db4237b0 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -405,22 +405,22 @@ export class Oracle { this.typedOracle.notifySetMinRevertibleSideEffectCounter(frToNumber(fromACVMField(minRevertibleSideEffectCounter))); } - async getAppTaggingSecret([sender]: ACVMField[], [recipient]: ACVMField[]): Promise { - const taggingSecret = await this.typedOracle.getAppTaggingSecret( + async getAppTaggingSecretAsSender([sender]: ACVMField[], [recipient]: ACVMField[]): Promise { + const taggingSecret = await this.typedOracle.getAppTaggingSecretAsSender( AztecAddress.fromString(sender), AztecAddress.fromString(recipient), ); return taggingSecret.toFields().map(toACVMField); } - async incrementAppTaggingSecret([sender]: ACVMField[], [recipient]: ACVMField[]) { - await this.typedOracle.incrementAppTaggingSecret( + async incrementAppTaggingSecretIndexAsSender([sender]: ACVMField[], [recipient]: ACVMField[]) { + await this.typedOracle.incrementAppTaggingSecretIndexAsSender( AztecAddress.fromString(sender), AztecAddress.fromString(recipient), ); } - async syncNotes([recipient]: ACVMField[]) { - await this.typedOracle.syncNotes(AztecAddress.fromString(recipient)); + async syncNotes() { + await this.typedOracle.syncNotes(); } } diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index b17d9ed8076..541774c5979 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -254,15 +254,15 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('debugLog'); } - getAppTaggingSecret(_sender: AztecAddress, _recipient: AztecAddress): Promise { - throw new OracleMethodNotAvailableError('getAppTaggingSecret'); + 
getAppTaggingSecretAsSender(_sender: AztecAddress, _recipient: AztecAddress): Promise { + throw new OracleMethodNotAvailableError('getAppTaggingSecretAsSender'); } - incrementAppTaggingSecret(_sender: AztecAddress, _recipient: AztecAddress): Promise { - throw new OracleMethodNotAvailableError('incrementAppTaggingSecret'); + incrementAppTaggingSecretIndexAsSender(_sender: AztecAddress, _recipient: AztecAddress): Promise { + throw new OracleMethodNotAvailableError('incrementAppTaggingSecretIndexAsSender'); } - syncNotes(_recipient: AztecAddress): Promise { + syncNotes(): Promise { throw new OracleMethodNotAvailableError('syncNotes'); } } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index d6bca3aab24..c933b4caeba 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -24,7 +24,7 @@ import { } from '@aztec/circuits.js'; import { computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; import { type FunctionAbi, type FunctionArtifact, type NoteSelector, countArgumentsSize } from '@aztec/foundation/abi'; -import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { applyStringFormatting, createDebugLogger } from '@aztec/foundation/log'; @@ -598,12 +598,18 @@ export class ClientExecutionContext extends ViewDataOracle { return this.db.getDebugFunctionName(this.contractAddress, this.callContext.functionSelector); } - public override async incrementAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress) { - await this.db.incrementAppTaggingSecret(this.contractAddress, sender, recipient); + public override async incrementAppTaggingSecretIndexAsSender(sender: AztecAddress, 
recipient: AztecAddress) { + await this.db.incrementAppTaggingSecretIndexAsSender(this.contractAddress, sender, recipient); } - public override async syncNotes(recipient: AztecAddress) { - const taggedLogs = await this.db.syncTaggedLogs(this.contractAddress, recipient); - await this.db.processTaggedLogs(taggedLogs, recipient); + public override async syncNotes() { + const taggedLogsByRecipient = await this.db.syncTaggedLogs( + this.contractAddress, + this.historicalHeader.globalVariables.blockNumber.toNumber(), + this.scopes, + ); + for (const [recipient, taggedLogs] of taggedLogsByRecipient.entries()) { + await this.db.processTaggedLogs(taggedLogs, AztecAddress.fromString(recipient)); + } } } diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index 304ea84d76c..4047c17d83b 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -1,10 +1,11 @@ import { type L2Block, + type L2BlockNumber, type MerkleTreeId, type NoteStatus, type NullifierMembershipWitness, type PublicDataWitness, - type TxScopedEncryptedL2NoteLog, + type TxScopedL2Log, } from '@aztec/circuit-types'; import { type CompleteAddress, @@ -146,7 +147,7 @@ export interface DBOracle extends CommitmentsDB { * @param leafValue - The leaf value buffer. * @returns - The index of the leaf. Undefined if it does not exist in the tree. */ - findLeafIndex(blockNumber: number, treeId: MerkleTreeId, leafValue: Fr): Promise; + findLeafIndex(blockNumber: L2BlockNumber, treeId: MerkleTreeId, leafValue: Fr): Promise; /** * Fetch the sibling path of the leaf in the respective tree @@ -204,7 +205,7 @@ export interface DBOracle extends CommitmentsDB { * @param recipient - The address receiving the note * @returns A tagging secret that can be used to tag notes. 
*/ - getAppTaggingSecret( + getAppTaggingSecretAsSender( contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress, @@ -216,7 +217,7 @@ export interface DBOracle extends CommitmentsDB { * @param sender - The address sending the note * @param recipient - The address receiving the note */ - incrementAppTaggingSecret( + incrementAppTaggingSecretIndexAsSender( contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress, @@ -229,12 +230,16 @@ export interface DBOracle extends CommitmentsDB { * @param recipient - The address of the recipient * @returns A list of encrypted logs tagged with the recipient's address */ - syncTaggedLogs(contractAddress: AztecAddress, recipient: AztecAddress): Promise; + syncTaggedLogs( + contractAddress: AztecAddress, + maxBlockNumber: number, + scopes?: AztecAddress[], + ): Promise>; /** * Processes the tagged logs returned by syncTaggedLogs by decrypting them and storing them in the database. * @param logs - The logs to process. * @param recipient - The recipient of the logs. */ - processTaggedLogs(logs: TxScopedEncryptedL2NoteLog[], recipient: AztecAddress): Promise; + processTaggedLogs(logs: TxScopedL2Log[], recipient: AztecAddress): Promise; } diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 72c684ff435..2d35d94c7f0 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -27,7 +27,6 @@ import { PUBLIC_DISPATCH_SELECTOR, PartialStateReference, StateReference, - TaggingSecret, TxContext, computeAppNullifierSecretKey, computeOvskApp, @@ -259,10 +258,10 @@ describe('Private Execution test suite', () => { throw new Error(`Unknown address: ${address}. 
Recipient: ${recipient}, Owner: ${owner}`); }); - oracle.getAppTaggingSecret.mockImplementation( - (_contractAddress: AztecAddress, _sender: AztecAddress, recipient: AztecAddress) => { - const directionalSecret = new TaggingSecret(Fr.random(), recipient); - return Promise.resolve(IndexedTaggingSecret.fromTaggingSecret(directionalSecret, 0)); + oracle.getAppTaggingSecretAsSender.mockImplementation( + (_contractAddress: AztecAddress, _sender: AztecAddress, _recipient: AztecAddress) => { + const secret = Fr.random(); + return Promise.resolve(new IndexedTaggingSecret(secret, 0)); }, ); @@ -414,6 +413,8 @@ describe('Private Execution test suite', () => { buildNote(60n, ownerCompleteAddress.address, storageSlot, valueNoteTypeId), buildNote(80n, ownerCompleteAddress.address, storageSlot, valueNoteTypeId), ]; + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue(notes); const consumedNotes = await asyncMap(notes, ({ nonce, note }) => @@ -479,6 +480,8 @@ describe('Private Execution test suite', () => { const storageSlot = deriveStorageSlotInMap(new Fr(1n), owner); const notes = [buildNote(balance, ownerCompleteAddress.address, storageSlot, valueNoteTypeId)]; + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue(notes); const consumedNotes = await asyncMap(notes, ({ nonce, note }) => @@ -812,6 +815,8 @@ describe('Private Execution test suite', () => { const secretHash = computeSecretHash(secret); const note = new Note([secretHash]); const storageSlot = TestContractArtifact.storageLayout['example_set'].slot; + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue([ { contractAddress, @@ -915,6 +920,8 @@ describe('Private Execution test suite', () => { }); it('should be able to insert, read, and nullify pending note hashes in one call', 
async () => { + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue([]); const amountToTransfer = 100n; @@ -976,6 +983,8 @@ describe('Private Execution test suite', () => { }); it('should be able to insert, read, and nullify pending note hashes in nested calls', async () => { + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue([]); const amountToTransfer = 100n; @@ -1058,6 +1067,8 @@ describe('Private Execution test suite', () => { }); it('cant read a commitment that is inserted later in same call', async () => { + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue([]); const amountToTransfer = 100n; @@ -1097,6 +1108,8 @@ describe('Private Execution test suite', () => { const artifact = getFunctionArtifact(TestContractArtifact, 'call_get_notes'); const args = [2n, true]; + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getNotes.mockResolvedValue([]); await expect(() => runSimulator({ artifact, args })).rejects.toThrow( diff --git a/yarn-project/simulator/src/client/unconstrained_execution.test.ts b/yarn-project/simulator/src/client/unconstrained_execution.test.ts index b1eb64457a8..99bb3e3842d 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.test.ts +++ b/yarn-project/simulator/src/client/unconstrained_execution.test.ts @@ -53,6 +53,8 @@ describe('Unconstrained Execution test suite', () => { const notes: Note[] = [...Array(5).fill(buildNote(1n, owner)), ...Array(2).fill(buildNote(2n, owner))]; + oracle.syncTaggedLogs.mockResolvedValue(new Map()); + oracle.processTaggedLogs.mockResolvedValue(); oracle.getHeader.mockResolvedValue(Header.empty()); oracle.getNotes.mockResolvedValue( notes.map((note, index) => ({ diff --git 
a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index a6651c12f87..bd5633f2103 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -14,7 +14,7 @@ import { type KeyValidationRequest, } from '@aztec/circuits.js'; import { siloNullifier } from '@aztec/circuits.js/hash'; -import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; import { applyStringFormatting, createDebugLogger } from '@aztec/foundation/log'; @@ -302,10 +302,21 @@ export class ViewDataOracle extends TypedOracle { * @param recipient - The address receiving the note * @returns A tagging secret that can be used to tag notes. */ - public override async getAppTaggingSecret( + public override async getAppTaggingSecretAsSender( sender: AztecAddress, recipient: AztecAddress, ): Promise { - return await this.db.getAppTaggingSecret(this.contractAddress, sender, recipient); + return await this.db.getAppTaggingSecretAsSender(this.contractAddress, sender, recipient); + } + + public override async syncNotes() { + const taggedLogsByRecipient = await this.db.syncTaggedLogs( + this.contractAddress, + await this.aztecNode.getBlockNumber(), + this.scopes, + ); + for (const [recipient, taggedLogs] of taggedLogsByRecipient.entries()) { + await this.db.processTaggedLogs(taggedLogs, AztecAddress.fromString(recipient)); + } } } diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 601206b75b8..b6c0f55b7ec 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -34,7 +34,6 @@ import { PublicDataTreeLeaf, type PublicDataTreeLeafPreimage, type PublicDataUpdateRequest, - TaggingSecret, computeContractClassId, computeTaggingSecret, deriveKeys, @@ -785,47 +784,27 @@ export 
class TXE implements TypedOracle { return; } - async incrementAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress): Promise { - const directionalSecret = await this.#calculateDirectionalSecret(this.contractAddress, sender, recipient); - await this.txeDatabase.incrementTaggingSecretsIndexes([directionalSecret]); + async incrementAppTaggingSecretIndexAsSender(sender: AztecAddress, recipient: AztecAddress): Promise { + const directionalSecret = await this.#calculateTaggingSecret(this.contractAddress, sender, recipient); + await this.txeDatabase.incrementTaggingSecretsIndexesAsSender([directionalSecret]); } - async getAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress): Promise { - const directionalSecret = await this.#calculateDirectionalSecret(this.contractAddress, sender, recipient); - const [index] = await this.txeDatabase.getTaggingSecretsIndexes([directionalSecret]); - return IndexedTaggingSecret.fromTaggingSecret(directionalSecret, index); + async getAppTaggingSecretAsSender(sender: AztecAddress, recipient: AztecAddress): Promise { + const secret = await this.#calculateTaggingSecret(this.contractAddress, sender, recipient); + const [index] = await this.txeDatabase.getTaggingSecretsIndexesAsSender([secret]); + return new IndexedTaggingSecret(secret, index); } - async #calculateDirectionalSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { + async #calculateTaggingSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); - // Get the index of the secret, ensuring the directionality 
(sender -> recipient) - const directionalSecret = new TaggingSecret(siloedSecret, recipient); - return directionalSecret; - } - - async #getAppTaggingSecretsForSenders(recipient: AztecAddress): Promise { - const recipientCompleteAddress = await this.getCompleteAddress(recipient); - const completeAddresses = await this.txeDatabase.getCompleteAddresses(); - // Filter out the addresses corresponding to accounts - const accounts = await this.keyStore.getAccounts(); - const senders = completeAddresses.filter( - completeAddress => !accounts.find(account => account.equals(completeAddress.address)), - ); - const recipientIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(recipient); - const secrets = senders.map(({ address: sender }) => { - const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, sender); - return poseidon2Hash([sharedSecret.x, sharedSecret.y, this.contractAddress]); - }); - const directionalSecrets = secrets.map(secret => new TaggingSecret(secret, recipient)); - const indexes = await this.txeDatabase.getTaggingSecretsIndexes(directionalSecrets); - return secrets.map((secret, i) => new IndexedTaggingSecret(secret, recipient, indexes[i])); + return siloedSecret; } - syncNotes(_recipient: AztecAddress) { + syncNotes() { // TODO: Implement return Promise.resolve(); } diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index f5bb2fc80fb..75c7f5b5a20 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -601,16 +601,16 @@ export class TXEService { return toForeignCallResult([]); } - async getAppTaggingSecret(sender: ForeignCallSingle, recipient: ForeignCallSingle) { - const secret = await this.typedOracle.getAppTaggingSecret( + async getAppTaggingSecretAsSender(sender: ForeignCallSingle, recipient: ForeignCallSingle) { + const secret = await this.typedOracle.getAppTaggingSecretAsSender( 
AztecAddress.fromField(fromSingle(sender)), AztecAddress.fromField(fromSingle(recipient)), ); return toForeignCallResult([toArray(secret.toFields())]); } - async syncNotes(recipient: ForeignCallSingle) { - await this.typedOracle.syncNotes(AztecAddress.fromField(fromSingle(recipient))); + async syncNotes() { + await this.typedOracle.syncNotes(); return toForeignCallResult([]); }