From 1a6ad2cbd299246dc75b208e6be7e397d09c2b4e Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Thu, 20 Feb 2025 15:06:18 +0100 Subject: [PATCH] feat: Update `get_block_inputs` --- Cargo.lock | 20 +- crates/block-producer/Cargo.toml | 1 + crates/block-producer/src/block.rs | 106 -- .../block-producer/src/block_builder/mod.rs | 194 ++-- .../prover/asm/block_kernel.masm | 244 ----- .../src/block_builder/prover/block_witness.rs | 328 ------ .../src/block_builder/prover/mod.rs | 128 --- .../src/block_builder/prover/tests.rs | 939 ------------------ crates/block-producer/src/errors.rs | 45 +- crates/block-producer/src/lib.rs | 1 - crates/block-producer/src/store/mod.rs | 14 +- crates/block-producer/src/test_utils/block.rs | 55 +- crates/block-producer/src/test_utils/store.rs | 129 ++- crates/proto/src/domain/account.rs | 34 +- crates/proto/src/domain/block.rs | 107 +- crates/proto/src/domain/nullifier.rs | 22 +- crates/proto/src/generated/requests.rs | 17 +- crates/proto/src/generated/responses.rs | 44 +- crates/proto/src/lib.rs | 4 +- crates/rpc-proto/proto/requests.proto | 19 +- crates/rpc-proto/proto/responses.proto | 36 +- crates/store/src/errors.rs | 4 + crates/store/src/server/api.rs | 12 +- crates/store/src/state.rs | 169 ++-- proto/requests.proto | 19 +- proto/responses.proto | 36 +- 26 files changed, 545 insertions(+), 2182 deletions(-) delete mode 100644 crates/block-producer/src/block.rs delete mode 100644 crates/block-producer/src/block_builder/prover/asm/block_kernel.masm delete mode 100644 crates/block-producer/src/block_builder/prover/block_witness.rs delete mode 100644 crates/block-producer/src/block_builder/prover/mod.rs delete mode 100644 crates/block-producer/src/block_builder/prover/tests.rs diff --git a/Cargo.lock b/Cargo.lock index ae688eaca..e2ae127ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -837,7 +837,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1703,6 +1703,17 @@ dependencies = [ "unicode-width 0.2.0", ] +[[package]] +name = "miden-block-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +dependencies = [ + "miden-crypto", + "miden-lib", + "miden-objects", + "thiserror 2.0.11", +] + [[package]] name = "miden-core" version = "0.12.0" @@ -1867,6 +1878,7 @@ dependencies = [ "futures", "itertools 0.14.0", "miden-air", + "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-test-macro", @@ -2918,7 +2930,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3322,7 +3334,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -4131,7 +4143,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 89a3f0cbc..2b904e44d 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,6 +21,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { 
workspace = true } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-batch-expiration" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } diff --git a/crates/block-producer/src/block.rs b/crates/block-producer/src/block.rs deleted file mode 100644 index 857a5b018..000000000 --- a/crates/block-producer/src/block.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::collections::BTreeMap; - -use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, - errors::{ConversionError, MissingFieldHelper}, - generated::responses::GetBlockInputsResponse, - AccountInputRecord, NullifierWitness, -}; -use miden_objects::{ - account::AccountId, - block::BlockHeader, - crypto::merkle::{MerklePath, MmrPeaks, SmtProof}, - note::Nullifier, - Digest, -}; - -// BLOCK INPUTS -// ================================================================================================ - -/// Information needed from the store to build a block -#[derive(Clone, Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub accounts: BTreeMap, - - /// The requested nullifiers and their authentication paths - pub nullifiers: BTreeMap, - - /// List of unauthenticated notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -#[derive(Clone, Debug, Default)] -pub struct AccountWitness { - pub hash: Digest, - pub proof: MerklePath, -} - -impl TryFrom for BlockInputs { - type Error = ConversionError; - - fn try_from(response: GetBlockInputsResponse) -> Result { - let block_header: BlockHeader = response - .block_header - .ok_or(miden_node_proto::generated::block::BlockHeader::missing_field("block_header"))? - .try_into()?; - - let chain_peaks = { - // setting the number of leaves to the current block number gives us one leaf less than - // what is currently in the chain MMR (i.e., chain MMR with block_num = 1 has 2 leave); - // this is because GetBlockInputs returns the state of the chain MMR as of one block - // ago so that block_header.chain_root matches the hash of MMR peaks. - let num_leaves = block_header.block_num().as_usize(); - - MmrPeaks::new( - num_leaves, - response - .mmr_peaks - .into_iter() - .map(TryInto::try_into) - .collect::>()?, - )? - }; - - let accounts = response - .account_states - .into_iter() - .map(|entry| { - let domain: AccountInputRecord = entry.try_into()?; - let witness = AccountWitness { - hash: domain.account_hash, - proof: domain.proof, - }; - Ok((domain.account_id, witness)) - }) - .collect::, ConversionError>>()?; - - let nullifiers = response - .nullifiers - .into_iter() - .map(|entry| { - let witness: NullifierWitness = entry.try_into()?; - Ok((witness.nullifier, witness.proof)) - }) - .collect::, ConversionError>>()?; - - let found_unauthenticated_notes = response - .found_unauthenticated_notes - .ok_or(GetBlockInputsResponse::missing_field("found_authenticated_notes"))? 
- .try_into()?; - - Ok(Self { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } -} diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 738260312..acaecf7fe 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,30 +1,23 @@ -use std::{ - collections::BTreeSet, - ops::{Add, Range}, -}; +use std::ops::Range; use futures::FutureExt; +use miden_block_prover::LocalBlockProver; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ - account::AccountId, batch::ProvenBatch, - block::{BlockNumber, ProvenBlock}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote}, + block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, + note::NoteHeader, + MIN_PROOF_SECURITY_LEVEL, }; use rand::Rng; use tokio::time::Duration; use tracing::{instrument, Span}; use crate::{ - block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; -pub(crate) mod prover; - -use self::prover::{block_witness::BlockWitness, BlockProver}; - // BLOCK BUILDER // ================================================================================================= @@ -39,7 +32,9 @@ pub struct BlockBuilder { pub failure_rate: f64, pub store: StoreClient, - pub block_kernel: BlockProver, + + /// The prover used to prove a proposed block into a proven block. + pub block_prover: LocalBlockProver, } impl BlockBuilder { @@ -49,7 +44,7 @@ impl BlockBuilder { // Note: The range cannot be empty. simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, - block_kernel: BlockProver::new(), + block_prover: LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL), store, } } @@ -98,9 +93,9 @@ impl BlockBuilder { Self::select_block(mempool) .inspect(SelectedBlock::inject_telemetry) .then(|selected| self.get_block_inputs(selected)) - .inspect_ok(BlockSummaryAndInputs::inject_telemetry) + .inspect_ok(BlockBatchesAndInputs::inject_telemetry) .and_then(|inputs| self.prove_block(inputs)) - .inspect_ok(ProvenBlockWrapper::inject_telemetry) + .inspect_ok(BuiltBlock::inject_telemetry) // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot // handle errors after it considers the process complete (which makes sense). .and_then(|proven_block| async { self.inject_failure(proven_block) }) @@ -119,68 +114,91 @@ impl BlockBuilder { SelectedBlock { block_number, batches } } + /// Fetches block inputs from the store for the [`SelectedBlock`]. + /// + /// For a given set of batches, we need to get the following block inputs from the store: + /// + /// - Note inclusion proofs for unauthenticated notes (not required to be complete due to the + /// possibility of note erasure) + /// - A chain MMR with: + /// - All blocks referenced by batches + /// - All blocks referenced by note inclusion proofs + /// - Account witnesses for all accounts updated in the block + /// - Nullifier witnesses for all nullifiers created in the block + /// - Due to note erasure the set of nullifiers the block creates it not necessarily equal to + /// the union of sets of all nullifiers created in proven batches. 
However, since we don't + /// yet know which nullifiers the block will actually create, we fetch witnesses for all + /// nullifiers created by batches. If we knew that a certain note will be erased, we would + /// not have to supply a nullifier witness for it. #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] async fn get_block_inputs( &self, selected_block: SelectedBlock, - ) -> Result { + ) -> Result { let SelectedBlock { block_number: _, batches } = selected_block; - let summary = BlockSummary::summarize_batches(&batches); + + let batch_iter = batches.iter(); + + let unauthenticated_notes_iter = batch_iter.clone().flat_map(|batch| { + // Note: .cloned() shouldn't be necessary but not having it produces an odd lifetime + // error in BlockProducer::serve. Not sure if there's a better fix. Error: + // implementation of `FnOnce` is not general enough + // closure with signature `fn(&InputNoteCommitment) -> miden_objects::note::NoteId` must + // implement `FnOnce<(&InputNoteCommitment,)>` ...but it actually implements + // `FnOnce<(&InputNoteCommitment,)>` + batch + .input_notes() + .iter() + .cloned() + .filter_map(|note| note.header().map(NoteHeader::id)) + }); + let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); + let account_ids = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); + let created_nullifiers = batch_iter.flat_map(ProvenBatch::produced_nullifiers); let inputs = self .store .get_block_inputs( - summary.updated_accounts.iter().copied(), - summary.nullifiers.iter(), - summary.dangling_notes.iter(), + account_ids, + created_nullifiers, + unauthenticated_notes_iter, + block_references_iter, ) .await .map_err(BuildBlockError::GetBlockInputsFailed)?; - let missing_notes: Vec<_> = summary - .dangling_notes - .difference(&inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); - } - - Ok(BlockSummaryAndInputs { batches, summary, inputs }) + Ok(BlockBatchesAndInputs { batches, inputs }) } #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, - preimage: BlockSummaryAndInputs, - ) -> Result { - let BlockSummaryAndInputs { batches, summary, inputs } = preimage; + preimage: BlockBatchesAndInputs, + ) -> Result { + let BlockBatchesAndInputs { batches, inputs } = preimage; - let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; + // Question: Should we split proposing and proving in two stages for telemetry reasons? + let proposed_block = + ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?; - let new_block_header = self.block_kernel.prove(block_header_witness)?; - - // TODO: Update. Temporarily left in an incorrect state. 
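// ------------------------------------------------------------------------------------------------
// A minimal illustrative sketch of the flow this patch introduces: derive the witness requests
// from the selected batches, fetch `BlockInputs` from the store, propose the block, and prove it
// locally. It assumes the APIs exactly as they appear in this diff (`StoreClient::get_block_inputs`
// with four iterator arguments, `ProposedBlock::new`, `LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL)`,
// `LocalBlockProver::prove`); other versions of miden-base / miden-node may differ. The sketch is
// kept in comment form so it is not mistaken for part of the patch hunks.
//
// use miden_block_prover::LocalBlockProver;
// use miden_objects::{
//     batch::ProvenBatch,
//     block::{BlockInputs, ProposedBlock, ProvenBlock},
//     note::NoteHeader,
//     MIN_PROOF_SECURITY_LEVEL,
// };
//
// use crate::{errors::BuildBlockError, store::StoreClient};
//
// async fn propose_and_prove(
//     store: &StoreClient,
//     batches: Vec<ProvenBatch>,
// ) -> Result<ProvenBlock, BuildBlockError> {
//     // Every witness request is derived from the batches themselves.
//     let inputs: BlockInputs = store
//         .get_block_inputs(
//             // accounts updated by any batch in the block
//             batches.iter().flat_map(ProvenBatch::updated_accounts),
//             // nullifiers created by any batch; a superset of what the block will create,
//             // since note erasure is not known at this point
//             batches.iter().flat_map(ProvenBatch::produced_nullifiers),
//             // unauthenticated notes that need inclusion proofs (or will be erased)
//             batches.iter().flat_map(|batch| {
//                 batch
//                     .input_notes()
//                     .iter()
//                     .cloned()
//                     .filter_map(|note| note.header().map(NoteHeader::id))
//             }),
//             // blocks the returned chain MMR must cover
//             batches.iter().map(ProvenBatch::reference_block_num),
//         )
//         .await
//         .map_err(BuildBlockError::GetBlockInputsFailed)?;
//
//     // Proposing validates the batches against the fetched witnesses; proving turns the
//     // proposal into a `ProvenBlock`.
//     let proposed =
//         ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?;
//     LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL)
//         .prove(proposed)
//         .map_err(BuildBlockError::ProveBlockFailed)
// }
// ------------------------------------------------------------------------------------------------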
- let block = ProvenBlock::new_unchecked( - new_block_header, - updated_accounts, - vec![], - summary.nullifiers, - ); + let proven_block = self + .block_prover + .prove(proposed_block) + .map_err(BuildBlockError::ProveBlockFailed)?; self.simulate_proving().await; - Ok(ProvenBlockWrapper { block }) + Ok(BuiltBlock { block: proven_block }) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - proven_block: ProvenBlockWrapper, + built_block: BuiltBlock, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&proven_block.block) + .apply_block(&built_block.block) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; @@ -220,65 +238,16 @@ impl BlockBuilder { } } -struct BlockSummary { - updated_accounts: BTreeSet, - nullifiers: Vec, - output_notes: Vec>, - dangling_notes: BTreeSet, -} - -impl BlockSummary { - #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] - fn summarize_batches(batches: &[ProvenBatch]) -> Self { - let updated_accounts: BTreeSet = batches - .iter() - .flat_map(ProvenBatch::account_updates) - .map(|(account_id, _)| *account_id) - .collect(); - - let output_notes: Vec<_> = - batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - - let nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - // Populate set of output notes from all batches - let output_notes_set: BTreeSet<_> = output_notes - .iter() - .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) - .collect(); - - // Build a set of unauthenticated input notes for this block which do not have a - // matching output note produced in this block - let dangling_notes: BTreeSet<_> = batches - .iter() - .flat_map(ProvenBatch::input_notes) - .filter_map(InputNoteCommitment::header) - .map(NoteHeader::id) - .filter(|note_id| !output_notes_set.contains(note_id)) - .collect(); - - Self { - updated_accounts, - nullifiers, - output_notes, - dangling_notes, - } - } -} - struct SelectedBlock { block_number: BlockNumber, batches: Vec, } -struct BlockSummaryAndInputs { +struct BlockBatchesAndInputs { batches: Vec, - summary: BlockSummary, inputs: BlockInputs, } -// TODO: Is this still needed? If so, what should be its name? -struct ProvenBlockWrapper { +struct BuiltBlock { block: ProvenBlock, } @@ -290,34 +259,30 @@ impl SelectedBlock { } } -impl BlockSummaryAndInputs { +impl BlockBatchesAndInputs { fn inject_telemetry(&self) { let span = Span::current(); // SAFETY: We do not expect to have more than u32::MAX of any count per block. 
span.set_attribute( "block.updated_accounts.count", - i64::try_from(self.summary.updated_accounts.len()) + i64::try_from(self.inputs.account_witnesses().len()) .expect("less than u32::MAX account updates"), ); - span.set_attribute( - "block.output_notes.count", - i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len()))) - .expect("less than u32::MAX output notes"), - ); span.set_attribute( "block.nullifiers.count", - i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + i64::try_from(self.inputs.nullifier_witnesses().len()) + .expect("less than u32::MAX nullifiers"), ); span.set_attribute( - "block.dangling_notes.count", - i64::try_from(self.summary.dangling_notes.len()) + "block.unauthenticated_notes.count", + i64::try_from(self.inputs.unauthenticated_note_proofs().len()) .expect("less than u32::MAX dangling notes"), ); } } -impl ProvenBlockWrapper { +impl BuiltBlock { fn inject_telemetry(&self) { let span = Span::current(); let header = self.block.header(); @@ -328,6 +293,13 @@ impl ProvenBlockWrapper { span.set_attribute("block.protocol.version", i64::from(header.version())); + // Question: Should this be here? (Moved here because output notes are no longer) + span.set_attribute( + "block.output_notes.count", + i64::try_from(self.block.output_notes().count()) + .expect("less than u32::MAX output notes"), + ); + span.set_attribute("block.commitments.kernel", header.kernel_root()); span.set_attribute("block.commitments.nullifier", header.nullifier_root()); span.set_attribute("block.commitments.account", header.account_root()); diff --git a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm b/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm deleted file mode 100644 index 309501d59..000000000 --- a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm +++ /dev/null @@ -1,244 +0,0 @@ -#! Note: For now, the "block kernel" only computes the account root. Eventually, it will compute -#! the entire block header. -#! -#! Stack inputs: [num_accounts_updated, OLD_ACCOUNT_ROOT, NEW_ACCOUNT_HASH_0, account_id_0, ... , -#! NEW_ACCOUNT_HASH_n, account_id_n] - -use.std::collections::smt -use.std::collections::mmr -use.std::sys - -const.ACCOUNT_TREE_DEPTH=64 -const.BLOCK_NOTES_BATCH_TREE_DEPTH=6 -const.CHAIN_MMR_PTR=1000 - -#! Compute the account root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_accounts_updated, OLD_ACCOUNT_ROOT, [NEW_ACCOUNT_HASH_i, account_id_i]] -#! Outputs: -#! 
Operand stack: [NEW_ACCOUNT_ROOT] -proc.compute_account_root - # move the number of updated accounts and an old account root to the operand stack - adv_push.5 - # OS => [OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - while.true - # num_accounts_updated here serves as a counter, so rename it accordingly - # old account root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # move the account hash to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [account_id_i, [NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # move the account id to the operand stack, push the account tree depth - adv_push.1 push.ACCOUNT_TREE_DEPTH - # OS => [account_tree_depth, account_id_i, ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the note root. -#! -#! Each batch contains a tree of depth 10 for its created notes. The block's created notes tree is -#! created by aggregating up to 2^6 tree roots coming from the batches contained in the block. -#! -#! `SMT_EMPTY_ROOT` must be `E16`, the root of the empty tree of depth 16. If less than 2^6 batches -#! are contained in the block, `E10` is used as the padding value; this is derived from the fact -#! that `SMT_EMPTY_ROOT` is `E16`, and that our tree has depth 6. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_notes_updated, SMT_EMPTY_ROOT, [BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] -#! Outputs: -#! 
Operand stack: [NOTES_ROOT] -proc.compute_note_root - # move the number of updated notes and empty root to the operand stack - adv_push.5 - # OS => [SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - while.true - # num_notes_updated here serves as a counter, so rename it accordingly - # empty root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # move the batch note tree root to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [batch_note_root_idx_i, [BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # move the batch note root index to the operand stack, push the block notes batch tree depth - adv_push.1 push.BLOCK_NOTES_BATCH_TREE_DEPTH - # OS => [batch_tree_depth, batch_note_root_idx_i, ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the nullifier root. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_produced_nullifiers, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, [NULLIFIER_i]] -#! Outputs: -#! Operand stack: [NULLIFIER_ROOT] -proc.compute_nullifier_root - # move the number of produced nullifiers, old root and nullifier value to the operand stack; - # move nullifier value below the root - adv_push.9 swapw - # OS => [OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - # assess if we should loop - dup.8 neq.0 - # OS => [flag, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - while.true - # num_produced_nullifiers here serves as a counter, so rename it accordingly - # old nullifier root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_i]] - - # move the nullifier hash to the operand stack - adv_push.4 - # OS => [NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # dup the nullifier value - dupw.2 - # OS => [NULLIFIER_VALUE, NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - exec.smt::set - # OS => [OLD_VALUE, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # Check that OLD_VALUE == 0 (i.e. that nullifier was indeed not previously produced) - assertz assertz assertz assertz - # OS => [ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # loop counter - movup.8 sub.1 dup movdn.9 neq.0 - # OS => [flag, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - end - - # drop the counter and the nullifier value - swapw dropw movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the chain MMR root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [PREV_BLOCK_HASH_TO_INSERT, PREV_CHAIN_MMR_HASH] -#! Advice map: { -#! 
PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [CHAIN_MMR_ROOT] -proc.compute_chain_mmr_root - # move the previous block hash and chain MMR hash to the operand stack - adv_push.8 - # OS => [PREV_CHAIN_MMR_HASH, PREV_BLOCK_HASH_TO_INSERT] - # AS => [] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_CHAIN_MMR_HASH, chain_mmr_ptr, PREV_BLOCK_HASH_TO_INSERT] - - # load the chain MMR (as of previous block) at memory location CHAIN_MMR_PTR - exec.mmr::unpack - # OS => [PREV_BLOCK_HASH_TO_INSERT] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_BLOCK_HASH_TO_INSERT, chain_mmr_ptr] - - # add PREV_BLOCK_HASH_TO_INSERT to chain MMR - exec.mmr::add - # OS => [] - - # Compute new MMR root - push.CHAIN_MMR_PTR exec.mmr::pack - # OS => [CHAIN_MMR_ROOT] -end - -#! Inputs: -#! Operand stack: [] -#! Advice stack: [, , , ] -#! Advice map: { -#! PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] -begin - exec.compute_account_root mem_storew.0 dropw - # => [, , ] - - exec.compute_note_root mem_storew.4 dropw - # => [, ] - - exec.compute_nullifier_root mem_storew.8 dropw - # => [] - - exec.compute_chain_mmr_root - # => [CHAIN_MMR_ROOT] - - # Load output on stack - padw mem_loadw.8 padw mem_loadw.4 padw mem_loadw.0 - # => [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] - - # truncate the stack - exec.sys::truncate_stack -end diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs deleted file mode 100644 index eac670ad4..000000000 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ /dev/null @@ -1,328 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::{BatchAccountUpdate, BatchNoteTree, ProvenBatch}, - block::{BlockAccountUpdate, BlockHeader}, - crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, - note::Nullifier, - transaction::TransactionId, - vm::{AdviceInputs, StackInputs}, - Digest, Felt, BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, ZERO, -}; - -use crate::{ - block::BlockInputs, - errors::{BlockProverError, BuildBlockError}, -}; - -// BLOCK WITNESS -// ================================================================================================= - -/// Provides inputs to the `BlockKernel` so that it can generate the new header. -#[derive(Debug, PartialEq)] -pub struct BlockWitness { - pub(super) updated_accounts: Vec<(AccountId, AccountUpdateWitness)>, - /// (`batch_index`, `created_notes_root`) for batches that contain notes - pub(super) batch_created_notes_roots: BTreeMap, - pub(super) produced_nullifiers: BTreeMap, - pub(super) chain_peaks: MmrPeaks, - pub(super) prev_header: BlockHeader, -} - -impl BlockWitness { - pub fn new( - mut block_inputs: BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(Self, Vec), BuildBlockError> { - // This limit should be enforced by the mempool. 
- assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); - - Self::validate_nullifiers(&block_inputs, batches)?; - - let batch_created_notes_roots = batches - .iter() - .enumerate() - .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| { - let batch_note_tree = BatchNoteTree::with_contiguous_leaves( - batch.output_notes().iter().map(|note| (note.id(), note.metadata())), - ) - .expect("number of output notes in batch should be within the allowed range"); - (batch_index, batch_note_tree.root()) - }) - .collect(); - - // Order account updates by account ID and each update's initial state hash. - // - // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = - BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { - updated_accounts - .entry(*account_id) - .or_default() - .insert(update.initial_state_commitment(), update.clone()); - } - - // Build account witnesses. - let mut account_witnesses = Vec::with_capacity(updated_accounts.len()); - let mut block_updates = Vec::with_capacity(updated_accounts.len()); - - for (account_id, mut updates) in updated_accounts { - let (initial_state_hash, proof) = block_inputs - .accounts - .remove(&account_id) - .map(|witness| (witness.hash, witness.proof)) - .ok_or(BuildBlockError::MissingAccountInput(account_id))?; - - let mut details: Option = None; - - // Chronologically chain updates for this account together using the state hashes to - // link them. - let mut transactions = Vec::new(); - let mut current_hash = initial_state_hash; - while !updates.is_empty() { - let update = updates.remove(¤t_hash).ok_or_else(|| { - BuildBlockError::InconsistentAccountStateTransition( - account_id, - current_hash, - updates.keys().copied().collect(), - ) - })?; - - current_hash = update.final_state_commitment(); - let (update_transactions, update_details) = update.into_parts(); - transactions.extend(update_transactions); - - details = Some(match details { - None => update_details, - Some(details) => details.merge(update_details).map_err(|source| { - BuildBlockError::AccountUpdateError { account_id, source } - })?, - }); - } - - account_witnesses.push(( - account_id, - AccountUpdateWitness { - initial_state_hash, - final_state_hash: current_hash, - proof, - transactions: transactions.clone(), - }, - )); - - block_updates.push(BlockAccountUpdate::new( - account_id, - current_hash, - details.expect("Must be some by now"), - transactions, - )); - } - - if !block_inputs.accounts.is_empty() { - return Err(BuildBlockError::ExtraStoreData( - block_inputs.accounts.keys().copied().collect(), - )); - } - - Ok(( - Self { - updated_accounts: account_witnesses, - batch_created_notes_roots, - produced_nullifiers: block_inputs.nullifiers, - chain_peaks: block_inputs.chain_peaks, - prev_header: block_inputs.block_header, - }, - block_updates, - )) - } - - /// Converts [`BlockWitness`] into inputs to the block kernel program - pub(super) fn into_program_inputs( - self, - ) -> Result<(AdviceInputs, StackInputs), BlockProverError> { - let advice_inputs = self.build_advice_inputs()?; - - Ok((advice_inputs, StackInputs::default())) - } - - /// Returns an iterator over all transactions which affected accounts in the block with - /// corresponding account IDs. 
- pub(super) fn transactions(&self) -> impl Iterator + '_ { - self.updated_accounts.iter().flat_map(|(account_id, update)| { - update.transactions.iter().map(move |tx_id| (*tx_id, *account_id)) - }) - } - - // HELPERS - // --------------------------------------------------------------------------------------------- - - /// Validates that the nullifiers returned from the store are the same the produced nullifiers - /// in the batches. Note that validation that the value of the nullifiers is `0` will be - /// done in MASM. - fn validate_nullifiers( - block_inputs: &BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(), BuildBlockError> { - let produced_nullifiers_from_store: BTreeSet = - block_inputs.nullifiers.keys().copied().collect(); - - let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - if produced_nullifiers_from_store == produced_nullifiers_from_batches { - Ok(()) - } else { - let differing_nullifiers: Vec = produced_nullifiers_from_store - .symmetric_difference(&produced_nullifiers_from_batches) - .copied() - .collect(); - - Err(BuildBlockError::InconsistentNullifiers(differing_nullifiers)) - } - } - - /// Builds the advice inputs to the block kernel - fn build_advice_inputs(self) -> Result { - let advice_stack = { - let mut advice_stack = Vec::new(); - - // add account stack inputs to the advice stack - { - let mut account_data = Vec::new(); - let mut num_accounts_updated: u64 = 0; - for (idx, (account_id, account_update)) in self.updated_accounts.iter().enumerate() - { - account_data.extend(account_update.final_state_hash); - account_data.push(account_id.prefix().as_felt()); - - let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); - num_accounts_updated = idx + 1; - } - - // append number of accounts updated - advice_stack.push(num_accounts_updated.try_into().expect( - "updated accounts number is greater than or equal to the field modulus", - )); - - // append initial account root - advice_stack.extend(self.prev_header.account_root()); - - // append the updated accounts data - advice_stack.extend(account_data); - } - - // add notes stack inputs to the advice stack - { - // append the number of updated notes - advice_stack - .push(Felt::try_from(self.batch_created_notes_roots.len() as u64).expect( - "notes roots number is greater than or equal to the field modulus", - )); - - // append the empty root - let empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - advice_stack.extend(*empty_root); - - for (batch_index, batch_created_notes_root) in &self.batch_created_notes_roots { - advice_stack.extend(batch_created_notes_root.iter()); - - let batch_index = Felt::try_from(*batch_index as u64) - .expect("batch index is greater than or equal to the field modulus"); - advice_stack.push(batch_index); - } - } - - // Nullifiers stack inputs - { - let num_produced_nullifiers: Felt = (self.produced_nullifiers.len() as u64) - .try_into() - .expect("nullifiers number is greater than or equal to the field modulus"); - - // append number of nullifiers - advice_stack.push(num_produced_nullifiers); - - // append initial nullifier root - advice_stack.extend(self.prev_header.nullifier_root()); - - // append nullifier value (`[block_num, 0, 0, 0]`) - let block_num = self.prev_header.block_num() + 1; - advice_stack.extend([block_num.into(), ZERO, ZERO, ZERO]); - - for nullifier in self.produced_nullifiers.keys() { - advice_stack.extend(nullifier.inner()); - } - } - - // Chain MMR stack inputs - 
{ - advice_stack.extend(self.prev_header.hash()); - advice_stack.extend(self.chain_peaks.hash_peaks()); - } - - advice_stack - }; - - let merkle_store = { - let mut merkle_store = MerkleStore::default(); - - // add accounts merkle paths - merkle_store - .add_merkle_paths(self.updated_accounts.into_iter().map( - |(account_id, AccountUpdateWitness { initial_state_hash, proof, .. })| { - (account_id.prefix().into(), initial_state_hash, proof) - }, - )) - .map_err(BlockProverError::InvalidMerklePaths)?; - - // add nullifiers merkle paths - merkle_store - .add_merkle_paths(self.produced_nullifiers.iter().map(|(nullifier, proof)| { - // Note: the initial value for all nullifiers in the tree is `[0, 0, 0, 0]` - ( - u64::from(nullifier.most_significant_felt()), - Digest::default(), - proof.path().clone(), - ) - })) - .map_err(BlockProverError::InvalidMerklePaths)?; - - merkle_store - }; - - let advice_map: Vec<_> = self - .produced_nullifiers - .values() - .map(|proof| (proof.leaf().hash(), proof.leaf().to_elements())) - .chain(std::iter::once(mmr_peaks_advice_map_key_value(&self.chain_peaks))) - .collect(); - - let advice_inputs = AdviceInputs::default() - .with_merkle_store(merkle_store) - .with_map(advice_map) - .with_stack(advice_stack); - - Ok(advice_inputs) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(super) struct AccountUpdateWitness { - pub initial_state_hash: Digest, - pub final_state_hash: Digest, - pub proof: MerklePath, - pub transactions: Vec, -} - -// HELPERS -// ================================================================================================= - -// Generates the advice map key/value for Mmr peaks -fn mmr_peaks_advice_map_key_value(peaks: &MmrPeaks) -> (Digest, Vec) { - let mut elements = vec![Felt::new(peaks.num_leaves() as u64), ZERO, ZERO, ZERO]; - elements.extend(peaks.flatten_and_pad_peaks()); - - (peaks.hash_peaks(), elements) -} diff --git a/crates/block-producer/src/block_builder/prover/mod.rs b/crates/block-producer/src/block_builder/prover/mod.rs deleted file mode 100644 index 2f141c352..000000000 --- a/crates/block-producer/src/block_builder/prover/mod.rs +++ /dev/null @@ -1,128 +0,0 @@ -use std::time::{SystemTime, UNIX_EPOCH}; - -use miden_lib::transaction::TransactionKernel; -use miden_objects::{assembly::Assembler, block::BlockHeader, Digest}; -use miden_processor::{execute, DefaultHost, ExecutionOptions, MemAdviceProvider, Program}; -use miden_stdlib::StdLibrary; - -use self::block_witness::BlockWitness; -use crate::errors::{BlockProverError, BuildBlockError}; - -/// The index of the word at which the account root is stored on the output stack. -pub const ACCOUNT_ROOT_WORD_IDX: usize = 0; - -/// The index of the word at which the note root is stored on the output stack. -pub const NOTE_ROOT_WORD_IDX: usize = 4; - -/// The index of the word at which the nullifier root is stored on the output stack. -pub const NULLIFIER_ROOT_WORD_IDX: usize = 8; - -/// The index of the word at which the note root is stored on the output stack. 
-pub const CHAIN_MMR_ROOT_WORD_IDX: usize = 12; - -pub mod block_witness; - -#[cfg(test)] -mod tests; - -const BLOCK_KERNEL_MASM: &str = include_str!("asm/block_kernel.masm"); - -#[derive(Debug)] -pub(crate) struct BlockProver { - kernel: Program, -} - -impl BlockProver { - pub fn new() -> Self { - let account_program = { - let assembler = Assembler::default() - .with_library(StdLibrary::default()) - .expect("failed to load std-lib"); - - assembler - .assemble_program(BLOCK_KERNEL_MASM) - .expect("failed to load account update program") - }; - - Self { kernel: account_program } - } - - // Note: this will eventually all be done in the VM, and also return an `ExecutionProof` - pub fn prove(&self, witness: BlockWitness) -> Result { - let prev_hash = witness.prev_header.hash(); - let block_num = witness.prev_header.block_num() + 1; - let version = witness.prev_header.version(); - - let tx_hash = BlockHeader::compute_tx_commitment(witness.transactions()); - let (account_root, note_root, nullifier_root, chain_root) = self.compute_roots(witness)?; - - let proof_hash = Digest::default(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("today is expected to be after 1970") - .as_secs() - .try_into() - .expect("timestamp must fit in a `u32`"); - - Ok(BlockHeader::new( - version, - prev_hash, - block_num, - chain_root, - account_root, - nullifier_root, - note_root, - tx_hash, - TransactionKernel::kernel_root(), - proof_hash, - timestamp, - )) - } - - fn compute_roots( - &self, - witness: BlockWitness, - ) -> Result<(Digest, Digest, Digest, Digest), BlockProverError> { - let (advice_inputs, stack_inputs) = witness.into_program_inputs()?; - let mut host = { - let advice_provider = MemAdviceProvider::from(advice_inputs); - - let mut host = DefaultHost::new(advice_provider); - host.load_mast_forest(StdLibrary::default().mast_forest().clone()) - .expect("failed to load mast forest"); - - host - }; - - let execution_output = - execute(&self.kernel, stack_inputs, &mut host, ExecutionOptions::default()) - .map_err(BlockProverError::ProgramExecutionFailed)?; - - let new_account_root = execution_output - .stack_outputs() - .get_stack_word(ACCOUNT_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("account"))?; - - let new_note_root = execution_output - .stack_outputs() - .get_stack_word(NOTE_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("note"))?; - - let new_nullifier_root = execution_output - .stack_outputs() - .get_stack_word(NULLIFIER_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("nullifier"))?; - - let new_chain_mmr_root = execution_output - .stack_outputs() - .get_stack_word(CHAIN_MMR_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("chain mmr"))?; - - Ok(( - new_account_root.into(), - new_note_root.into(), - new_nullifier_root.into(), - new_chain_mmr_root.into(), - )) - } -} diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs deleted file mode 100644 index b26f3895d..000000000 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ /dev/null @@ -1,939 +0,0 @@ -use std::{collections::BTreeMap, iter}; - -use assert_matches::assert_matches; -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_objects::{ - account::{ - delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, - }, - batch::ProvenBatch, - block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, - crypto::merkle::{ - 
EmptySubtreeRoots, LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, - }, - note::{NoteExecutionHint, NoteHeader, NoteMetadata, NoteTag, NoteType, Nullifier}, - testing::account_id::{ - ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, - }, - transaction::{OutputNote, ProvenTransaction}, - Felt, BATCH_NOTE_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, ONE, ZERO, -}; - -use self::block_witness::AccountUpdateWitness; -use super::*; -use crate::{ - block::{AccountWitness, BlockInputs}, - test_utils::{ - batch::TransactionBatchConstructor, - block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, - MockProvenTxBuilder, MockStoreSuccessBuilder, - }, -}; - -// BLOCK WITNESS TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of account ids. -/// -/// The store will contain accounts 1 & 2, while the transaction batches will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_account_ids() { - let account_id_1 = AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_2 = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_3 = AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - (account_id_1, AccountWitness::default()), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account( - account_id_3, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert!(block_witness_result.is_err()); -} - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different at least 1 account who's state hash is different. 
-/// -/// Only account 1 will have a different state hash -#[test] -fn block_witness_validation_inconsistent_account_hashes() { - let account_id_1 = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let account_id_2 = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let account_1_hash_store = - Digest::new([Felt::new(1u64), Felt::new(2u64), Felt::new(3u64), Felt::new(4u64)]); - let account_1_hash_batches = - Digest::new([Felt::new(4u64), Felt::new(3u64), Felt::new(2u64), Felt::new(1u64)]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - ( - account_id_1, - AccountWitness { - hash: account_1_hash_store, - proof: MerklePath::default(), - }, - ), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()]); - - let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()]); - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentAccountStateTransition( - account_id, - account_hash_store, - account_hash_batches - )) => { - assert_eq!(account_id, account_id_1); - assert_eq!(account_hash_store, account_1_hash_store); - assert_eq!(account_hash_batches, vec![account_1_hash_batches]); - } - ); -} - -/// Creates two batches which each update the same pair of accounts. -/// -/// The transactions are ordered such that the batches cannot be chronologically ordered -/// themselves: `[tx_x0, tx_y1], [tx_y0, tx_x1]`. This test ensures that the witness is -/// produced correctly as if for a single batch: `[tx_x0, tx_x1, tx_y0, tx_y1]`. 
-#[test] -fn block_witness_multiple_batches_per_account() { - let x_account_id = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let y_account_id = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let x_hashes = [ - Digest::new((0..4).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((4..8).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((8..12).map(Felt::new).collect::>().try_into().unwrap()), - ]; - let y_hashes = [ - Digest::new((12..16).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((16..20).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((20..24).map(Felt::new).collect::>().try_into().unwrap()), - ]; - - let x_txs = [ - MockProvenTxBuilder::with_account(x_account_id, x_hashes[0], x_hashes[1]).build(), - MockProvenTxBuilder::with_account(x_account_id, x_hashes[1], x_hashes[2]).build(), - ]; - let y_txs = [ - MockProvenTxBuilder::with_account(y_account_id, y_hashes[0], y_hashes[1]).build(), - MockProvenTxBuilder::with_account(y_account_id, y_hashes[1], y_hashes[2]).build(), - ]; - - let x_proof = MerklePath::new(vec![Digest::new( - (24..28).map(Felt::new).collect::>().try_into().unwrap(), - )]); - let y_proof = MerklePath::new(vec![Digest::new( - (28..32).map(Felt::new).collect::>().try_into().unwrap(), - )]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let x_witness = AccountWitness { - hash: x_hashes[0], - proof: x_proof.clone(), - }; - let y_witness = AccountWitness { - hash: y_hashes[0], - proof: y_proof.clone(), - }; - let accounts = BTreeMap::from_iter([(x_account_id, x_witness), (y_account_id, y_witness)]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); - let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - let account_witnesses = block_witness.updated_accounts.into_iter().collect::>(); - - let x_expected = AccountUpdateWitness { - initial_state_hash: x_hashes[0], - final_state_hash: *x_hashes.last().unwrap(), - proof: x_proof, - transactions: x_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let y_expected = AccountUpdateWitness { - initial_state_hash: y_hashes[0], - final_state_hash: *y_hashes.last().unwrap(), - proof: y_proof, - transactions: y_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let expected = [(x_account_id, x_expected), (y_account_id, y_expected)].into(); - - assert_eq!(account_witnesses, expected); -} - -// ACCOUNT ROOT TESTS -// ================================================================================================= - -/// Tests that the `BlockProver` computes the proper account root. -/// -/// We assume an initial store with 5 accounts, and all will be updated. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_success() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - let account_final_states = [ - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = account_ids - .iter() - .enumerate() - .map(|(idx, &account_id)| { - MockProvenTxBuilder::with_account( - account_id, - account_initial_states[idx].into(), - account_final_states[idx].into(), - ) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Update SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let block = MockBlockBuilder::new(&store) - .await - .account_updates( - account_ids - .iter() - .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| { - BlockAccountUpdate::new( 
- account_id, - account_hash.into(), - AccountUpdateDetails::Private, - vec![], - ) - }) - .collect(), - ) - .build(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), block.header().account_root()); -} - -/// Test that the current account root is returned if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_empty_batches() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches = Vec::new(); - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), store.account_root().await); -} - -// NOTE ROOT TESTS -// ================================================================================================= - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, and -/// contains no batches -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_batches_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - 
// Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = Vec::new(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, but -/// which contains at least 1 batch. -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_notes_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let batch = ProvenBatch::mocked_from_transactions(vec![]); - vec![batch] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the expected tree when multiple notes were created across -/// many batches. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_success() { - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let notes_created: Vec = [ - Digest::from([Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)]), - Digest::from([Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)]), - Digest::from([Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)]), - ] - .into_iter() - .zip(account_ids.iter()) - .map(|(note_digest, &account_id)| { - NoteHeader::new( - note_digest.into(), - NoteMetadata::new( - account_id, - NoteType::Private, - NoteTag::for_local_use_case(0u16, 0u16).unwrap(), - NoteExecutionHint::none(), - ONE, - ) - .unwrap(), - ) - }) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = notes_created - .iter() - .zip(account_ids.iter()) - .map(|(note, &account_id)| { - let note = OutputNote::Header(*note); - MockProvenTxBuilder::with_account(account_id, Digest::default(), Digest::default()) - .output_notes(vec![note]) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create block note tree to get new root - // --------------------------------------------------------------------------------------------- - - // The current logic is hardcoded to a depth of 6 - // Specifically, we assume the block has up to 2^6 batches, and each batch up to 2^10 created - // notes, where each note is stored at depth 10 in the batch tree. 
- #[allow(clippy::items_after_statements, reason = "assert belongs to this section")] - const _: () = assert!(BLOCK_NOTE_TREE_DEPTH - BATCH_NOTE_TREE_DEPTH == 6); - - // The first 2 txs were put in the first batch; the 3rd was put in the second - let note_tree = BlockNoteTree::with_entries([ - (BlockNoteIndex::new(0, 0), notes_created[0].id(), *notes_created[0].metadata()), - (BlockNoteIndex::new(0, 1), notes_created[1].id(), *notes_created[1].metadata()), - (BlockNoteIndex::new(1, 0), notes_created[2].id(), *notes_created[2].metadata()), - ]) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.note_root(), note_tree.root()); -} - -// NULLIFIER ROOT TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of nullifiers. -/// -/// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let nullifier_1 = batches[0].produced_nullifiers().next().unwrap(); - let nullifier_2 = batches[1].produced_nullifiers().next().unwrap(); - let nullifier_3 = - Nullifier::from([101_u32.into(), 102_u32.into(), 103_u32.into(), 104_u32.into()]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, hash)| { - (account_id, AccountWitness { hash, proof: MerklePath::default() }) - }) - .collect(); - - let nullifiers = BTreeMap::from_iter(vec![ - ( - nullifier_2, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_2.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ( - nullifier_3, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_3.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentNullifiers(nullifiers)) => { - assert_eq!(nullifiers, vec![nullifier_1, nullifier_3]); - } - ); -} - -/// Tests that the block kernel returns the expected nullifier tree when no nullifiers are present -/// in the transaction -#[tokio::test] -async fn compute_nullifier_root_empty_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let 
batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let nullifier_smt = Smt::new(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -/// Tests that the block kernel returns the expected nullifier tree when multiple nullifiers are -/// present in the transaction -#[tokio::test] -async fn compute_nullifier_root_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - let nullifiers = [ - batches[0].produced_nullifiers().next().unwrap(), - batches[1].produced_nullifiers().next().unwrap(), - ]; - - // Set up store - // --------------------------------------------------------------------------------------------- - let initial_block_num = BlockNumber::from(42); - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()) - .initial_block_num(initial_block_num) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), nullifiers.iter(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - - // Note that the block number in store is 42; the nullifiers get added to the next block (i.e. 
- // block number 43) - let nullifier_smt = - Smt::with_entries(nullifiers.into_iter().map(|nullifier| { - (nullifier.inner(), [(initial_block_num + 1).into(), ZERO, ZERO, ZERO]) - })) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -// CHAIN MMR ROOT TESTS -// ================================================================================================= - -/// Test that the chain mmr root is as expected if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_empty_mmr() { - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to non-empty MMR (1 peak), and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_1_peak() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - mmr.add(Digest::default()); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to an MMR with 17 peaks, and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_17_peaks() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - for _ in 0..(2_u32.pow(17) - 1) { - mmr.add(Digest::default()); - } - - assert_eq!(mmr.peaks().peaks().len(), 17); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index ec20a6b1d..6c9de3e37 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -1,14 +1,12 @@ +use miden_block_prover::ProvenBlockError; use miden_node_proto::errors::ConversionError; use miden_node_utils::formatting::format_opt; use miden_objects::{ - account::AccountId, block::BlockNumber, - crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, ProposedBatchError, + Digest, ProposedBatchError, ProposedBlockError, }; -use miden_processor::ExecutionError; use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -146,50 +144,19 @@ pub enum BuildBatchError { ProveBatchError(#[source] ProvenBatchError), } -// Block prover errors -// ================================================================================================= - -#[derive(Debug, Error)] -pub enum BlockProverError { - #[error("received invalid merkle path")] - InvalidMerklePaths(#[source] 
MerkleError), - #[error("program execution failed")] - ProgramExecutionFailed(#[source] ExecutionError), - #[error("failed to retrieve {0} root from stack outputs")] - InvalidRootOutput(&'static str), -} - // Block building errors // ================================================================================================= #[derive(Debug, Error)] pub enum BuildBlockError { - #[error("failed to compute new block")] - BlockProverFailed(#[from] BlockProverError), #[error("failed to apply block to store")] StoreApplyBlockFailed(#[source] StoreError), #[error("failed to get block inputs from store")] GetBlockInputsFailed(#[source] StoreError), - #[error("block inputs from store did not contain data for account {0}")] - MissingAccountInput(AccountId), - #[error("block inputs from store contained extra data for accounts {0:?}")] - ExtraStoreData(Vec), - #[error("account {0} with state {1} cannot transaction to remaining states {2:?}")] - InconsistentAccountStateTransition(AccountId, Digest, Vec), - #[error( - "block inputs from store and transaction batches produced different nullifiers: {0:?}" - )] - InconsistentNullifiers(Vec), - #[error("unauthenticated transaction notes not found in the store or in outputs of other transactions in the block: {0:?}")] - UnauthenticatedNotesNotFound(Vec), - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - // TODO: Check if needed. - // #[error("block construction failed")] - // BlockConstructionError, + #[error("failed to propose block")] + ProposeBlockFailed(#[source] ProposedBlockError), + #[error("failed to prove block")] + ProveBlockFailed(#[source] ProvenBlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/lib.rs b/crates/block-producer/src/lib.rs index 1cb3b62c9..8aa594232 100644 --- a/crates/block-producer/src/lib.rs +++ b/crates/block-producer/src/lib.rs @@ -10,7 +10,6 @@ mod errors; mod mempool; mod store; -pub mod block; pub mod config; pub mod server; diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index d367a7a75..9d4ad4d40 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -22,7 +22,7 @@ use miden_node_proto::{ use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, - block::{BlockHeader, BlockNumber, ProvenBlock}, + block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, @@ -32,7 +32,7 @@ use miden_processor::crypto::RpoDigest; use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; -use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; +use crate::{errors::StoreError, COMPONENT}; // TRANSACTION INPUTS // ================================================================================================ @@ -197,13 +197,15 @@ impl StoreClient { pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, + created_nullifiers: impl Iterator + Send, + unauthenticated_notes: impl Iterator + Send, + reference_blocks: impl Iterator + Send, ) -> Result { let request = tonic::Request::new(GetBlockInputsRequest { account_ids: updated_accounts.map(Into::into).collect(), - nullifiers: produced_nullifiers.map(digest::Digest::from).collect(), - unauthenticated_notes: notes.map(digest::Digest::from).collect(), + nullifiers: created_nullifiers.map(digest::Digest::from).collect(), + unauthenticated_notes: unauthenticated_notes.map(digest::Digest::from).collect(), + reference_blocks: reference_blocks.map(|block_num| block_num.as_u32()).collect(), }); let store_response = self.inner.clone().get_block_inputs(request).await?.into_inner(); diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index eb7aef93f..6293f5428 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,5 +1,3 @@ -use std::iter; - use miden_objects::{ batch::ProvenBatch, block::{ @@ -13,10 +11,6 @@ use miden_objects::{ }; use super::MockStoreSuccess; -use crate::{ - block::BlockInputs, - block_builder::prover::{block_witness::BlockWitness, BlockProver}, -}; /// Constructs the block we expect to be built given the store state, and a set of transaction /// batches to be applied @@ -74,30 +68,31 @@ pub async fn build_expected_block_header( ) } -/// Builds the "actual" block header; i.e. 
the block header built using the Miden VM, used in the -/// node -pub async fn build_actual_block_header( - store: &MockStoreSuccess, - batches: Vec, -) -> BlockHeader { - let updated_accounts: Vec<_> = - batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); - let produced_nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - let block_inputs_from_store: BlockInputs = store - .get_block_inputs( - updated_accounts.iter().map(|(&account_id, _)| account_id), - produced_nullifiers.iter(), - iter::empty(), - ) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - BlockProver::new().prove(block_witness).unwrap() -} +// Note: Commented as it is unused atm. Is it worth fixing it? +// /// Builds the "actual" block header; i.e. the block header built using the Miden VM, used in the +// /// node +// pub async fn build_actual_block_header( +// store: &MockStoreSuccess, +// batches: Vec, +// ) -> BlockHeader { +// let updated_accounts: Vec<_> = +// batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); +// let produced_nullifiers: Vec = +// batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); + +// let block_inputs_from_store: BlockInputs = store +// .get_block_inputs( +// updated_accounts.iter().map(|(&account_id, _)| account_id), +// produced_nullifiers.iter(), +// iter::empty(), +// ) +// .await +// .unwrap(); + +// let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + +// BlockProver::new().prove(block_witness).unwrap() +// } #[derive(Debug)] pub struct MockBlockBuilder { diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index 080570f64..edf7cff4b 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -3,12 +3,11 @@ use std::{ num::NonZeroU32, }; -use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ batch::ProvenBatch, block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, - crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, - note::{NoteId, NoteInclusionProof, Nullifier}, + crypto::merkle::{Mmr, SimpleSmt, Smt}, + note::{NoteId, NoteInclusionProof}, transaction::ProvenTransaction, ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; @@ -16,7 +15,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, test_utils::block::{ @@ -293,65 +291,66 @@ impl MockStoreSuccess { }) } - pub async fn get_block_inputs( - &self, - updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, - ) -> Result { - let locked_accounts = self.accounts.read().await; - let locked_produced_nullifiers = self.produced_nullifiers.read().await; - - let chain_peaks = { - let locked_chain_mmr = self.chain_mmr.read().await; - locked_chain_mmr.peaks() - }; - - let accounts = { - updated_accounts - .map(|account_id| { - let ValuePath { value: hash, path: proof } = - locked_accounts.open(&account_id.into()); - - (account_id, AccountWitness { hash, proof }) - }) - .collect() - }; - - let nullifiers = produced_nullifiers - .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) - .collect(); - - let locked_notes = self.notes.read().await; - let note_proofs = notes - .filter_map(|id| 
locked_notes.get(id).map(|proof| (*id, proof.clone()))) - .collect::>(); - - let locked_headers = self.block_headers.read().await; - let latest_header = - *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; - - let locked_chain_mmr = self.chain_mmr.read().await; - let chain_length = latest_header.block_num(); - let block_proofs = note_proofs - .values() - .map(|note_proof| { - let block_num = note_proof.location().block_num(); - let block_header = *locked_headers.get(&block_num).unwrap(); - let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; - - BlockInclusionProof { block_header, mmr_path, chain_length } - }) - .collect(); - - let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; - - Ok(BlockInputs { - block_header: latest_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } + // Note: Commented as it is unused atm. Is it worth fixing it? + // pub async fn get_block_inputs( + // &self, + // updated_accounts: impl Iterator + Send, + // produced_nullifiers: impl Iterator + Send, + // notes: impl Iterator + Send, + // ) -> Result { + // let locked_accounts = self.accounts.read().await; + // let locked_produced_nullifiers = self.produced_nullifiers.read().await; + + // let chain_peaks = { + // let locked_chain_mmr = self.chain_mmr.read().await; + // locked_chain_mmr.peaks() + // }; + + // let accounts = { + // updated_accounts + // .map(|account_id| { + // let ValuePath { value: hash, path: proof } = + // locked_accounts.open(&account_id.into()); + + // (account_id, AccountWitness { hash, proof }) + // }) + // .collect() + // }; + + // let nullifiers = produced_nullifiers + // .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) + // .collect(); + + // let locked_notes = self.notes.read().await; + // let note_proofs = notes + // .filter_map(|id| locked_notes.get(id).map(|proof| (*id, proof.clone()))) + // .collect::>(); + + // let locked_headers = self.block_headers.read().await; + // let latest_header = + // *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; + + // let locked_chain_mmr = self.chain_mmr.read().await; + // let chain_length = latest_header.block_num(); + // let block_proofs = note_proofs + // .values() + // .map(|note_proof| { + // let block_num = note_proof.location().block_num(); + // let block_header = *locked_headers.get(&block_num).unwrap(); + // let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; + + // BlockInclusionProof { block_header, mmr_path, chain_length } + // }) + // .collect(); + + // let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; + + // Ok(BlockInputs { + // block_header: latest_header, + // chain_peaks, + // accounts, + // nullifiers, + // found_unauthenticated_notes, + // }) + // } } diff --git a/crates/proto/src/domain/account.rs b/crates/proto/src/domain/account.rs index 3d6680065..36fadc4a0 100644 --- a/crates/proto/src/domain/account.rs +++ b/crates/proto/src/domain/account.rs @@ -149,49 +149,45 @@ impl TryInto for proto::requests::get_account_proofs_reques } } -// ACCOUNT INPUT RECORD +// ACCOUNT WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct AccountInputRecord { +pub struct AccountWitnessRecord { pub account_id: AccountId, - pub account_hash: Digest, + pub initial_state_commitment: Digest, pub proof: 
MerklePath, } -impl From for proto::responses::AccountBlockInputRecord { - fn from(from: AccountInputRecord) -> Self { +impl From for proto::responses::AccountWitness { + fn from(from: AccountWitnessRecord) -> Self { Self { account_id: Some(from.account_id.into()), - account_hash: Some(from.account_hash.into()), + initial_state_commitment: Some(from.initial_state_commitment.into()), proof: Some(Into::into(&from.proof)), } } } -impl TryFrom for AccountInputRecord { +impl TryFrom for AccountWitnessRecord { type Error = ConversionError; fn try_from( - account_input_record: proto::responses::AccountBlockInputRecord, + account_witness_record: proto::responses::AccountWitness, ) -> Result { Ok(Self { - account_id: account_input_record + account_id: account_witness_record .account_id - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_id - )))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_id)))? .try_into()?, - account_hash: account_input_record - .account_hash - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_hash - )))? + initial_state_commitment: account_witness_record + .initial_state_commitment + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_hash)))? .try_into()?, - proof: account_input_record + proof: account_witness_record .proof .as_ref() - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!(proof)))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(proof)))? .try_into()?, }) } diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index fa7e4bcfb..43b7f2f1a 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -1,11 +1,19 @@ +use std::collections::BTreeMap; + use miden_objects::{ - block::{BlockHeader, BlockNumber}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness}, crypto::merkle::MerklePath, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, }; use crate::{ errors::{ConversionError, MissingFieldHelper}, - generated::block as proto, + generated::{ + block as proto, note::NoteInclusionInBlockProof, responses::GetBlockInputsResponse, + }, + AccountWitnessRecord, NullifierWitnessRecord, }; // BLOCK HEADER @@ -124,3 +132,98 @@ impl TryFrom for BlockInclusionProof { Ok(result) } } + +// BLOCK INPUTS +// ================================================================================================ + +impl From for GetBlockInputsResponse { + fn from(inputs: BlockInputs) -> Self { + let ( + prev_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + ) = inputs.into_parts(); + + GetBlockInputsResponse { + latest_block_header: Some(prev_block_header.into()), + account_witnesses: account_witnesses + .into_iter() + .map(|(id, witness)| { + let (initial_state_commitment, proof) = witness.into_parts(); + AccountWitnessRecord { + account_id: id, + initial_state_commitment, + proof, + } + .into() + }) + .collect(), + nullifier_witnesses: nullifier_witnesses + .into_iter() + .map(|(nullifier, witness)| { + let proof = witness.into_proof(); + NullifierWitnessRecord { nullifier, proof }.into() + }) + .collect(), + chain_mmr: chain_mmr.to_bytes(), + unauthenticated_note_proofs: unauthenticated_note_proofs + .iter() + .map(NoteInclusionInBlockProof::from) + .collect(), + } + } +} + +impl TryFrom for BlockInputs { + type Error = 
ConversionError; + + fn try_from(response: GetBlockInputsResponse) -> Result { + let latest_block_header: BlockHeader = response + .latest_block_header + .ok_or(proto::BlockHeader::missing_field("block_header"))? + .try_into()?; + + let account_witnesses = response + .account_witnesses + .into_iter() + .map(|entry| { + let witness_record: AccountWitnessRecord = entry.try_into()?; + Ok(( + witness_record.account_id, + AccountWitness::new( + witness_record.initial_state_commitment, + witness_record.proof, + ), + )) + }) + .collect::, ConversionError>>()?; + + let nullifier_witnesses = response + .nullifier_witnesses + .into_iter() + .map(|entry| { + let witness: NullifierWitnessRecord = entry.try_into()?; + Ok((witness.nullifier, NullifierWitness::new(witness.proof))) + }) + .collect::, ConversionError>>()?; + + let unauthenticated_note_proofs = response + .unauthenticated_note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?; + + let chain_mmr = ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?; + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) + } +} diff --git a/crates/proto/src/domain/nullifier.rs b/crates/proto/src/domain/nullifier.rs index 482183a0f..c78fd3da0 100644 --- a/crates/proto/src/domain/nullifier.rs +++ b/crates/proto/src/domain/nullifier.rs @@ -39,36 +39,32 @@ impl TryFrom for Nullifier { // ================================================================================================ #[derive(Clone, Debug)] -pub struct NullifierWitness { +pub struct NullifierWitnessRecord { pub nullifier: Nullifier, pub proof: SmtProof, } -impl TryFrom for NullifierWitness { +impl TryFrom for NullifierWitnessRecord { type Error = ConversionError; fn try_from( - nullifier_input_record: proto::responses::NullifierBlockInputRecord, + nullifier_witness_record: proto::responses::NullifierWitness, ) -> Result { Ok(Self { - nullifier: nullifier_input_record + nullifier: nullifier_witness_record .nullifier - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - nullifier - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(nullifier)))? .try_into()?, - proof: nullifier_input_record + proof: nullifier_witness_record .opening - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - opening - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(opening)))? .try_into()?, }) } } -impl From for proto::responses::NullifierBlockInputRecord { - fn from(value: NullifierWitness) -> Self { +impl From for proto::responses::NullifierWitness { + fn from(value: NullifierWitnessRecord) -> Self { Self { nullifier: Some(value.nullifier.into()), opening: Some(value.proof.into()), diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index c8e19bb29..8f18373d4 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -82,15 +82,26 @@ pub struct SyncNoteRequest { /// Returns data required to prove the next block. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockInputsRequest { - /// ID of the account against which a transaction is executed. + /// IDs of all accounts updated in the proposed block for which to retrieve account witnesses. 
#[prost(message, repeated, tag = "1")] pub account_ids: ::prost::alloc::vec::Vec, - /// Set of nullifiers consumed by this transaction. + /// Nullifiers all notes consumed by the block for which to retrieve witnesses. + /// + /// Due to note erasure it will generally not be possible to know the exact set of nullifiers + /// a block will create, unless we pre-execute note erasure. So in practice, this set of + /// nullifiers will be the set of nullifiers of all proven batches in the block, which is a + /// superset of the nullifiers the block may create. + /// + /// However, if it is known that a certain note will be erased, it would not be necessary to + /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs to be checked for existence in the database. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, + /// Array of block numbers referenced by all batches in the block. + #[prost(fixed32, repeated, tag = "4")] + pub reference_blocks: ::prost::alloc::vec::Vec, } /// Returns the inputs for a transaction batch. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index c3a8f5f20..35f5bd220 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -86,24 +86,25 @@ pub struct SyncNoteResponse { } /// An account returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct AccountBlockInputRecord { +pub struct AccountWitness { /// The account ID. #[prost(message, optional, tag = "1")] pub account_id: ::core::option::Option, - /// The latest account hash, zero hash if the account doesn't exist. + /// The latest account state commitment used as the initial state of the requested block. + /// This will be the zero digest if the account doesn't exist. #[prost(message, optional, tag = "2")] - pub account_hash: ::core::option::Option, - /// Merkle path to verify the account's inclusion in the MMR. + pub initial_state_commitment: ::core::option::Option, + /// Merkle path to verify the account's inclusion in the account tree. #[prost(message, optional, tag = "3")] pub proof: ::core::option::Option, } /// A nullifier returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct NullifierBlockInputRecord { - /// The nullifier ID. +pub struct NullifierWitness { + /// The nullifier. #[prost(message, optional, tag = "1")] pub nullifier: ::core::option::Option, - /// Merkle path to verify the nullifier's inclusion in the MMR. + /// The SMT proof to verify the nullifier's inclusion in the nullifier tree. #[prost(message, optional, tag = "2")] pub opening: ::core::option::Option, } @@ -112,21 +113,24 @@ pub struct NullifierBlockInputRecord { pub struct GetBlockInputsResponse { /// The latest block header. #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Peaks of the above block's mmr, The `forest` value is equal to the block number. + pub latest_block_header: ::core::option::Option, + /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + /// the store**. 
#[prost(message, repeated, tag = "2")] - pub mmr_peaks: ::prost::alloc::vec::Vec, - /// The hashes of the requested accounts and their authentication paths. - #[prost(message, repeated, tag = "3")] - pub account_states: ::prost::alloc::vec::Vec, - /// The requested nullifiers and their authentication paths. - #[prost(message, repeated, tag = "4")] - pub nullifiers: ::prost::alloc::vec::Vec, - /// The list of requested notes which were found in the database. - #[prost(message, optional, tag = "5")] - pub found_unauthenticated_notes: ::core::option::Option< - super::note::NoteAuthenticationInfo, + pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< + super::note::NoteInclusionInBlockProof, >, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the requested blocks + /// referenced by the batches in the block. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, + /// The state commitments of the requested accounts and their authentication paths. + #[prost(message, repeated, tag = "4")] + pub account_witnesses: ::prost::alloc::vec::Vec, + /// The requested nullifiers and their authentication paths. + #[prost(message, repeated, tag = "5")] + pub nullifier_witnesses: ::prost::alloc::vec::Vec, } /// Represents the result of getting batch inputs. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/lib.rs b/crates/proto/src/lib.rs index 9290fc739..ca950f020 100644 --- a/crates/proto/src/lib.rs +++ b/crates/proto/src/lib.rs @@ -8,8 +8,8 @@ pub mod generated; // ================================================================================================ pub use domain::{ - account::{AccountInputRecord, AccountState}, + account::{AccountState, AccountWitnessRecord}, convert, - nullifier::NullifierWitness, + nullifier::NullifierWitnessRecord, try_convert, }; diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index f2323c56c..4441ce578 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -78,12 +78,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. 
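
The request-side changes above replace the transaction-oriented fields with block-oriented ones. As a rough illustration, the sketch below assembles the four inputs from a set of proven batches and calls the updated `StoreClient::get_block_inputs`. It is a sketch only: `reference_block_num()` on `ProvenBatch`, the source of the unauthenticated note IDs, and the exact iterator item and return types (generic parameters are elided in this diff) are assumptions; only `account_updates()` and `produced_nullifiers()` appear elsewhere in this patch.

    use miden_objects::{
        account::AccountId,
        batch::ProvenBatch,
        block::{BlockInputs, BlockNumber},
        note::{NoteId, Nullifier},
    };

    use crate::{errors::StoreError, store::StoreClient};

    // Sketch: gather the four request inputs from the block's proven batches and fetch the
    // block inputs from the store.
    async fn fetch_block_inputs(
        store: &StoreClient,
        batches: &[ProvenBatch],
        // Unauthenticated note IDs are assumed to be collected elsewhere (e.g. from batch inputs).
        unauthenticated_notes: &[NoteId],
    ) -> Result<BlockInputs, StoreError> {
        // Accounts updated by any batch in the proposed block.
        let updated_accounts: Vec<AccountId> = batches
            .iter()
            .flat_map(|batch| batch.account_updates().iter().map(|(id, _)| *id))
            .collect();

        // Superset of the nullifiers the block may create; note erasure can only shrink this set.
        let created_nullifiers: Vec<Nullifier> = batches
            .iter()
            .flat_map(|batch| batch.produced_nullifiers())
            .collect();

        // One reference block number per batch; `reference_block_num()` is an assumed accessor.
        // Duplicates are fine, the store collects these into a BTreeSet.
        let reference_blocks: Vec<BlockNumber> =
            batches.iter().map(|batch| batch.reference_block_num()).collect();

        store
            .get_block_inputs(
                updated_accounts.into_iter(),
                created_nullifiers.iter(),
                unauthenticated_notes.iter(),
                reference_blocks.into_iter(),
            )
            .await
    }
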
diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index f1dfe5f90..3a0d58d24 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -90,42 +90,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. 
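
Both directions of the `BlockInputs` to `GetBlockInputsResponse` conversion (and back) now live in `crates/proto/src/domain/block.rs` above, so the store and the block producer sit on opposite ends of a simple round trip. A minimal sketch, assuming a `BlockInputs` value is already at hand (e.g. from the store's `State::get_block_inputs`):

    use miden_node_proto::{errors::ConversionError, generated::responses::GetBlockInputsResponse};
    use miden_objects::block::BlockInputs;

    // The store converts its domain type into the gRPC response; the block producer converts it
    // back. The chain MMR travels as opaque serialized bytes, everything else as structured
    // messages.
    fn round_trip(inputs: BlockInputs) -> Result<BlockInputs, ConversionError> {
        let response = GetBlockInputsResponse::from(inputs);
        BlockInputs::try_from(response)
    }

This mirrors what the store's `get_block_inputs` endpoint does on the server side via `.map(GetBlockInputsResponse::from)` in `crates/store/src/server/api.rs` below.
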
diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index aecf56f33..d9ba0110b 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -244,6 +244,10 @@ pub enum GetBlockInputsError { IncorrectChainMmrForestNumber { forest: usize, block_num: BlockNumber }, #[error("note inclusion proof MMR error")] NoteInclusionMmr(#[from] MmrError), + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), } impl From for GetBlockInputsError { diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 0064668e6..a509e1723 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -349,15 +349,16 @@ impl api_server::Api for StoreApi { ) -> Result, Status> { let request = request.into_inner(); - let nullifiers = validate_nullifiers(&request.nullifiers)?; let account_ids = read_account_ids(&request.account_ids)?; + let nullifiers = validate_nullifiers(&request.nullifiers)?; let unauthenticated_notes = validate_notes(&request.unauthenticated_notes)?; + let reference_blocks = read_block_numbers(&request.reference_blocks); let unauthenticated_notes = unauthenticated_notes.into_iter().collect(); self.state - .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes) + .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes, reference_blocks) .await - .map(Into::into) + .map(GetBlockInputsResponse::from) .map(Response::new) .map_err(internal_error) } @@ -580,3 +581,8 @@ fn validate_notes(notes: &[generated::digest::Digest]) -> Result, St .collect::>() .map_err(|_| invalid_argument("Digest field is not in the modulus range")) } + +#[instrument(target = COMPONENT, skip_all)] +fn read_block_numbers(block_numbers: &[u32]) -> BTreeSet { + block_numbers.iter().map(|raw_number| BlockNumber::from(*raw_number)).collect() +} diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 17ec2cf45..9b1e5e450 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -10,22 +10,18 @@ use std::{ }; use miden_node_proto::{ - convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, batch::BatchInputs, block::BlockInclusionProof, note::NoteAuthenticationInfo, }, - generated::responses::{ - AccountProofsResponse, AccountStateHeader, GetBlockInputsResponse, StorageSlotMapProof, - }, - AccountInputRecord, NullifierWitness, + generated::responses::{AccountProofsResponse, AccountStateHeader, StorageSlotMapProof}, }; use miden_node_utils::formatting::format_array; use miden_objects::{ account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, - block::{BlockHeader, BlockNumber, ProvenBlock}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness, ProvenBlock}, crypto::{ hash::rpo::RpoDigest, merkle::{ @@ -58,37 +54,6 @@ use crate::{ // STRUCTURES // ================================================================================================ -/// Information needed from the store to validate and build a block -#[derive(Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub account_states: Vec, - - /// The requested nullifiers and their authentication paths - pub nullifiers: Vec, - - /// List of notes 
found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -impl From for GetBlockInputsResponse { - fn from(value: BlockInputs) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_peaks: convert(value.chain_peaks.peaks()), - account_states: convert(value.account_states), - nullifiers: convert(value.nullifiers), - found_unauthenticated_notes: Some(value.found_unauthenticated_notes.into()), - } - } -} - #[derive(Debug)] pub struct TransactionInputs { pub account_hash: RpoDigest, @@ -150,12 +115,14 @@ impl Blockchain { &self.0 } - /// Returns the latest block number and partial mmr. + /// Creates a [`PartialMmr`] at the state of the latest block (i.e. the block's chain root will + /// match the hashed peaks of the returned partial MMR). This MMR will include authentication + /// paths for all blocks in the provided set. pub fn partial_mmr_from_blocks( &self, blocks: &BTreeSet, latest_block_number: BlockNumber, - ) -> Result { + ) -> PartialMmr { // Using latest block as the target forest means we take the state of the MMR one before // the latest block. This is because the latest block will be used as the reference // block of the batch and will be added to the MMR by the batch kernel. @@ -183,7 +150,8 @@ impl Blockchain { .track(block_num, leaf, &path) .expect("filling partial mmr with data from mmr should succeed"); } - Ok(partial_mmr) + + partial_mmr } } @@ -654,7 +622,7 @@ impl State { ( latest_block_num, - inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num), ) }; @@ -783,61 +751,100 @@ impl State { account_ids: &[AccountId], nullifiers: &[Nullifier], unauthenticated_notes: BTreeSet, + reference_blocks: BTreeSet, ) -> Result { + // Get the note inclusion proofs from the DB. + // We do this first so we have to acquire the lock to the state where we have to get the + // note inclusion proof's block inclusion proof. + let unauthenticated_note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_notes) + .await + .map_err(GetBlockInputsError::SelectNoteInclusionProofError)?; + + // The set of blocks that the notes are included in. + let note_proof_reference_blocks = + unauthenticated_note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to prove inclusion for, without duplicates. + let mut blocks = reference_blocks; + blocks.extend(note_proof_reference_blocks); + + // Acquire the lock to the inner state. While we hold the lock, we don't access the DB. let inner = self.inner.read().await; - let latest = self - .db - .select_block_header_by_block_num(None) - .await? 
-            .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?;
-
-        // sanity check
-        if inner.blockchain.chain_tip() != latest.block_num() {
-            return Err(GetBlockInputsError::IncorrectChainMmrForestNumber {
-                forest: inner.blockchain.chain_tip().as_usize(),
-                block_num: latest.block_num(),
-            });
-        }
+        let latest_block_number = inner.latest_block_num();
 
-        // using current block number gets us the peaks of the chain MMR as of one block ago;
-        // this is done so that latest.chain_root matches the returned peaks
-        let chain_peaks =
-            inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| {
-                GetBlockInputsError::FailedToGetMmrPeaksForForest {
-                    forest: latest.block_num().as_usize(),
-                    error,
-                }
-            })?;
-        let account_states = account_ids
+        // The latest block is not yet in the chain MMR, so we can't (and don't need to) prove its
+        // inclusion in the chain.
+        blocks.remove(&latest_block_number);
+
+        // Fetch the partial MMR with authentication paths for the set of blocks.
+        let partial_mmr = inner.blockchain.partial_mmr_from_blocks(&blocks, latest_block_number);
+
+        // Fetch witnesses for all accounts.
+        let account_witnesses = account_ids
             .iter()
             .copied()
             .map(|account_id| {
-                let ValuePath { value: account_hash, path: proof } =
-                    inner.account_tree.open(&LeafIndex::new_max_depth(account_id.prefix().into()));
-                Ok(AccountInputRecord { account_id, account_hash, proof })
+                let ValuePath {
+                    value: latest_state_commitment,
+                    path: proof,
+                } = inner.account_tree.open(&account_id.into());
+                (account_id, AccountWitness::new(latest_state_commitment, proof))
             })
-            .collect::>()?;
+            .collect::>();
 
-        let nullifiers: Vec = nullifiers
+        // Fetch witnesses for all nullifiers. We don't check whether the nullifiers are spent or
+        // not as this is done as part of proposing the block.
+        let nullifier_witnesses: BTreeMap = nullifiers
             .iter()
+            .copied()
             .map(|nullifier| {
-                let proof = inner.nullifier_tree.open(nullifier);
-
-                NullifierWitness { nullifier: *nullifier, proof }
+                let proof = inner.nullifier_tree.open(&nullifier);
+                (nullifier, NullifierWitness::new(proof))
             })
             .collect();
 
-        let found_unauthenticated_notes =
-            self.get_note_authentication_info(unauthenticated_notes).await?;
+        // Release the lock.
+        std::mem::drop(inner);
 
-        Ok(BlockInputs {
-            block_header: latest,
-            chain_peaks,
-            account_states,
-            nullifiers,
-            found_unauthenticated_notes,
-        })
+        // Fetch the block headers for all blocks in the partial MMR plus the latest one which will
+        // be used as the previous block header of the block being built.
+        let mut headers = self
+            .db
+            .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number)))
+            .await
+            .map_err(GetBlockInputsError::SelectBlockHeaderError)?;
+
+        // Find and remove the latest block as we must not add it to the chain MMR, since it is
+        // not yet in the chain.
+        let latest_block_header_index = headers
+            .iter()
+            .enumerate()
+            .find_map(|(index, header)| {
+                (header.block_num() == latest_block_number).then_some(index)
+            })
+            .expect("DB should have returned the header of the latest block");
+
+        // The order doesn't matter for ChainMmr::new, so swap remove is fine.
+ let latest_block_header = headers.swap_remove(latest_block_header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block header's block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) } /// Returns data needed by the block producer to verify transactions validity. diff --git a/proto/requests.proto b/proto/requests.proto index f2323c56c..4441ce578 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -78,12 +78,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/proto/responses.proto b/proto/responses.proto index f1dfe5f90..3a0d58d24 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -90,42 +90,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. 
- block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs.
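
With the new `miden-block-prover` dependency and the `ProposeBlockFailed`/`ProveBlockFailed` variants on `BuildBlockError`, the block producer is expected to consume these inputs in a propose-then-prove flow. The sketch below shows that shape only; `ProposedBlock::new`, `LocalBlockProver` (its constructor argument and `prove` method), and the import paths are assumed miden-base APIs, not confirmed by this diff.

    use miden_block_prover::LocalBlockProver;
    use miden_objects::{
        batch::ProvenBatch,
        block::{BlockInputs, ProposedBlock, ProvenBlock},
    };

    use crate::errors::BuildBlockError;

    // Sketch of the propose-then-prove flow implied by the new error variants.
    fn build_block(
        block_inputs: BlockInputs,
        batches: Vec<ProvenBatch>,
    ) -> Result<ProvenBlock, BuildBlockError> {
        // Proposing the block is where the witnesses fetched via `GetBlockInputs` (account
        // states, nullifiers, unauthenticated note proofs, chain MMR) are validated.
        let proposed_block = ProposedBlock::new(block_inputs, batches)
            .map_err(BuildBlockError::ProposeBlockFailed)?;

        // Proving turns the proposed block into a `ProvenBlock` that can be applied to the store.
        LocalBlockProver::new(0)
            .prove(proposed_block)
            .map_err(BuildBlockError::ProveBlockFailed)
    }
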