From 277ab587ee08019e3e4f25fa61bcc276c514d34a Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Wed, 19 Feb 2025 12:05:09 +0100 Subject: [PATCH 01/32] chore: Temp rename of `ProvenBlock` to `ProvenBlockWrapper` --- crates/block-producer/src/block_builder/mod.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index bbdc23c5a..78dd05691 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -100,7 +100,7 @@ impl BlockBuilder { .then(|selected| self.get_block_inputs(selected)) .inspect_ok(BlockSummaryAndInputs::inject_telemetry) .and_then(|inputs| self.prove_block(inputs)) - .inspect_ok(ProvenBlock::inject_telemetry) + .inspect_ok(ProvenBlockWrapper::inject_telemetry) // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot // handle errors after it considers the process complete (which makes sense). .and_then(|proven_block| async { self.inject_failure(proven_block) }) @@ -153,7 +153,7 @@ impl BlockBuilder { async fn prove_block( &self, preimage: BlockSummaryAndInputs, - ) -> Result { + ) -> Result { let BlockSummaryAndInputs { batches, summary, inputs } = preimage; let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; @@ -169,14 +169,14 @@ impl BlockBuilder { self.simulate_proving().await; - Ok(ProvenBlock { block }) + Ok(ProvenBlockWrapper { block }) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - proven_block: ProvenBlock, + proven_block: ProvenBlockWrapper, ) -> Result<(), BuildBlockError> { self.store .apply_block(&proven_block.block) @@ -275,7 +275,9 @@ struct BlockSummaryAndInputs { summary: BlockSummary, inputs: BlockInputs, } -struct ProvenBlock { + +// TODO: Is this still needed? If so, what should be its name? 
+struct ProvenBlockWrapper { block: Block, } @@ -314,7 +316,7 @@ impl BlockSummaryAndInputs { } } -impl ProvenBlock { +impl ProvenBlockWrapper { fn inject_telemetry(&self) { let span = Span::current(); let header = self.block.header(); From e5d0151b5e16cab16f335967e80fdd0a81290dee Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Wed, 19 Feb 2025 12:43:14 +0100 Subject: [PATCH 02/32] chore: Update node to miden-base companion PR --- Cargo.lock | 140 +++++++++--------- Cargo.toml | 7 +- crates/block-producer/Cargo.toml | 2 +- .../block-producer/src/block_builder/mod.rs | 11 +- .../src/block_builder/prover/block_witness.rs | 10 +- .../src/block_builder/prover/mod.rs | 8 +- .../src/block_builder/prover/tests.rs | 18 +-- crates/block-producer/src/errors.rs | 11 +- crates/block-producer/src/store/mod.rs | 4 +- crates/block-producer/src/test_utils/batch.rs | 8 +- crates/block-producer/src/test_utils/block.rs | 28 ++-- crates/block-producer/src/test_utils/store.rs | 17 ++- crates/store/src/db/mod.rs | 6 +- crates/store/src/db/sql/mod.rs | 24 ++- crates/store/src/db/tests.rs | 10 +- crates/store/src/errors.rs | 8 +- crates/store/src/genesis.rs | 6 +- crates/store/src/server/api.rs | 8 +- crates/store/src/state.rs | 14 +- 19 files changed, 179 insertions(+), 161 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7eb2e6ce..ae688eaca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -385,9 +385,9 @@ checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "blake3" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "1230237285e3e10cde447185e8975408ae24deaa67205ce684805c25bc0c7937" dependencies = [ "arrayref", "arrayvec", @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.11" +version = "1.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" +checksum = "0c3d1b2e905a3a7b00a6141adb0e4c0bb941d11caf55349d863942a1cc44e3c9" dependencies = [ "jobserver", "libc", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.28" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" +checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" dependencies = [ "clap_builder", "clap_derive", @@ -534,9 +534,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.27" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" dependencies = [ "anstream", "anstyle", @@ -826,9 +826,9 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" @@ -880,6 +880,12 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "fnv" version = "1.0.7" @@ -1046,9 +1052,9 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "h2" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", @@ -1440,15 +1446,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.14.0" @@ -1503,7 +1500,7 @@ dependencies = [ "ena", "itertools 0.11.0", "lalrpop-util", - "petgraph", + "petgraph 0.6.5", "regex", "regex-syntax 0.8.5", "string_cache", @@ -1728,9 +1725,9 @@ dependencies = [ [[package]] name = "miden-crypto" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1945918276152bd9b8e8434643ad24d4968e075b68a5ed03927b53ac75490a79" +checksum = "1d8f76b64bfbb75705403ec3e2faad6a045544871d9c441758becc55415cfe64" dependencies = [ "blake3", "cc", @@ -1787,7 +1784,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" dependencies = [ "miden-assembly", "miden-objects", @@ -1983,7 +1980,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2042,7 +2039,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" dependencies = [ "async-trait", "miden-lib", @@ -2059,7 +2056,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" dependencies = [ "miden-core", "miden-crypto", @@ -2129,9 +2126,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "b3b1c9bd4fe1f0f8b387f6eb9eb3b4a1aa26185e5750efb9140301703f62cd1b" dependencies = [ "adler2", ] @@ -2300,9 +2297,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version 
= "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" @@ -2460,7 +2457,17 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ - "fixedbitset", + "fixedbitset 0.4.2", + "indexmap 2.7.1", +] + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset 0.5.7", "indexmap 2.7.1", ] @@ -2596,9 +2603,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", @@ -2606,16 +2613,16 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck", - "itertools 0.13.0", + "itertools 0.14.0", "log", "multimap", "once_cell", - "petgraph", + "petgraph 0.7.1", "prettyplease", "prost", "prost-types", @@ -2626,12 +2633,12 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.14.0", "proc-macro2", "quote", "syn", @@ -2639,9 +2646,9 @@ dependencies = [ [[package]] name = "prost-reflect" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e92b959d24e05a3e2da1d0beb55b48bc8a97059b8336ea617780bd6addbbfb5a" +checksum = "a7b318f733603136dcc61aa9e77c928d67f87d2436c34ec052ba3f1b5ca219de" dependencies = [ "logos", "miette", @@ -2652,9 +2659,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" dependencies = [ "prost", ] @@ -2835,15 +2842,14 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ring" -version = "0.17.8" +version = "0.17.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "e75ec5e92c4d8aede845126adc388046234541629e76029599ed35a003c7ed24" dependencies = [ "cc", "cfg-if", "getrandom 0.2.15", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -2917,9 +2923,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.22" +version = "0.23.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" dependencies = [ "log", "once_cell", @@ -3174,9 +3180,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "smawk" @@ -3307,9 +3313,9 @@ checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" [[package]] name = "tempfile" -version = "3.16.0" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", @@ -3525,9 +3531,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -3546,9 +3552,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.23" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap 2.7.1", "serde", @@ -3834,9 +3840,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unarray" @@ -3861,9 +3867,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-linebreak" @@ -4357,18 +4363,18 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" +checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" dependencies = [ "memchr", ] [[package]] name = "winter-air" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8fdb702503625f54dcaf9222aa2c7a0b2e868b3eb84b90d1837d68034bf999" +checksum = "827ef2aa5a5ab663936e0a6326286e0fc83321771df0d9ea20c46c72c8baa90d" dependencies = [ "libm", "winter-crypto", diff --git a/Cargo.toml b/Cargo.toml index 2c05af39a..33694e6ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,18 +28,17 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { git = 
"https://github.com/0xPolygonMiden/miden-base", branch = "next" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } +miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index dabf8d7bd..89a3f0cbc 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -28,7 +28,7 @@ miden-objects = { workspace = true } miden-processor = { workspace = true } miden-stdlib = { workspace = true } miden-tx = { workspace = true } -miden-tx-batch-prover = { workspace = true } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-batch-expiration" } rand = { version = "0.8" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 78dd05691..738260312 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -8,7 +8,7 @@ use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ account::AccountId, batch::ProvenBatch, - block::{Block, BlockNumber}, + block::{BlockNumber, ProvenBlock}, note::{NoteHeader, NoteId, Nullifier}, transaction::{InputNoteCommitment, OutputNote}, }; @@ -160,12 +160,13 @@ impl BlockBuilder { let new_block_header = self.block_kernel.prove(block_header_witness)?; - let block = Block::new( + // TODO: Update. Temporarily left in an incorrect state. + let block = ProvenBlock::new_unchecked( new_block_header, updated_accounts, - summary.output_notes, + vec![], summary.nullifiers, - )?; + ); self.simulate_proving().await; @@ -278,7 +279,7 @@ struct BlockSummaryAndInputs { // TODO: Is this still needed? If so, what should be its name? 
struct ProvenBlockWrapper { - block: Block, + block: ProvenBlock, } impl SelectedBlock { diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs index a0972e541..eac670ad4 100644 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ b/crates/block-producer/src/block_builder/prover/block_witness.rs @@ -2,7 +2,7 @@ use std::collections::{BTreeMap, BTreeSet}; use miden_objects::{ account::{delta::AccountUpdateDetails, AccountId}, - batch::{BatchAccountUpdate, ProvenBatch}, + batch::{BatchAccountUpdate, BatchNoteTree, ProvenBatch}, block::{BlockAccountUpdate, BlockHeader}, crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, note::Nullifier, @@ -44,7 +44,13 @@ impl BlockWitness { .iter() .enumerate() .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| (batch_index, batch.output_notes_tree().root())) + .map(|(batch_index, batch)| { + let batch_note_tree = BatchNoteTree::with_contiguous_leaves( + batch.output_notes().iter().map(|note| (note.id(), note.metadata())), + ) + .expect("number of output notes in batch should be within the allowed range"); + (batch_index, batch_note_tree.root()) + }) .collect(); // Order account updates by account ID and each update's initial state hash. diff --git a/crates/block-producer/src/block_builder/prover/mod.rs b/crates/block-producer/src/block_builder/prover/mod.rs index ded93a7d4..2f141c352 100644 --- a/crates/block-producer/src/block_builder/prover/mod.rs +++ b/crates/block-producer/src/block_builder/prover/mod.rs @@ -1,11 +1,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use miden_lib::transaction::TransactionKernel; -use miden_objects::{ - assembly::Assembler, - block::{compute_tx_hash, BlockHeader}, - Digest, -}; +use miden_objects::{assembly::Assembler, block::BlockHeader, Digest}; use miden_processor::{execute, DefaultHost, ExecutionOptions, MemAdviceProvider, Program}; use miden_stdlib::StdLibrary; @@ -57,7 +53,7 @@ impl BlockProver { let block_num = witness.prev_header.block_num() + 1; let version = witness.prev_header.version(); - let tx_hash = compute_tx_hash(witness.transactions()); + let tx_hash = BlockHeader::compute_tx_commitment(witness.transactions()); let (account_root, note_root, nullifier_root, chain_root) = self.compute_roots(witness)?; let proof_hash = Digest::default(); diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs index f020faf62..b26f3895d 100644 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ b/crates/block-producer/src/block_builder/prover/tests.rs @@ -644,21 +644,9 @@ async fn compute_note_root_success() { // The first 2 txs were put in the first batch; the 3rd was put in the second let note_tree = BlockNoteTree::with_entries([ - ( - BlockNoteIndex::new(0, 0).unwrap(), - notes_created[0].id(), - *notes_created[0].metadata(), - ), - ( - BlockNoteIndex::new(0, 1).unwrap(), - notes_created[1].id(), - *notes_created[1].metadata(), - ), - ( - BlockNoteIndex::new(1, 0).unwrap(), - notes_created[2].id(), - *notes_created[2].metadata(), - ), + (BlockNoteIndex::new(0, 0), notes_created[0].id(), *notes_created[0].metadata()), + (BlockNoteIndex::new(0, 1), notes_created[1].id(), *notes_created[1].metadata()), + (BlockNoteIndex::new(1, 0), notes_created[2].id(), *notes_created[2].metadata()), ]) .unwrap(); diff --git a/crates/block-producer/src/errors.rs 
b/crates/block-producer/src/errors.rs index 02bc01568..ec20a6b1d 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -6,10 +6,10 @@ use miden_objects::{ crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, BlockError, Digest, ProposedBatchError, + AccountDeltaError, Digest, ProposedBatchError, }; use miden_processor::ExecutionError; -use miden_tx_batch_prover::errors::BatchProveError; +use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -143,7 +143,7 @@ pub enum BuildBatchError { ProposeBatchError(#[source] ProposedBatchError), #[error("failed to prove proposed transaction batch")] - ProveBatchError(#[source] BatchProveError), + ProveBatchError(#[source] ProvenBatchError), } // Block prover errors @@ -187,8 +187,9 @@ pub enum BuildBlockError { account_id: AccountId, source: AccountDeltaError, }, - #[error("block construction failed")] - BlockConstructionError(#[from] BlockError), + // TODO: Check if needed. + // #[error("block construction failed")] + // BlockConstructionError, /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. #[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 3d8009cda..d367a7a75 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -22,7 +22,7 @@ use miden_node_proto::{ use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, - block::{Block, BlockHeader, BlockNumber}, + block::{BlockHeader, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, @@ -228,7 +228,7 @@ impl StoreClient { } #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { let request = tonic::Request::new(ApplyBlockRequest { block: block.to_bytes() }); self.inner.clone().apply_block(request).await.map(|_| ()).map_err(Into::into) diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 37c2041fb..8e4371a3f 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,7 +1,7 @@ use std::collections::BTreeMap; use miden_objects::{ - batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, + batch::{BatchAccountUpdate, BatchId, ProvenBatch}, block::BlockNumber, transaction::{InputNotes, ProvenTransaction}, Digest, @@ -56,16 +56,12 @@ impl TransactionBatchConstructor for ProvenBatch { output_notes.extend(tx.output_notes().iter().cloned()); } - ProvenBatch::new( + ProvenBatch::new_unchecked( BatchId::from_transactions(txs.into_iter()), Digest::default(), BlockNumber::GENESIS, account_updates, InputNotes::new_unchecked(input_notes), - BatchNoteTree::with_contiguous_leaves( - output_notes.iter().map(|x| (x.id(), x.metadata())), - ) - .unwrap(), output_notes, BlockNumber::from(u32::MAX), ) diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 03ea004f3..eb7aef93f 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs 
@@ -2,7 +2,10 @@ use std::iter; use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, NoteBatch}, + block::{ + BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, OutputNoteBatch, + ProvenBlock, + }, crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, transaction::OutputNote, @@ -103,7 +106,7 @@ pub struct MockBlockBuilder { last_block_header: BlockHeader, updated_accounts: Option>, - created_notes: Option>, + created_notes: Option>, produced_nullifiers: Option>, } @@ -140,7 +143,7 @@ impl MockBlockBuilder { } #[must_use] - pub fn created_notes(mut self, created_notes: Vec) -> Self { + pub fn created_notes(mut self, created_notes: Vec) -> Self { self.created_notes = Some(created_notes); self @@ -153,7 +156,7 @@ impl MockBlockBuilder { self } - pub fn build(self) -> Block { + pub fn build(self) -> ProvenBlock { let created_notes = self.created_notes.unwrap_or_default(); let header = BlockHeader::new( @@ -170,28 +173,27 @@ impl MockBlockBuilder { 1, ); - Block::new( + ProvenBlock::new_unchecked( header, self.updated_accounts.unwrap_or_default(), created_notes, self.produced_nullifiers.unwrap_or_default(), ) - .unwrap() } } pub(crate) fn flatten_output_notes<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> impl Iterator { batches.enumerate().flat_map(|(batch_idx, batch)| { - batch.iter().enumerate().map(move |(note_idx_in_batch, note)| { - (BlockNoteIndex::new(batch_idx, note_idx_in_batch).unwrap(), note) + batch.iter().map(move |(note_idx_in_batch, note)| { + (BlockNoteIndex::new(batch_idx, *note_idx_in_batch), note) }) }) } pub(crate) fn note_created_smt_from_note_batches<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> BlockNoteTree { let note_leaf_iterator = flatten_output_notes(batches).map(|(index, note)| (index, note.id(), *note.metadata())); @@ -201,6 +203,8 @@ pub(crate) fn note_created_smt_from_note_batches<'a>( pub(crate) fn block_output_notes<'a>( batches: impl Iterator + Clone, -) -> Vec> { - batches.map(|batch| batch.output_notes().to_vec()).collect() +) -> Vec { + batches + .map(|batch| batch.output_notes().iter().cloned().enumerate().collect()) + .collect() } diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index af404acd9..080570f64 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -6,7 +6,7 @@ use std::{ use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockHeader, BlockNumber, NoteBatch}, + block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, note::{NoteId, NoteInclusionProof, Nullifier}, transaction::ProvenTransaction, @@ -28,7 +28,7 @@ use crate::{ #[derive(Debug)] pub struct MockStoreSuccessBuilder { accounts: Option>, - notes: Option>, + notes: Option>, produced_nullifiers: Option>, chain_mmr: Option, block_num: Option, @@ -76,7 +76,10 @@ impl MockStoreSuccessBuilder { } #[must_use] - pub fn initial_notes<'a>(mut self, notes: impl Iterator + Clone) -> Self { + pub fn initial_notes<'a>( + mut self, + notes: impl Iterator + Clone, + ) -> Self { self.notes = Some(notes.cloned().collect()); self @@ -191,7 +194,7 @@ impl MockStoreSuccess { locked_accounts.root() } - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) 
-> Result<(), StoreError> { // Intentionally, we take and hold both locks, to prevent calls to `get_tx_inputs()` from // going through while we're updating the store's data structure let mut locked_accounts = self.accounts.write().await; @@ -206,7 +209,7 @@ impl MockStoreSuccess { debug_assert_eq!(locked_accounts.root(), header.account_root()); // update nullifiers - for nullifier in block.nullifiers() { + for nullifier in block.created_nullifiers() { locked_produced_nullifiers .insert(nullifier.inner(), [header.block_num().into(), ZERO, ZERO, ZERO]); } @@ -219,11 +222,11 @@ impl MockStoreSuccess { } // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); // update notes let mut locked_notes = self.notes.write().await; - for (note_index, note) in block.notes() { + for (note_index, note) in block.output_notes() { locked_notes.insert( note.id(), NoteInclusionProof::new( diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 51a678e22..4c9ba6912 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -11,7 +11,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::{AccountDelta, AccountId}, - block::{Block, BlockHeader, BlockNoteIndex, BlockNumber}, + block::{BlockHeader, BlockNoteIndex, BlockNumber, ProvenBlock}, crypto::{hash::rpo::RpoDigest, merkle::MerklePath, utils::Deserializable}, note::{NoteId, NoteInclusionProof, NoteMetadata, Nullifier}, transaction::TransactionId, @@ -408,7 +408,7 @@ impl Db { &self, allow_acquire: oneshot::Sender<()>, acquire_done: oneshot::Receiver<()>, - block: Block, + block: ProvenBlock, notes: Vec, ) -> Result<()> { self.pool @@ -423,7 +423,7 @@ impl Db { &transaction, &block.header(), ¬es, - block.nullifiers(), + block.created_nullifiers(), block.updated_accounts(), )?; diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 7ade42670..479638ea8 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -747,6 +747,11 @@ pub fn select_all_notes(conn: &mut Connection) -> Result> { let mut notes = vec![]; while let Some(row) = rows.next()? { + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch); let note_id_data = row.get_ref(3)?.as_blob()?; let note_id = RpoDigest::read_from_bytes(note_id_data)?; @@ -768,7 +773,7 @@ pub fn select_all_notes(conn: &mut Connection) -> Result> { notes.push(NoteRecord { block_num: read_block_number(row, 0)?, - note_index: BlockNoteIndex::new(row.get(1)?, row.get(2)?)?, + note_index, note_id, metadata, details, @@ -856,7 +861,11 @@ pub fn select_notes_since_block_by_tag_and_sender( let mut res = Vec::new(); while let Some(row) = rows.next()? { let block_num = read_block_number(row, 0)?; - let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. 
+ let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch); let note_id = read_from_blob_column(row, 3)?; let note_type = row.get::<_, u8>(4)?; let sender = read_from_blob_column(row, 5)?; @@ -919,6 +928,11 @@ pub fn select_notes_by_id(conn: &mut Connection, note_ids: &[NoteId]) -> Result< let mut notes = Vec::new(); while let Some(row) = rows.next()? { + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch); let note_id_data = row.get_ref(3)?.as_blob()?; let note_id = NoteId::read_from_bytes(note_id_data)?; @@ -939,7 +953,7 @@ pub fn select_notes_by_id(conn: &mut Connection, note_ids: &[NoteId]) -> Result< notes.push(NoteRecord { block_num: read_block_number(row, 0)?, - note_index: BlockNoteIndex::new(row.get(1)?, row.get(2)?)?, + note_index, details, note_id: note_id.into(), metadata, @@ -989,7 +1003,9 @@ pub fn select_note_inclusion_proofs( let batch_index = row.get(2)?; let note_index = row.get(3)?; - let node_index_in_block = BlockNoteIndex::new(batch_index, note_index)?.leaf_index_value(); + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let node_index_in_block = BlockNoteIndex::new(batch_index, note_index).leaf_index_value(); let merkle_path_data = row.get_ref(4)?.as_blob()?; let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index fe8d1684c..62919ea8e 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -175,7 +175,7 @@ fn sql_select_notes() { for i in 0..10 { let note = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, i as usize).unwrap(), + note_index: BlockNoteIndex::new(0, i as usize), note_id: num_to_rpo_digest(u64::from(i)), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -215,7 +215,7 @@ fn sql_select_notes_different_execution_hints() { let note_none = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 0).unwrap(), + note_index: BlockNoteIndex::new(0, 0), note_id: num_to_rpo_digest(0), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -239,7 +239,7 @@ fn sql_select_notes_different_execution_hints() { let note_always = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 1).unwrap(), + note_index: BlockNoteIndex::new(0, 1), note_id: num_to_rpo_digest(1), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -263,7 +263,7 @@ fn sql_select_notes_different_execution_hints() { let note_after_block = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 2).unwrap(), + note_index: BlockNoteIndex::new(0, 2), note_id: num_to_rpo_digest(2), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -880,7 +880,7 @@ fn notes() { assert!(res.is_empty()); // test insertion - let note_index = BlockNoteIndex::new(0, 2).unwrap(); + let note_index = BlockNoteIndex::new(0, 2); let note_id = num_to_rpo_digest(3); let tag = 5u32; let sender = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 2c6fa9e36..aecf56f33 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -11,7 +11,7 @@ use miden_objects::{ }, note::Nullifier, transaction::OutputNote, - 
AccountDeltaError, AccountError, BlockError, NoteError, + AccountDeltaError, AccountError, NoteError, }; use rusqlite::types::FromSqlError; use thiserror::Error; @@ -41,8 +41,9 @@ pub enum DatabaseError { AccountError(#[from] AccountError), #[error("account delta error")] AccountDeltaError(#[from] AccountDeltaError), + // TODO: Check if needed. #[error("block error")] - BlockError(#[from] BlockError), + BlockError, #[error("closed channel")] ClosedChannel(#[from] RecvError), #[error("deserialization failed")] @@ -136,8 +137,9 @@ pub enum GenesisError { // --------------------------------------------------------------------------------------------- #[error("database error")] DatabaseError(#[from] DatabaseError), + // TODO: Check if needed. #[error("block error")] - BlockError(#[from] BlockError), + BlockError, #[error("merkle error")] MerkleError(#[from] MerkleError), #[error("failed to deserialize genesis file")] diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index de128879c..90783f503 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -1,7 +1,7 @@ use miden_lib::transaction::TransactionKernel; use miden_objects::{ account::{delta::AccountUpdateDetails, Account}, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNumber}, + block::{BlockAccountUpdate, BlockHeader, BlockNumber, ProvenBlock}, crypto::merkle::{EmptySubtreeRoots, MmrPeaks, SimpleSmt, Smt}, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, Digest, ACCOUNT_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, @@ -26,7 +26,7 @@ impl GenesisState { } /// Returns the block header and the account SMT - pub fn into_block(self) -> Result { + pub fn into_block(self) -> Result { let accounts: Vec = self .accounts .iter() @@ -65,7 +65,7 @@ impl GenesisState { self.timestamp, ); - Block::new(header, accounts, vec![], vec![]).map_err(Into::into) + Ok(ProvenBlock::new_unchecked(header, accounts, vec![], vec![])) } } diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 24cde86a5..0064668e6 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -29,7 +29,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::AccountId, - block::{Block, BlockNumber}, + block::{BlockNumber, ProvenBlock}, crypto::hash::rpo::RpoDigest, note::{NoteId, Nullifier}, utils::{Deserializable, Serializable}, @@ -315,7 +315,7 @@ impl api_server::Api for StoreApi { debug!(target: COMPONENT, ?request); - let block = Block::read_from_bytes(&request.block).map_err(|err| { + let block = ProvenBlock::read_from_bytes(&request.block).map_err(|err| { Status::invalid_argument(format!("Block deserialization error: {err}")) })?; @@ -326,8 +326,8 @@ impl api_server::Api for StoreApi { block_num, block_hash = %block.hash(), account_count = block.updated_accounts().len(), - note_count = block.notes().count(), - nullifier_count = block.nullifiers().len(), + note_count = block.output_notes().count(), + nullifier_count = block.created_nullifiers().len(), ); self.state.apply_block(block).await?; diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index b08348e3a..17ec2cf45 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -25,7 +25,7 @@ use miden_node_proto::{ use miden_node_utils::formatting::format_array; use miden_objects::{ account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, - block::{Block, BlockHeader, BlockNumber}, + block::{BlockHeader, BlockNumber, ProvenBlock}, crypto::{ 
hash::rpo::RpoDigest, merkle::{ @@ -268,12 +268,12 @@ impl State { /// released. // TODO: This span is logged in a root span, we should connect it to the parent span. #[instrument(target = COMPONENT, skip_all, err)] - pub async fn apply_block(&self, block: Block) -> Result<(), ApplyBlockError> { + pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; let header = block.header(); - let tx_hash = block.compute_tx_hash(); + let tx_hash = BlockHeader::compute_tx_commitment(block.transactions()); if header.tx_hash() != tx_hash { return Err(InvalidBlockError::InvalidBlockTxHash { expected: tx_hash, @@ -324,7 +324,7 @@ impl State { // nullifiers can be produced only once let duplicate_nullifiers: Vec<_> = block - .nullifiers() + .created_nullifiers() .iter() .filter(|&n| inner.nullifier_tree.get_block_num(n).is_some()) .copied() @@ -343,7 +343,7 @@ impl State { // compute update for nullifier tree let nullifier_tree_update = inner.nullifier_tree.compute_mutations( - block.nullifiers().iter().map(|nullifier| (*nullifier, block_num)), + block.created_nullifiers().iter().map(|nullifier| (*nullifier, block_num)), ); if nullifier_tree_update.root() != header.nullifier_root() { @@ -373,13 +373,13 @@ impl State { }; // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); if note_tree.root() != header.note_root() { return Err(InvalidBlockError::NewBlockInvalidNoteRoot.into()); } let notes = block - .notes() + .output_notes() .map(|(note_index, note)| { let details = match note { OutputNote::Full(note) => Some(note.to_bytes()), From 1a6ad2cbd299246dc75b208e6be7e397d09c2b4e Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Thu, 20 Feb 2025 15:06:18 +0100 Subject: [PATCH 03/32] feat: Update `get_block_inputs` --- Cargo.lock | 20 +- crates/block-producer/Cargo.toml | 1 + crates/block-producer/src/block.rs | 106 -- .../block-producer/src/block_builder/mod.rs | 194 ++-- .../prover/asm/block_kernel.masm | 244 ----- .../src/block_builder/prover/block_witness.rs | 328 ------ .../src/block_builder/prover/mod.rs | 128 --- .../src/block_builder/prover/tests.rs | 939 ------------------ crates/block-producer/src/errors.rs | 45 +- crates/block-producer/src/lib.rs | 1 - crates/block-producer/src/store/mod.rs | 14 +- crates/block-producer/src/test_utils/block.rs | 55 +- crates/block-producer/src/test_utils/store.rs | 129 ++- crates/proto/src/domain/account.rs | 34 +- crates/proto/src/domain/block.rs | 107 +- crates/proto/src/domain/nullifier.rs | 22 +- crates/proto/src/generated/requests.rs | 17 +- crates/proto/src/generated/responses.rs | 44 +- crates/proto/src/lib.rs | 4 +- crates/rpc-proto/proto/requests.proto | 19 +- crates/rpc-proto/proto/responses.proto | 36 +- crates/store/src/errors.rs | 4 + crates/store/src/server/api.rs | 12 +- crates/store/src/state.rs | 169 ++-- proto/requests.proto | 19 +- proto/responses.proto | 36 +- 26 files changed, 545 insertions(+), 2182 deletions(-) delete mode 100644 crates/block-producer/src/block.rs delete mode 100644 crates/block-producer/src/block_builder/prover/asm/block_kernel.masm delete mode 100644 crates/block-producer/src/block_builder/prover/block_witness.rs delete mode 100644 crates/block-producer/src/block_builder/prover/mod.rs delete mode 100644 crates/block-producer/src/block_builder/prover/tests.rs diff --git a/Cargo.lock b/Cargo.lock index ae688eaca..e2ae127ad 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -837,7 +837,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1703,6 +1703,17 @@ dependencies = [ "unicode-width 0.2.0", ] +[[package]] +name = "miden-block-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +dependencies = [ + "miden-crypto", + "miden-lib", + "miden-objects", + "thiserror 2.0.11", +] + [[package]] name = "miden-core" version = "0.12.0" @@ -1867,6 +1878,7 @@ dependencies = [ "futures", "itertools 0.14.0", "miden-air", + "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-test-macro", @@ -2918,7 +2930,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3322,7 +3334,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -4131,7 +4143,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 89a3f0cbc..2b904e44d 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,6 +21,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { workspace = true } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-batch-expiration" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } diff --git a/crates/block-producer/src/block.rs b/crates/block-producer/src/block.rs deleted file mode 100644 index 857a5b018..000000000 --- a/crates/block-producer/src/block.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::collections::BTreeMap; - -use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, - errors::{ConversionError, MissingFieldHelper}, - generated::responses::GetBlockInputsResponse, - AccountInputRecord, NullifierWitness, -}; -use miden_objects::{ - account::AccountId, - block::BlockHeader, - crypto::merkle::{MerklePath, MmrPeaks, SmtProof}, - note::Nullifier, - Digest, -}; - -// BLOCK INPUTS -// ================================================================================================ - -/// Information needed from the store to build a block -#[derive(Clone, Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub accounts: BTreeMap, - - /// The requested nullifiers and their authentication paths - pub nullifiers: BTreeMap, - - /// List of unauthenticated notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -#[derive(Clone, Debug, Default)] -pub struct AccountWitness { - pub hash: Digest, - pub proof: MerklePath, -} - -impl TryFrom for BlockInputs { - type Error = ConversionError; - - fn try_from(response: GetBlockInputsResponse) -> Result { - let block_header: BlockHeader = response - .block_header - 
.ok_or(miden_node_proto::generated::block::BlockHeader::missing_field("block_header"))? - .try_into()?; - - let chain_peaks = { - // setting the number of leaves to the current block number gives us one leaf less than - // what is currently in the chain MMR (i.e., chain MMR with block_num = 1 has 2 leave); - // this is because GetBlockInputs returns the state of the chain MMR as of one block - // ago so that block_header.chain_root matches the hash of MMR peaks. - let num_leaves = block_header.block_num().as_usize(); - - MmrPeaks::new( - num_leaves, - response - .mmr_peaks - .into_iter() - .map(TryInto::try_into) - .collect::>()?, - )? - }; - - let accounts = response - .account_states - .into_iter() - .map(|entry| { - let domain: AccountInputRecord = entry.try_into()?; - let witness = AccountWitness { - hash: domain.account_hash, - proof: domain.proof, - }; - Ok((domain.account_id, witness)) - }) - .collect::, ConversionError>>()?; - - let nullifiers = response - .nullifiers - .into_iter() - .map(|entry| { - let witness: NullifierWitness = entry.try_into()?; - Ok((witness.nullifier, witness.proof)) - }) - .collect::, ConversionError>>()?; - - let found_unauthenticated_notes = response - .found_unauthenticated_notes - .ok_or(GetBlockInputsResponse::missing_field("found_authenticated_notes"))? - .try_into()?; - - Ok(Self { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } -} diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 738260312..acaecf7fe 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,30 +1,23 @@ -use std::{ - collections::BTreeSet, - ops::{Add, Range}, -}; +use std::ops::Range; use futures::FutureExt; +use miden_block_prover::LocalBlockProver; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ - account::AccountId, batch::ProvenBatch, - block::{BlockNumber, ProvenBlock}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote}, + block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, + note::NoteHeader, + MIN_PROOF_SECURITY_LEVEL, }; use rand::Rng; use tokio::time::Duration; use tracing::{instrument, Span}; use crate::{ - block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; -pub(crate) mod prover; - -use self::prover::{block_witness::BlockWitness, BlockProver}; - // BLOCK BUILDER // ================================================================================================= @@ -39,7 +32,9 @@ pub struct BlockBuilder { pub failure_rate: f64, pub store: StoreClient, - pub block_kernel: BlockProver, + + /// The prover used to prove a proposed block into a proven block. + pub block_prover: LocalBlockProver, } impl BlockBuilder { @@ -49,7 +44,7 @@ impl BlockBuilder { // Note: The range cannot be empty. 
simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, - block_kernel: BlockProver::new(), + block_prover: LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL), store, } } @@ -98,9 +93,9 @@ impl BlockBuilder { Self::select_block(mempool) .inspect(SelectedBlock::inject_telemetry) .then(|selected| self.get_block_inputs(selected)) - .inspect_ok(BlockSummaryAndInputs::inject_telemetry) + .inspect_ok(BlockBatchesAndInputs::inject_telemetry) .and_then(|inputs| self.prove_block(inputs)) - .inspect_ok(ProvenBlockWrapper::inject_telemetry) + .inspect_ok(BuiltBlock::inject_telemetry) // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot // handle errors after it considers the process complete (which makes sense). .and_then(|proven_block| async { self.inject_failure(proven_block) }) @@ -119,68 +114,91 @@ impl BlockBuilder { SelectedBlock { block_number, batches } } + /// Fetches block inputs from the store for the [`SelectedBlock`]. + /// + /// For a given set of batches, we need to get the following block inputs from the store: + /// + /// - Note inclusion proofs for unauthenticated notes (not required to be complete due to the + /// possibility of note erasure) + /// - A chain MMR with: + /// - All blocks referenced by batches + /// - All blocks referenced by note inclusion proofs + /// - Account witnesses for all accounts updated in the block + /// - Nullifier witnesses for all nullifiers created in the block + /// - Due to note erasure the set of nullifiers the block creates it not necessarily equal to + /// the union of sets of all nullifiers created in proven batches. However, since we don't + /// yet know which nullifiers the block will actually create, we fetch witnesses for all + /// nullifiers created by batches. If we knew that a certain note will be erased, we would + /// not have to supply a nullifier witness for it. #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] async fn get_block_inputs( &self, selected_block: SelectedBlock, - ) -> Result { + ) -> Result { let SelectedBlock { block_number: _, batches } = selected_block; - let summary = BlockSummary::summarize_batches(&batches); + + let batch_iter = batches.iter(); + + let unauthenticated_notes_iter = batch_iter.clone().flat_map(|batch| { + // Note: .cloned() shouldn't be necessary but not having it produces an odd lifetime + // error in BlockProducer::serve. Not sure if there's a better fix. 
Error: + // implementation of `FnOnce` is not general enough + // closure with signature `fn(&InputNoteCommitment) -> miden_objects::note::NoteId` must + // implement `FnOnce<(&InputNoteCommitment,)>` ...but it actually implements + // `FnOnce<(&InputNoteCommitment,)>` + batch + .input_notes() + .iter() + .cloned() + .filter_map(|note| note.header().map(NoteHeader::id)) + }); + let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); + let account_ids = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); + let created_nullifiers = batch_iter.flat_map(ProvenBatch::produced_nullifiers); let inputs = self .store .get_block_inputs( - summary.updated_accounts.iter().copied(), - summary.nullifiers.iter(), - summary.dangling_notes.iter(), + account_ids, + created_nullifiers, + unauthenticated_notes_iter, + block_references_iter, ) .await .map_err(BuildBlockError::GetBlockInputsFailed)?; - let missing_notes: Vec<_> = summary - .dangling_notes - .difference(&inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); - } - - Ok(BlockSummaryAndInputs { batches, summary, inputs }) + Ok(BlockBatchesAndInputs { batches, inputs }) } #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, - preimage: BlockSummaryAndInputs, - ) -> Result { - let BlockSummaryAndInputs { batches, summary, inputs } = preimage; + preimage: BlockBatchesAndInputs, + ) -> Result { + let BlockBatchesAndInputs { batches, inputs } = preimage; - let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; + // Question: Should we split proposing and proving in two stages for telemetry reasons? + let proposed_block = + ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?; - let new_block_header = self.block_kernel.prove(block_header_witness)?; - - // TODO: Update. Temporarily left in an incorrect state. 
- let block = ProvenBlock::new_unchecked( - new_block_header, - updated_accounts, - vec![], - summary.nullifiers, - ); + let proven_block = self + .block_prover + .prove(proposed_block) + .map_err(BuildBlockError::ProveBlockFailed)?; self.simulate_proving().await; - Ok(ProvenBlockWrapper { block }) + Ok(BuiltBlock { block: proven_block }) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - proven_block: ProvenBlockWrapper, + built_block: BuiltBlock, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&proven_block.block) + .apply_block(&built_block.block) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; @@ -220,65 +238,16 @@ impl BlockBuilder { } } -struct BlockSummary { - updated_accounts: BTreeSet, - nullifiers: Vec, - output_notes: Vec>, - dangling_notes: BTreeSet, -} - -impl BlockSummary { - #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] - fn summarize_batches(batches: &[ProvenBatch]) -> Self { - let updated_accounts: BTreeSet = batches - .iter() - .flat_map(ProvenBatch::account_updates) - .map(|(account_id, _)| *account_id) - .collect(); - - let output_notes: Vec<_> = - batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - - let nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - // Populate set of output notes from all batches - let output_notes_set: BTreeSet<_> = output_notes - .iter() - .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) - .collect(); - - // Build a set of unauthenticated input notes for this block which do not have a - // matching output note produced in this block - let dangling_notes: BTreeSet<_> = batches - .iter() - .flat_map(ProvenBatch::input_notes) - .filter_map(InputNoteCommitment::header) - .map(NoteHeader::id) - .filter(|note_id| !output_notes_set.contains(note_id)) - .collect(); - - Self { - updated_accounts, - nullifiers, - output_notes, - dangling_notes, - } - } -} - struct SelectedBlock { block_number: BlockNumber, batches: Vec, } -struct BlockSummaryAndInputs { +struct BlockBatchesAndInputs { batches: Vec, - summary: BlockSummary, inputs: BlockInputs, } -// TODO: Is this still needed? If so, what should be its name? -struct ProvenBlockWrapper { +struct BuiltBlock { block: ProvenBlock, } @@ -290,34 +259,30 @@ impl SelectedBlock { } } -impl BlockSummaryAndInputs { +impl BlockBatchesAndInputs { fn inject_telemetry(&self) { let span = Span::current(); // SAFETY: We do not expect to have more than u32::MAX of any count per block. 
span.set_attribute( "block.updated_accounts.count", - i64::try_from(self.summary.updated_accounts.len()) + i64::try_from(self.inputs.account_witnesses().len()) .expect("less than u32::MAX account updates"), ); - span.set_attribute( - "block.output_notes.count", - i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len()))) - .expect("less than u32::MAX output notes"), - ); span.set_attribute( "block.nullifiers.count", - i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + i64::try_from(self.inputs.nullifier_witnesses().len()) + .expect("less than u32::MAX nullifiers"), ); span.set_attribute( - "block.dangling_notes.count", - i64::try_from(self.summary.dangling_notes.len()) + "block.unauthenticated_notes.count", + i64::try_from(self.inputs.unauthenticated_note_proofs().len()) .expect("less than u32::MAX dangling notes"), ); } } -impl ProvenBlockWrapper { +impl BuiltBlock { fn inject_telemetry(&self) { let span = Span::current(); let header = self.block.header(); @@ -328,6 +293,13 @@ impl ProvenBlockWrapper { span.set_attribute("block.protocol.version", i64::from(header.version())); + // Question: Should this be here? (Moved here because output notes are no longer) + span.set_attribute( + "block.output_notes.count", + i64::try_from(self.block.output_notes().count()) + .expect("less than u32::MAX output notes"), + ); + span.set_attribute("block.commitments.kernel", header.kernel_root()); span.set_attribute("block.commitments.nullifier", header.nullifier_root()); span.set_attribute("block.commitments.account", header.account_root()); diff --git a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm b/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm deleted file mode 100644 index 309501d59..000000000 --- a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm +++ /dev/null @@ -1,244 +0,0 @@ -#! Note: For now, the "block kernel" only computes the account root. Eventually, it will compute -#! the entire block header. -#! -#! Stack inputs: [num_accounts_updated, OLD_ACCOUNT_ROOT, NEW_ACCOUNT_HASH_0, account_id_0, ... , -#! NEW_ACCOUNT_HASH_n, account_id_n] - -use.std::collections::smt -use.std::collections::mmr -use.std::sys - -const.ACCOUNT_TREE_DEPTH=64 -const.BLOCK_NOTES_BATCH_TREE_DEPTH=6 -const.CHAIN_MMR_PTR=1000 - -#! Compute the account root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_accounts_updated, OLD_ACCOUNT_ROOT, [NEW_ACCOUNT_HASH_i, account_id_i]] -#! Outputs: -#! 
Operand stack: [NEW_ACCOUNT_ROOT] -proc.compute_account_root - # move the number of updated accounts and an old account root to the operand stack - adv_push.5 - # OS => [OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - while.true - # num_accounts_updated here serves as a counter, so rename it accordingly - # old account root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # move the account hash to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [account_id_i, [NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # move the account id to the operand stack, push the account tree depth - adv_push.1 push.ACCOUNT_TREE_DEPTH - # OS => [account_tree_depth, account_id_i, ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the note root. -#! -#! Each batch contains a tree of depth 10 for its created notes. The block's created notes tree is -#! created by aggregating up to 2^6 tree roots coming from the batches contained in the block. -#! -#! `SMT_EMPTY_ROOT` must be `E16`, the root of the empty tree of depth 16. If less than 2^6 batches -#! are contained in the block, `E10` is used as the padding value; this is derived from the fact -#! that `SMT_EMPTY_ROOT` is `E16`, and that our tree has depth 6. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_notes_updated, SMT_EMPTY_ROOT, [BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] -#! Outputs: -#! 
Operand stack: [NOTES_ROOT] -proc.compute_note_root - # move the number of updated notes and empty root to the operand stack - adv_push.5 - # OS => [SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - while.true - # num_notes_updated here serves as a counter, so rename it accordingly - # empty root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # move the batch note tree root to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [batch_note_root_idx_i, [BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # move the batch note root index to the operand stack, push the block notes batch tree depth - adv_push.1 push.BLOCK_NOTES_BATCH_TREE_DEPTH - # OS => [batch_tree_depth, batch_note_root_idx_i, ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the nullifier root. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_produced_nullifiers, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, [NULLIFIER_i]] -#! Outputs: -#! Operand stack: [NULLIFIER_ROOT] -proc.compute_nullifier_root - # move the number of produced nullifiers, old root and nullifier value to the operand stack; - # move nullifier value below the root - adv_push.9 swapw - # OS => [OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - # assess if we should loop - dup.8 neq.0 - # OS => [flag, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - while.true - # num_produced_nullifiers here serves as a counter, so rename it accordingly - # old nullifier root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_i]] - - # move the nullifier hash to the operand stack - adv_push.4 - # OS => [NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # dup the nullifier value - dupw.2 - # OS => [NULLIFIER_VALUE, NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - exec.smt::set - # OS => [OLD_VALUE, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # Check that OLD_VALUE == 0 (i.e. that nullifier was indeed not previously produced) - assertz assertz assertz assertz - # OS => [ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # loop counter - movup.8 sub.1 dup movdn.9 neq.0 - # OS => [flag, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - end - - # drop the counter and the nullifier value - swapw dropw movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the chain MMR root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [PREV_BLOCK_HASH_TO_INSERT, PREV_CHAIN_MMR_HASH] -#! Advice map: { -#! 
PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [CHAIN_MMR_ROOT] -proc.compute_chain_mmr_root - # move the previous block hash and chain MMR hash to the operand stack - adv_push.8 - # OS => [PREV_CHAIN_MMR_HASH, PREV_BLOCK_HASH_TO_INSERT] - # AS => [] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_CHAIN_MMR_HASH, chain_mmr_ptr, PREV_BLOCK_HASH_TO_INSERT] - - # load the chain MMR (as of previous block) at memory location CHAIN_MMR_PTR - exec.mmr::unpack - # OS => [PREV_BLOCK_HASH_TO_INSERT] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_BLOCK_HASH_TO_INSERT, chain_mmr_ptr] - - # add PREV_BLOCK_HASH_TO_INSERT to chain MMR - exec.mmr::add - # OS => [] - - # Compute new MMR root - push.CHAIN_MMR_PTR exec.mmr::pack - # OS => [CHAIN_MMR_ROOT] -end - -#! Inputs: -#! Operand stack: [] -#! Advice stack: [, , , ] -#! Advice map: { -#! PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] -begin - exec.compute_account_root mem_storew.0 dropw - # => [, , ] - - exec.compute_note_root mem_storew.4 dropw - # => [, ] - - exec.compute_nullifier_root mem_storew.8 dropw - # => [] - - exec.compute_chain_mmr_root - # => [CHAIN_MMR_ROOT] - - # Load output on stack - padw mem_loadw.8 padw mem_loadw.4 padw mem_loadw.0 - # => [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] - - # truncate the stack - exec.sys::truncate_stack -end diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs deleted file mode 100644 index eac670ad4..000000000 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ /dev/null @@ -1,328 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::{BatchAccountUpdate, BatchNoteTree, ProvenBatch}, - block::{BlockAccountUpdate, BlockHeader}, - crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, - note::Nullifier, - transaction::TransactionId, - vm::{AdviceInputs, StackInputs}, - Digest, Felt, BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, ZERO, -}; - -use crate::{ - block::BlockInputs, - errors::{BlockProverError, BuildBlockError}, -}; - -// BLOCK WITNESS -// ================================================================================================= - -/// Provides inputs to the `BlockKernel` so that it can generate the new header. -#[derive(Debug, PartialEq)] -pub struct BlockWitness { - pub(super) updated_accounts: Vec<(AccountId, AccountUpdateWitness)>, - /// (`batch_index`, `created_notes_root`) for batches that contain notes - pub(super) batch_created_notes_roots: BTreeMap, - pub(super) produced_nullifiers: BTreeMap, - pub(super) chain_peaks: MmrPeaks, - pub(super) prev_header: BlockHeader, -} - -impl BlockWitness { - pub fn new( - mut block_inputs: BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(Self, Vec), BuildBlockError> { - // This limit should be enforced by the mempool. 
- assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); - - Self::validate_nullifiers(&block_inputs, batches)?; - - let batch_created_notes_roots = batches - .iter() - .enumerate() - .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| { - let batch_note_tree = BatchNoteTree::with_contiguous_leaves( - batch.output_notes().iter().map(|note| (note.id(), note.metadata())), - ) - .expect("number of output notes in batch should be within the allowed range"); - (batch_index, batch_note_tree.root()) - }) - .collect(); - - // Order account updates by account ID and each update's initial state hash. - // - // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = - BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { - updated_accounts - .entry(*account_id) - .or_default() - .insert(update.initial_state_commitment(), update.clone()); - } - - // Build account witnesses. - let mut account_witnesses = Vec::with_capacity(updated_accounts.len()); - let mut block_updates = Vec::with_capacity(updated_accounts.len()); - - for (account_id, mut updates) in updated_accounts { - let (initial_state_hash, proof) = block_inputs - .accounts - .remove(&account_id) - .map(|witness| (witness.hash, witness.proof)) - .ok_or(BuildBlockError::MissingAccountInput(account_id))?; - - let mut details: Option = None; - - // Chronologically chain updates for this account together using the state hashes to - // link them. - let mut transactions = Vec::new(); - let mut current_hash = initial_state_hash; - while !updates.is_empty() { - let update = updates.remove(¤t_hash).ok_or_else(|| { - BuildBlockError::InconsistentAccountStateTransition( - account_id, - current_hash, - updates.keys().copied().collect(), - ) - })?; - - current_hash = update.final_state_commitment(); - let (update_transactions, update_details) = update.into_parts(); - transactions.extend(update_transactions); - - details = Some(match details { - None => update_details, - Some(details) => details.merge(update_details).map_err(|source| { - BuildBlockError::AccountUpdateError { account_id, source } - })?, - }); - } - - account_witnesses.push(( - account_id, - AccountUpdateWitness { - initial_state_hash, - final_state_hash: current_hash, - proof, - transactions: transactions.clone(), - }, - )); - - block_updates.push(BlockAccountUpdate::new( - account_id, - current_hash, - details.expect("Must be some by now"), - transactions, - )); - } - - if !block_inputs.accounts.is_empty() { - return Err(BuildBlockError::ExtraStoreData( - block_inputs.accounts.keys().copied().collect(), - )); - } - - Ok(( - Self { - updated_accounts: account_witnesses, - batch_created_notes_roots, - produced_nullifiers: block_inputs.nullifiers, - chain_peaks: block_inputs.chain_peaks, - prev_header: block_inputs.block_header, - }, - block_updates, - )) - } - - /// Converts [`BlockWitness`] into inputs to the block kernel program - pub(super) fn into_program_inputs( - self, - ) -> Result<(AdviceInputs, StackInputs), BlockProverError> { - let advice_inputs = self.build_advice_inputs()?; - - Ok((advice_inputs, StackInputs::default())) - } - - /// Returns an iterator over all transactions which affected accounts in the block with - /// corresponding account IDs. 
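The removed `BlockWitness::new` above orders one account's updates across batches by treating every update as a link from its initial to its final state commitment and walking the chain starting from the state reported by the store. A stripped-down sketch of that ordering technique, with plain `u64`s standing in for commitments and a placeholder `Update` type (the removed implementation additionally groups by account and merges update details):

use std::collections::BTreeMap;

// Placeholder update: a transition from `initial` to `next` state commitment.
struct Update {
    initial: u64,
    next: u64,
}

// Orders `updates` chronologically by following commitment links, starting from
// the state the store reported. Returns the commitment at which the chain broke
// if no update starts there.
fn chain_updates(initial_state: u64, updates: Vec<Update>) -> Result<Vec<Update>, u64> {
    // Index each update by the state it starts from.
    let mut by_initial: BTreeMap<u64, Update> =
        updates.into_iter().map(|update| (update.initial, update)).collect();

    let mut ordered = Vec::with_capacity(by_initial.len());
    let mut current = initial_state;
    while !by_initial.is_empty() {
        let update = by_initial.remove(&current).ok_or(current)?;
        current = update.next;
        ordered.push(update);
    }
    Ok(ordered)
}

fn main() {
    // Updates arrive out of order across batches: 2 -> 3 first, then 1 -> 2.
    let updates = vec![Update { initial: 2, next: 3 }, Update { initial: 1, next: 2 }];
    let ordered = chain_updates(1, updates).expect("states should chain");
    assert_eq!(ordered.iter().map(|u| u.next).collect::<Vec<_>>(), vec![2, 3]);
}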
- pub(super) fn transactions(&self) -> impl Iterator + '_ { - self.updated_accounts.iter().flat_map(|(account_id, update)| { - update.transactions.iter().map(move |tx_id| (*tx_id, *account_id)) - }) - } - - // HELPERS - // --------------------------------------------------------------------------------------------- - - /// Validates that the nullifiers returned from the store are the same the produced nullifiers - /// in the batches. Note that validation that the value of the nullifiers is `0` will be - /// done in MASM. - fn validate_nullifiers( - block_inputs: &BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(), BuildBlockError> { - let produced_nullifiers_from_store: BTreeSet = - block_inputs.nullifiers.keys().copied().collect(); - - let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - if produced_nullifiers_from_store == produced_nullifiers_from_batches { - Ok(()) - } else { - let differing_nullifiers: Vec = produced_nullifiers_from_store - .symmetric_difference(&produced_nullifiers_from_batches) - .copied() - .collect(); - - Err(BuildBlockError::InconsistentNullifiers(differing_nullifiers)) - } - } - - /// Builds the advice inputs to the block kernel - fn build_advice_inputs(self) -> Result { - let advice_stack = { - let mut advice_stack = Vec::new(); - - // add account stack inputs to the advice stack - { - let mut account_data = Vec::new(); - let mut num_accounts_updated: u64 = 0; - for (idx, (account_id, account_update)) in self.updated_accounts.iter().enumerate() - { - account_data.extend(account_update.final_state_hash); - account_data.push(account_id.prefix().as_felt()); - - let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); - num_accounts_updated = idx + 1; - } - - // append number of accounts updated - advice_stack.push(num_accounts_updated.try_into().expect( - "updated accounts number is greater than or equal to the field modulus", - )); - - // append initial account root - advice_stack.extend(self.prev_header.account_root()); - - // append the updated accounts data - advice_stack.extend(account_data); - } - - // add notes stack inputs to the advice stack - { - // append the number of updated notes - advice_stack - .push(Felt::try_from(self.batch_created_notes_roots.len() as u64).expect( - "notes roots number is greater than or equal to the field modulus", - )); - - // append the empty root - let empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - advice_stack.extend(*empty_root); - - for (batch_index, batch_created_notes_root) in &self.batch_created_notes_roots { - advice_stack.extend(batch_created_notes_root.iter()); - - let batch_index = Felt::try_from(*batch_index as u64) - .expect("batch index is greater than or equal to the field modulus"); - advice_stack.push(batch_index); - } - } - - // Nullifiers stack inputs - { - let num_produced_nullifiers: Felt = (self.produced_nullifiers.len() as u64) - .try_into() - .expect("nullifiers number is greater than or equal to the field modulus"); - - // append number of nullifiers - advice_stack.push(num_produced_nullifiers); - - // append initial nullifier root - advice_stack.extend(self.prev_header.nullifier_root()); - - // append nullifier value (`[block_num, 0, 0, 0]`) - let block_num = self.prev_header.block_num() + 1; - advice_stack.extend([block_num.into(), ZERO, ZERO, ZERO]); - - for nullifier in self.produced_nullifiers.keys() { - advice_stack.extend(nullifier.inner()); - } - } - - // Chain MMR stack inputs - 
{ - advice_stack.extend(self.prev_header.hash()); - advice_stack.extend(self.chain_peaks.hash_peaks()); - } - - advice_stack - }; - - let merkle_store = { - let mut merkle_store = MerkleStore::default(); - - // add accounts merkle paths - merkle_store - .add_merkle_paths(self.updated_accounts.into_iter().map( - |(account_id, AccountUpdateWitness { initial_state_hash, proof, .. })| { - (account_id.prefix().into(), initial_state_hash, proof) - }, - )) - .map_err(BlockProverError::InvalidMerklePaths)?; - - // add nullifiers merkle paths - merkle_store - .add_merkle_paths(self.produced_nullifiers.iter().map(|(nullifier, proof)| { - // Note: the initial value for all nullifiers in the tree is `[0, 0, 0, 0]` - ( - u64::from(nullifier.most_significant_felt()), - Digest::default(), - proof.path().clone(), - ) - })) - .map_err(BlockProverError::InvalidMerklePaths)?; - - merkle_store - }; - - let advice_map: Vec<_> = self - .produced_nullifiers - .values() - .map(|proof| (proof.leaf().hash(), proof.leaf().to_elements())) - .chain(std::iter::once(mmr_peaks_advice_map_key_value(&self.chain_peaks))) - .collect(); - - let advice_inputs = AdviceInputs::default() - .with_merkle_store(merkle_store) - .with_map(advice_map) - .with_stack(advice_stack); - - Ok(advice_inputs) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(super) struct AccountUpdateWitness { - pub initial_state_hash: Digest, - pub final_state_hash: Digest, - pub proof: MerklePath, - pub transactions: Vec, -} - -// HELPERS -// ================================================================================================= - -// Generates the advice map key/value for Mmr peaks -fn mmr_peaks_advice_map_key_value(peaks: &MmrPeaks) -> (Digest, Vec) { - let mut elements = vec![Felt::new(peaks.num_leaves() as u64), ZERO, ZERO, ZERO]; - elements.extend(peaks.flatten_and_pad_peaks()); - - (peaks.hash_peaks(), elements) -} diff --git a/crates/block-producer/src/block_builder/prover/mod.rs b/crates/block-producer/src/block_builder/prover/mod.rs deleted file mode 100644 index 2f141c352..000000000 --- a/crates/block-producer/src/block_builder/prover/mod.rs +++ /dev/null @@ -1,128 +0,0 @@ -use std::time::{SystemTime, UNIX_EPOCH}; - -use miden_lib::transaction::TransactionKernel; -use miden_objects::{assembly::Assembler, block::BlockHeader, Digest}; -use miden_processor::{execute, DefaultHost, ExecutionOptions, MemAdviceProvider, Program}; -use miden_stdlib::StdLibrary; - -use self::block_witness::BlockWitness; -use crate::errors::{BlockProverError, BuildBlockError}; - -/// The index of the word at which the account root is stored on the output stack. -pub const ACCOUNT_ROOT_WORD_IDX: usize = 0; - -/// The index of the word at which the note root is stored on the output stack. -pub const NOTE_ROOT_WORD_IDX: usize = 4; - -/// The index of the word at which the nullifier root is stored on the output stack. -pub const NULLIFIER_ROOT_WORD_IDX: usize = 8; - -/// The index of the word at which the note root is stored on the output stack. 
-pub const CHAIN_MMR_ROOT_WORD_IDX: usize = 12; - -pub mod block_witness; - -#[cfg(test)] -mod tests; - -const BLOCK_KERNEL_MASM: &str = include_str!("asm/block_kernel.masm"); - -#[derive(Debug)] -pub(crate) struct BlockProver { - kernel: Program, -} - -impl BlockProver { - pub fn new() -> Self { - let account_program = { - let assembler = Assembler::default() - .with_library(StdLibrary::default()) - .expect("failed to load std-lib"); - - assembler - .assemble_program(BLOCK_KERNEL_MASM) - .expect("failed to load account update program") - }; - - Self { kernel: account_program } - } - - // Note: this will eventually all be done in the VM, and also return an `ExecutionProof` - pub fn prove(&self, witness: BlockWitness) -> Result { - let prev_hash = witness.prev_header.hash(); - let block_num = witness.prev_header.block_num() + 1; - let version = witness.prev_header.version(); - - let tx_hash = BlockHeader::compute_tx_commitment(witness.transactions()); - let (account_root, note_root, nullifier_root, chain_root) = self.compute_roots(witness)?; - - let proof_hash = Digest::default(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("today is expected to be after 1970") - .as_secs() - .try_into() - .expect("timestamp must fit in a `u32`"); - - Ok(BlockHeader::new( - version, - prev_hash, - block_num, - chain_root, - account_root, - nullifier_root, - note_root, - tx_hash, - TransactionKernel::kernel_root(), - proof_hash, - timestamp, - )) - } - - fn compute_roots( - &self, - witness: BlockWitness, - ) -> Result<(Digest, Digest, Digest, Digest), BlockProverError> { - let (advice_inputs, stack_inputs) = witness.into_program_inputs()?; - let mut host = { - let advice_provider = MemAdviceProvider::from(advice_inputs); - - let mut host = DefaultHost::new(advice_provider); - host.load_mast_forest(StdLibrary::default().mast_forest().clone()) - .expect("failed to load mast forest"); - - host - }; - - let execution_output = - execute(&self.kernel, stack_inputs, &mut host, ExecutionOptions::default()) - .map_err(BlockProverError::ProgramExecutionFailed)?; - - let new_account_root = execution_output - .stack_outputs() - .get_stack_word(ACCOUNT_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("account"))?; - - let new_note_root = execution_output - .stack_outputs() - .get_stack_word(NOTE_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("note"))?; - - let new_nullifier_root = execution_output - .stack_outputs() - .get_stack_word(NULLIFIER_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("nullifier"))?; - - let new_chain_mmr_root = execution_output - .stack_outputs() - .get_stack_word(CHAIN_MMR_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("chain mmr"))?; - - Ok(( - new_account_root.into(), - new_note_root.into(), - new_nullifier_root.into(), - new_chain_mmr_root.into(), - )) - } -} diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs deleted file mode 100644 index b26f3895d..000000000 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ /dev/null @@ -1,939 +0,0 @@ -use std::{collections::BTreeMap, iter}; - -use assert_matches::assert_matches; -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_objects::{ - account::{ - delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, - }, - batch::ProvenBatch, - block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, - crypto::merkle::{ - 
EmptySubtreeRoots, LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, - }, - note::{NoteExecutionHint, NoteHeader, NoteMetadata, NoteTag, NoteType, Nullifier}, - testing::account_id::{ - ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, - }, - transaction::{OutputNote, ProvenTransaction}, - Felt, BATCH_NOTE_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, ONE, ZERO, -}; - -use self::block_witness::AccountUpdateWitness; -use super::*; -use crate::{ - block::{AccountWitness, BlockInputs}, - test_utils::{ - batch::TransactionBatchConstructor, - block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, - MockProvenTxBuilder, MockStoreSuccessBuilder, - }, -}; - -// BLOCK WITNESS TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of account ids. -/// -/// The store will contain accounts 1 & 2, while the transaction batches will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_account_ids() { - let account_id_1 = AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_2 = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_3 = AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - (account_id_1, AccountWitness::default()), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account( - account_id_3, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert!(block_witness_result.is_err()); -} - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different at least 1 account who's state hash is different. 
-/// -/// Only account 1 will have a different state hash -#[test] -fn block_witness_validation_inconsistent_account_hashes() { - let account_id_1 = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let account_id_2 = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let account_1_hash_store = - Digest::new([Felt::new(1u64), Felt::new(2u64), Felt::new(3u64), Felt::new(4u64)]); - let account_1_hash_batches = - Digest::new([Felt::new(4u64), Felt::new(3u64), Felt::new(2u64), Felt::new(1u64)]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - ( - account_id_1, - AccountWitness { - hash: account_1_hash_store, - proof: MerklePath::default(), - }, - ), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()]); - - let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()]); - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentAccountStateTransition( - account_id, - account_hash_store, - account_hash_batches - )) => { - assert_eq!(account_id, account_id_1); - assert_eq!(account_hash_store, account_1_hash_store); - assert_eq!(account_hash_batches, vec![account_1_hash_batches]); - } - ); -} - -/// Creates two batches which each update the same pair of accounts. -/// -/// The transactions are ordered such that the batches cannot be chronologically ordered -/// themselves: `[tx_x0, tx_y1], [tx_y0, tx_x1]`. This test ensures that the witness is -/// produced correctly as if for a single batch: `[tx_x0, tx_x1, tx_y0, tx_y1]`. 
-#[test] -fn block_witness_multiple_batches_per_account() { - let x_account_id = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let y_account_id = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let x_hashes = [ - Digest::new((0..4).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((4..8).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((8..12).map(Felt::new).collect::>().try_into().unwrap()), - ]; - let y_hashes = [ - Digest::new((12..16).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((16..20).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((20..24).map(Felt::new).collect::>().try_into().unwrap()), - ]; - - let x_txs = [ - MockProvenTxBuilder::with_account(x_account_id, x_hashes[0], x_hashes[1]).build(), - MockProvenTxBuilder::with_account(x_account_id, x_hashes[1], x_hashes[2]).build(), - ]; - let y_txs = [ - MockProvenTxBuilder::with_account(y_account_id, y_hashes[0], y_hashes[1]).build(), - MockProvenTxBuilder::with_account(y_account_id, y_hashes[1], y_hashes[2]).build(), - ]; - - let x_proof = MerklePath::new(vec![Digest::new( - (24..28).map(Felt::new).collect::>().try_into().unwrap(), - )]); - let y_proof = MerklePath::new(vec![Digest::new( - (28..32).map(Felt::new).collect::>().try_into().unwrap(), - )]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let x_witness = AccountWitness { - hash: x_hashes[0], - proof: x_proof.clone(), - }; - let y_witness = AccountWitness { - hash: y_hashes[0], - proof: y_proof.clone(), - }; - let accounts = BTreeMap::from_iter([(x_account_id, x_witness), (y_account_id, y_witness)]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); - let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - let account_witnesses = block_witness.updated_accounts.into_iter().collect::>(); - - let x_expected = AccountUpdateWitness { - initial_state_hash: x_hashes[0], - final_state_hash: *x_hashes.last().unwrap(), - proof: x_proof, - transactions: x_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let y_expected = AccountUpdateWitness { - initial_state_hash: y_hashes[0], - final_state_hash: *y_hashes.last().unwrap(), - proof: y_proof, - transactions: y_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let expected = [(x_account_id, x_expected), (y_account_id, y_expected)].into(); - - assert_eq!(account_witnesses, expected); -} - -// ACCOUNT ROOT TESTS -// ================================================================================================= - -/// Tests that the `BlockProver` computes the proper account root. -/// -/// We assume an initial store with 5 accounts, and all will be updated. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_success() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - let account_final_states = [ - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = account_ids - .iter() - .enumerate() - .map(|(idx, &account_id)| { - MockProvenTxBuilder::with_account( - account_id, - account_initial_states[idx].into(), - account_final_states[idx].into(), - ) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Update SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let block = MockBlockBuilder::new(&store) - .await - .account_updates( - account_ids - .iter() - .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| { - BlockAccountUpdate::new( 
- account_id, - account_hash.into(), - AccountUpdateDetails::Private, - vec![], - ) - }) - .collect(), - ) - .build(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), block.header().account_root()); -} - -/// Test that the current account root is returned if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_empty_batches() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches = Vec::new(); - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), store.account_root().await); -} - -// NOTE ROOT TESTS -// ================================================================================================= - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, and -/// contains no batches -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_batches_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - 
// Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = Vec::new(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, but -/// which contains at least 1 batch. -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_notes_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let batch = ProvenBatch::mocked_from_transactions(vec![]); - vec![batch] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the expected tree when multiple notes were created across -/// many batches. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_success() { - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let notes_created: Vec = [ - Digest::from([Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)]), - Digest::from([Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)]), - Digest::from([Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)]), - ] - .into_iter() - .zip(account_ids.iter()) - .map(|(note_digest, &account_id)| { - NoteHeader::new( - note_digest.into(), - NoteMetadata::new( - account_id, - NoteType::Private, - NoteTag::for_local_use_case(0u16, 0u16).unwrap(), - NoteExecutionHint::none(), - ONE, - ) - .unwrap(), - ) - }) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = notes_created - .iter() - .zip(account_ids.iter()) - .map(|(note, &account_id)| { - let note = OutputNote::Header(*note); - MockProvenTxBuilder::with_account(account_id, Digest::default(), Digest::default()) - .output_notes(vec![note]) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create block note tree to get new root - // --------------------------------------------------------------------------------------------- - - // The current logic is hardcoded to a depth of 6 - // Specifically, we assume the block has up to 2^6 batches, and each batch up to 2^10 created - // notes, where each note is stored at depth 10 in the batch tree. 
- #[allow(clippy::items_after_statements, reason = "assert belongs to this section")] - const _: () = assert!(BLOCK_NOTE_TREE_DEPTH - BATCH_NOTE_TREE_DEPTH == 6); - - // The first 2 txs were put in the first batch; the 3rd was put in the second - let note_tree = BlockNoteTree::with_entries([ - (BlockNoteIndex::new(0, 0), notes_created[0].id(), *notes_created[0].metadata()), - (BlockNoteIndex::new(0, 1), notes_created[1].id(), *notes_created[1].metadata()), - (BlockNoteIndex::new(1, 0), notes_created[2].id(), *notes_created[2].metadata()), - ]) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.note_root(), note_tree.root()); -} - -// NULLIFIER ROOT TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of nullifiers. -/// -/// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let nullifier_1 = batches[0].produced_nullifiers().next().unwrap(); - let nullifier_2 = batches[1].produced_nullifiers().next().unwrap(); - let nullifier_3 = - Nullifier::from([101_u32.into(), 102_u32.into(), 103_u32.into(), 104_u32.into()]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, hash)| { - (account_id, AccountWitness { hash, proof: MerklePath::default() }) - }) - .collect(); - - let nullifiers = BTreeMap::from_iter(vec![ - ( - nullifier_2, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_2.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ( - nullifier_3, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_3.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentNullifiers(nullifiers)) => { - assert_eq!(nullifiers, vec![nullifier_1, nullifier_3]); - } - ); -} - -/// Tests that the block kernel returns the expected nullifier tree when no nullifiers are present -/// in the transaction -#[tokio::test] -async fn compute_nullifier_root_empty_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let 
batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let nullifier_smt = Smt::new(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -/// Tests that the block kernel returns the expected nullifier tree when multiple nullifiers are -/// present in the transaction -#[tokio::test] -async fn compute_nullifier_root_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - let nullifiers = [ - batches[0].produced_nullifiers().next().unwrap(), - batches[1].produced_nullifiers().next().unwrap(), - ]; - - // Set up store - // --------------------------------------------------------------------------------------------- - let initial_block_num = BlockNumber::from(42); - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()) - .initial_block_num(initial_block_num) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), nullifiers.iter(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - - // Note that the block number in store is 42; the nullifiers get added to the next block (i.e. 
- // block number 43) - let nullifier_smt = - Smt::with_entries(nullifiers.into_iter().map(|nullifier| { - (nullifier.inner(), [(initial_block_num + 1).into(), ZERO, ZERO, ZERO]) - })) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -// CHAIN MMR ROOT TESTS -// ================================================================================================= - -/// Test that the chain mmr root is as expected if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_empty_mmr() { - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to non-empty MMR (1 peak), and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_1_peak() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - mmr.add(Digest::default()); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to an MMR with 17 peaks, and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_17_peaks() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - for _ in 0..(2_u32.pow(17) - 1) { - mmr.add(Digest::default()); - } - - assert_eq!(mmr.peaks().peaks().len(), 17); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index ec20a6b1d..6c9de3e37 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -1,14 +1,12 @@ +use miden_block_prover::ProvenBlockError; use miden_node_proto::errors::ConversionError; use miden_node_utils::formatting::format_opt; use miden_objects::{ - account::AccountId, block::BlockNumber, - crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, ProposedBatchError, + Digest, ProposedBatchError, ProposedBlockError, }; -use miden_processor::ExecutionError; use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -146,50 +144,19 @@ pub enum BuildBatchError { ProveBatchError(#[source] ProvenBatchError), } -// Block prover errors -// ================================================================================================= - -#[derive(Debug, Error)] -pub enum BlockProverError { - #[error("received invalid merkle path")] - InvalidMerklePaths(#[source] 
MerkleError), - #[error("program execution failed")] - ProgramExecutionFailed(#[source] ExecutionError), - #[error("failed to retrieve {0} root from stack outputs")] - InvalidRootOutput(&'static str), -} - // Block building errors // ================================================================================================= #[derive(Debug, Error)] pub enum BuildBlockError { - #[error("failed to compute new block")] - BlockProverFailed(#[from] BlockProverError), #[error("failed to apply block to store")] StoreApplyBlockFailed(#[source] StoreError), #[error("failed to get block inputs from store")] GetBlockInputsFailed(#[source] StoreError), - #[error("block inputs from store did not contain data for account {0}")] - MissingAccountInput(AccountId), - #[error("block inputs from store contained extra data for accounts {0:?}")] - ExtraStoreData(Vec), - #[error("account {0} with state {1} cannot transaction to remaining states {2:?}")] - InconsistentAccountStateTransition(AccountId, Digest, Vec), - #[error( - "block inputs from store and transaction batches produced different nullifiers: {0:?}" - )] - InconsistentNullifiers(Vec), - #[error("unauthenticated transaction notes not found in the store or in outputs of other transactions in the block: {0:?}")] - UnauthenticatedNotesNotFound(Vec), - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - // TODO: Check if needed. - // #[error("block construction failed")] - // BlockConstructionError, + #[error("failed to propose block")] + ProposeBlockFailed(#[source] ProposedBlockError), + #[error("failed to prove block")] + ProveBlockFailed(#[source] ProvenBlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
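The new `ProposeBlockFailed` and `ProveBlockFailed` variants attach their causes through thiserror's `#[source]` instead of formatting them into the display message. A minimal illustration of that pattern; `InnerError` and `BuildError` are invented names for the sketch, and it assumes a `thiserror` dependency:

use thiserror::Error;

// Stand-in for an underlying error such as `ProposedBlockError` (illustrative only).
#[derive(Debug, Error)]
#[error("batch references an unknown block")]
struct InnerError;

#[derive(Debug, Error)]
enum BuildError {
    #[error("failed to propose block")]
    ProposeFailed(#[source] InnerError),
}

fn main() {
    let err = BuildError::ProposeFailed(InnerError);
    // `#[source]` keeps the cause reachable via `Error::source` without
    // duplicating it in the top-level display message.
    println!("{err}");
    if let Some(cause) = std::error::Error::source(&err) {
        println!("caused by: {cause}");
    }
}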
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/lib.rs b/crates/block-producer/src/lib.rs index 1cb3b62c9..8aa594232 100644 --- a/crates/block-producer/src/lib.rs +++ b/crates/block-producer/src/lib.rs @@ -10,7 +10,6 @@ mod errors; mod mempool; mod store; -pub mod block; pub mod config; pub mod server; diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index d367a7a75..9d4ad4d40 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -22,7 +22,7 @@ use miden_node_proto::{ use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, - block::{BlockHeader, BlockNumber, ProvenBlock}, + block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, @@ -32,7 +32,7 @@ use miden_processor::crypto::RpoDigest; use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; -use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; +use crate::{errors::StoreError, COMPONENT}; // TRANSACTION INPUTS // ================================================================================================ @@ -197,13 +197,15 @@ impl StoreClient { pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, + created_nullifiers: impl Iterator + Send, + unauthenticated_notes: impl Iterator + Send, + reference_blocks: impl Iterator + Send, ) -> Result { let request = tonic::Request::new(GetBlockInputsRequest { account_ids: updated_accounts.map(Into::into).collect(), - nullifiers: produced_nullifiers.map(digest::Digest::from).collect(), - unauthenticated_notes: notes.map(digest::Digest::from).collect(), + nullifiers: created_nullifiers.map(digest::Digest::from).collect(), + unauthenticated_notes: unauthenticated_notes.map(digest::Digest::from).collect(), + reference_blocks: reference_blocks.map(|block_num| block_num.as_u32()).collect(), }); let store_response = self.inner.clone().get_block_inputs(request).await?.into_inner(); diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index eb7aef93f..6293f5428 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,5 +1,3 @@ -use std::iter; - use miden_objects::{ batch::ProvenBatch, block::{ @@ -13,10 +11,6 @@ use miden_objects::{ }; use super::MockStoreSuccess; -use crate::{ - block::BlockInputs, - block_builder::prover::{block_witness::BlockWitness, BlockProver}, -}; /// Constructs the block we expect to be built given the store state, and a set of transaction /// batches to be applied @@ -74,30 +68,31 @@ pub async fn build_expected_block_header( ) } -/// Builds the "actual" block header; i.e. 
the block header built using the Miden VM, used in the -/// node -pub async fn build_actual_block_header( - store: &MockStoreSuccess, - batches: Vec, -) -> BlockHeader { - let updated_accounts: Vec<_> = - batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); - let produced_nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - let block_inputs_from_store: BlockInputs = store - .get_block_inputs( - updated_accounts.iter().map(|(&account_id, _)| account_id), - produced_nullifiers.iter(), - iter::empty(), - ) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - BlockProver::new().prove(block_witness).unwrap() -} +// Note: Commented as it is unused atm. Is it worth fixing it? +// /// Builds the "actual" block header; i.e. the block header built using the Miden VM, used in the +// /// node +// pub async fn build_actual_block_header( +// store: &MockStoreSuccess, +// batches: Vec, +// ) -> BlockHeader { +// let updated_accounts: Vec<_> = +// batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); +// let produced_nullifiers: Vec = +// batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); + +// let block_inputs_from_store: BlockInputs = store +// .get_block_inputs( +// updated_accounts.iter().map(|(&account_id, _)| account_id), +// produced_nullifiers.iter(), +// iter::empty(), +// ) +// .await +// .unwrap(); + +// let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); + +// BlockProver::new().prove(block_witness).unwrap() +// } #[derive(Debug)] pub struct MockBlockBuilder { diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index 080570f64..edf7cff4b 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -3,12 +3,11 @@ use std::{ num::NonZeroU32, }; -use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ batch::ProvenBatch, block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, - crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, - note::{NoteId, NoteInclusionProof, Nullifier}, + crypto::merkle::{Mmr, SimpleSmt, Smt}, + note::{NoteId, NoteInclusionProof}, transaction::ProvenTransaction, ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; @@ -16,7 +15,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, test_utils::block::{ @@ -293,65 +291,66 @@ impl MockStoreSuccess { }) } - pub async fn get_block_inputs( - &self, - updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, - ) -> Result { - let locked_accounts = self.accounts.read().await; - let locked_produced_nullifiers = self.produced_nullifiers.read().await; - - let chain_peaks = { - let locked_chain_mmr = self.chain_mmr.read().await; - locked_chain_mmr.peaks() - }; - - let accounts = { - updated_accounts - .map(|account_id| { - let ValuePath { value: hash, path: proof } = - locked_accounts.open(&account_id.into()); - - (account_id, AccountWitness { hash, proof }) - }) - .collect() - }; - - let nullifiers = produced_nullifiers - .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) - .collect(); - - let locked_notes = self.notes.read().await; - let note_proofs = notes - .filter_map(|id| 
locked_notes.get(id).map(|proof| (*id, proof.clone()))) - .collect::>(); - - let locked_headers = self.block_headers.read().await; - let latest_header = - *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; - - let locked_chain_mmr = self.chain_mmr.read().await; - let chain_length = latest_header.block_num(); - let block_proofs = note_proofs - .values() - .map(|note_proof| { - let block_num = note_proof.location().block_num(); - let block_header = *locked_headers.get(&block_num).unwrap(); - let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; - - BlockInclusionProof { block_header, mmr_path, chain_length } - }) - .collect(); - - let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; - - Ok(BlockInputs { - block_header: latest_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } + // Note: Commented as it is unused atm. Is it worth fixing it? + // pub async fn get_block_inputs( + // &self, + // updated_accounts: impl Iterator + Send, + // produced_nullifiers: impl Iterator + Send, + // notes: impl Iterator + Send, + // ) -> Result { + // let locked_accounts = self.accounts.read().await; + // let locked_produced_nullifiers = self.produced_nullifiers.read().await; + + // let chain_peaks = { + // let locked_chain_mmr = self.chain_mmr.read().await; + // locked_chain_mmr.peaks() + // }; + + // let accounts = { + // updated_accounts + // .map(|account_id| { + // let ValuePath { value: hash, path: proof } = + // locked_accounts.open(&account_id.into()); + + // (account_id, AccountWitness { hash, proof }) + // }) + // .collect() + // }; + + // let nullifiers = produced_nullifiers + // .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) + // .collect(); + + // let locked_notes = self.notes.read().await; + // let note_proofs = notes + // .filter_map(|id| locked_notes.get(id).map(|proof| (*id, proof.clone()))) + // .collect::>(); + + // let locked_headers = self.block_headers.read().await; + // let latest_header = + // *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; + + // let locked_chain_mmr = self.chain_mmr.read().await; + // let chain_length = latest_header.block_num(); + // let block_proofs = note_proofs + // .values() + // .map(|note_proof| { + // let block_num = note_proof.location().block_num(); + // let block_header = *locked_headers.get(&block_num).unwrap(); + // let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; + + // BlockInclusionProof { block_header, mmr_path, chain_length } + // }) + // .collect(); + + // let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; + + // Ok(BlockInputs { + // block_header: latest_header, + // chain_peaks, + // accounts, + // nullifiers, + // found_unauthenticated_notes, + // }) + // } } diff --git a/crates/proto/src/domain/account.rs b/crates/proto/src/domain/account.rs index 3d6680065..36fadc4a0 100644 --- a/crates/proto/src/domain/account.rs +++ b/crates/proto/src/domain/account.rs @@ -149,49 +149,45 @@ impl TryInto for proto::requests::get_account_proofs_reques } } -// ACCOUNT INPUT RECORD +// ACCOUNT WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct AccountInputRecord { +pub struct AccountWitnessRecord { pub account_id: AccountId, - pub account_hash: Digest, + pub initial_state_commitment: Digest, pub proof: 
MerklePath, } -impl From for proto::responses::AccountBlockInputRecord { - fn from(from: AccountInputRecord) -> Self { +impl From for proto::responses::AccountWitness { + fn from(from: AccountWitnessRecord) -> Self { Self { account_id: Some(from.account_id.into()), - account_hash: Some(from.account_hash.into()), + initial_state_commitment: Some(from.initial_state_commitment.into()), proof: Some(Into::into(&from.proof)), } } } -impl TryFrom for AccountInputRecord { +impl TryFrom for AccountWitnessRecord { type Error = ConversionError; fn try_from( - account_input_record: proto::responses::AccountBlockInputRecord, + account_witness_record: proto::responses::AccountWitness, ) -> Result { Ok(Self { - account_id: account_input_record + account_id: account_witness_record .account_id - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_id - )))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_id)))? .try_into()?, - account_hash: account_input_record - .account_hash - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_hash - )))? + initial_state_commitment: account_witness_record + .initial_state_commitment + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_hash)))? .try_into()?, - proof: account_input_record + proof: account_witness_record .proof .as_ref() - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!(proof)))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(proof)))? .try_into()?, }) } diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index fa7e4bcfb..43b7f2f1a 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -1,11 +1,19 @@ +use std::collections::BTreeMap; + use miden_objects::{ - block::{BlockHeader, BlockNumber}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness}, crypto::merkle::MerklePath, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, }; use crate::{ errors::{ConversionError, MissingFieldHelper}, - generated::block as proto, + generated::{ + block as proto, note::NoteInclusionInBlockProof, responses::GetBlockInputsResponse, + }, + AccountWitnessRecord, NullifierWitnessRecord, }; // BLOCK HEADER @@ -124,3 +132,98 @@ impl TryFrom for BlockInclusionProof { Ok(result) } } + +// BLOCK INPUTS +// ================================================================================================ + +impl From for GetBlockInputsResponse { + fn from(inputs: BlockInputs) -> Self { + let ( + prev_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + ) = inputs.into_parts(); + + GetBlockInputsResponse { + latest_block_header: Some(prev_block_header.into()), + account_witnesses: account_witnesses + .into_iter() + .map(|(id, witness)| { + let (initial_state_commitment, proof) = witness.into_parts(); + AccountWitnessRecord { + account_id: id, + initial_state_commitment, + proof, + } + .into() + }) + .collect(), + nullifier_witnesses: nullifier_witnesses + .into_iter() + .map(|(nullifier, witness)| { + let proof = witness.into_proof(); + NullifierWitnessRecord { nullifier, proof }.into() + }) + .collect(), + chain_mmr: chain_mmr.to_bytes(), + unauthenticated_note_proofs: unauthenticated_note_proofs + .iter() + .map(NoteInclusionInBlockProof::from) + .collect(), + } + } +} + +impl TryFrom for BlockInputs { + type Error = 
ConversionError; + + fn try_from(response: GetBlockInputsResponse) -> Result { + let latest_block_header: BlockHeader = response + .latest_block_header + .ok_or(proto::BlockHeader::missing_field("block_header"))? + .try_into()?; + + let account_witnesses = response + .account_witnesses + .into_iter() + .map(|entry| { + let witness_record: AccountWitnessRecord = entry.try_into()?; + Ok(( + witness_record.account_id, + AccountWitness::new( + witness_record.initial_state_commitment, + witness_record.proof, + ), + )) + }) + .collect::, ConversionError>>()?; + + let nullifier_witnesses = response + .nullifier_witnesses + .into_iter() + .map(|entry| { + let witness: NullifierWitnessRecord = entry.try_into()?; + Ok((witness.nullifier, NullifierWitness::new(witness.proof))) + }) + .collect::, ConversionError>>()?; + + let unauthenticated_note_proofs = response + .unauthenticated_note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?; + + let chain_mmr = ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?; + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) + } +} diff --git a/crates/proto/src/domain/nullifier.rs b/crates/proto/src/domain/nullifier.rs index 482183a0f..c78fd3da0 100644 --- a/crates/proto/src/domain/nullifier.rs +++ b/crates/proto/src/domain/nullifier.rs @@ -39,36 +39,32 @@ impl TryFrom for Nullifier { // ================================================================================================ #[derive(Clone, Debug)] -pub struct NullifierWitness { +pub struct NullifierWitnessRecord { pub nullifier: Nullifier, pub proof: SmtProof, } -impl TryFrom for NullifierWitness { +impl TryFrom for NullifierWitnessRecord { type Error = ConversionError; fn try_from( - nullifier_input_record: proto::responses::NullifierBlockInputRecord, + nullifier_witness_record: proto::responses::NullifierWitness, ) -> Result { Ok(Self { - nullifier: nullifier_input_record + nullifier: nullifier_witness_record .nullifier - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - nullifier - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(nullifier)))? .try_into()?, - proof: nullifier_input_record + proof: nullifier_witness_record .opening - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - opening - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(opening)))? .try_into()?, }) } } -impl From for proto::responses::NullifierBlockInputRecord { - fn from(value: NullifierWitness) -> Self { +impl From for proto::responses::NullifierWitness { + fn from(value: NullifierWitnessRecord) -> Self { Self { nullifier: Some(value.nullifier.into()), opening: Some(value.proof.into()), diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index c8e19bb29..8f18373d4 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -82,15 +82,26 @@ pub struct SyncNoteRequest { /// Returns data required to prove the next block. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockInputsRequest { - /// ID of the account against which a transaction is executed. + /// IDs of all accounts updated in the proposed block for which to retrieve account witnesses. 
#[prost(message, repeated, tag = "1")] pub account_ids: ::prost::alloc::vec::Vec, - /// Set of nullifiers consumed by this transaction. + /// Nullifiers all notes consumed by the block for which to retrieve witnesses. + /// + /// Due to note erasure it will generally not be possible to know the exact set of nullifiers + /// a block will create, unless we pre-execute note erasure. So in practice, this set of + /// nullifiers will be the set of nullifiers of all proven batches in the block, which is a + /// superset of the nullifiers the block may create. + /// + /// However, if it is known that a certain note will be erased, it would not be necessary to + /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs to be checked for existence in the database. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, + /// Array of block numbers referenced by all batches in the block. + #[prost(fixed32, repeated, tag = "4")] + pub reference_blocks: ::prost::alloc::vec::Vec, } /// Returns the inputs for a transaction batch. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index c3a8f5f20..35f5bd220 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -86,24 +86,25 @@ pub struct SyncNoteResponse { } /// An account returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct AccountBlockInputRecord { +pub struct AccountWitness { /// The account ID. #[prost(message, optional, tag = "1")] pub account_id: ::core::option::Option, - /// The latest account hash, zero hash if the account doesn't exist. + /// The latest account state commitment used as the initial state of the requested block. + /// This will be the zero digest if the account doesn't exist. #[prost(message, optional, tag = "2")] - pub account_hash: ::core::option::Option, - /// Merkle path to verify the account's inclusion in the MMR. + pub initial_state_commitment: ::core::option::Option, + /// Merkle path to verify the account's inclusion in the account tree. #[prost(message, optional, tag = "3")] pub proof: ::core::option::Option, } /// A nullifier returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct NullifierBlockInputRecord { - /// The nullifier ID. +pub struct NullifierWitness { + /// The nullifier. #[prost(message, optional, tag = "1")] pub nullifier: ::core::option::Option, - /// Merkle path to verify the nullifier's inclusion in the MMR. + /// The SMT proof to verify the nullifier's inclusion in the nullifier tree. #[prost(message, optional, tag = "2")] pub opening: ::core::option::Option, } @@ -112,21 +113,24 @@ pub struct NullifierBlockInputRecord { pub struct GetBlockInputsResponse { /// The latest block header. #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Peaks of the above block's mmr, The `forest` value is equal to the block number. + pub latest_block_header: ::core::option::Option, + /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + /// the store**. 
#[prost(message, repeated, tag = "2")] - pub mmr_peaks: ::prost::alloc::vec::Vec, - /// The hashes of the requested accounts and their authentication paths. - #[prost(message, repeated, tag = "3")] - pub account_states: ::prost::alloc::vec::Vec, - /// The requested nullifiers and their authentication paths. - #[prost(message, repeated, tag = "4")] - pub nullifiers: ::prost::alloc::vec::Vec, - /// The list of requested notes which were found in the database. - #[prost(message, optional, tag = "5")] - pub found_unauthenticated_notes: ::core::option::Option< - super::note::NoteAuthenticationInfo, + pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< + super::note::NoteInclusionInBlockProof, >, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the requested blocks + /// referenced by the batches in the block. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, + /// The state commitments of the requested accounts and their authentication paths. + #[prost(message, repeated, tag = "4")] + pub account_witnesses: ::prost::alloc::vec::Vec, + /// The requested nullifiers and their authentication paths. + #[prost(message, repeated, tag = "5")] + pub nullifier_witnesses: ::prost::alloc::vec::Vec, } /// Represents the result of getting batch inputs. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/lib.rs b/crates/proto/src/lib.rs index 9290fc739..ca950f020 100644 --- a/crates/proto/src/lib.rs +++ b/crates/proto/src/lib.rs @@ -8,8 +8,8 @@ pub mod generated; // ================================================================================================ pub use domain::{ - account::{AccountInputRecord, AccountState}, + account::{AccountState, AccountWitnessRecord}, convert, - nullifier::NullifierWitness, + nullifier::NullifierWitnessRecord, try_convert, }; diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index f2323c56c..4441ce578 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -78,12 +78,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. 
diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index f1dfe5f90..3a0d58d24 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -90,42 +90,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. 
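On the receiving side, decoding follows the same pattern as the `TryFrom` impls earlier in the series: every optional protobuf field is checked explicitly, witnesses are keyed by their account ID or nullifier in a `BTreeMap`, and the chain MMR travels as opaque bytes. A minimal sketch of the missing-field handling, using hypothetical simplified types (`WireAccountWitness` and `ConversionError` here are illustrative only, not the generated or domain types):

use std::collections::BTreeMap;

#[derive(Debug)]
enum ConversionError {
    MissingField(&'static str),
}

// Hypothetical wire type: optional fields, as prost generates them.
struct WireAccountWitness {
    account_id: Option<u128>,
    initial_state_commitment: Option<[u8; 32]>,
}

// Hypothetical domain type, keyed by account ID after conversion.
struct AccountWitness {
    initial_state_commitment: [u8; 32],
}

fn convert_witnesses(
    wire: Vec<WireAccountWitness>,
) -> Result<BTreeMap<u128, AccountWitness>, ConversionError> {
    wire.into_iter()
        .map(|w| {
            // Reject responses with absent fields instead of silently defaulting them.
            let id = w.account_id.ok_or(ConversionError::MissingField("account_id"))?;
            let commitment = w
                .initial_state_commitment
                .ok_or(ConversionError::MissingField("initial_state_commitment"))?;
            Ok((id, AccountWitness { initial_state_commitment: commitment }))
        })
        .collect()
}

fn main() {
    let wire = vec![WireAccountWitness {
        account_id: Some(1),
        initial_state_commitment: Some([0u8; 32]),
    }];
    assert!(convert_witnesses(wire).is_ok());
}

Carrying `chain_mmr` as opaque `bytes` keeps the protobuf schema stable while the MMR encoding evolves in miden-objects; the node only needs the `to_bytes`/`read_from_bytes` round trip shown in the domain conversion above.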
diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index aecf56f33..d9ba0110b 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -244,6 +244,10 @@ pub enum GetBlockInputsError { IncorrectChainMmrForestNumber { forest: usize, block_num: BlockNumber }, #[error("note inclusion proof MMR error")] NoteInclusionMmr(#[from] MmrError), + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), } impl From for GetBlockInputsError { diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 0064668e6..a509e1723 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -349,15 +349,16 @@ impl api_server::Api for StoreApi { ) -> Result, Status> { let request = request.into_inner(); - let nullifiers = validate_nullifiers(&request.nullifiers)?; let account_ids = read_account_ids(&request.account_ids)?; + let nullifiers = validate_nullifiers(&request.nullifiers)?; let unauthenticated_notes = validate_notes(&request.unauthenticated_notes)?; + let reference_blocks = read_block_numbers(&request.reference_blocks); let unauthenticated_notes = unauthenticated_notes.into_iter().collect(); self.state - .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes) + .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes, reference_blocks) .await - .map(Into::into) + .map(GetBlockInputsResponse::from) .map(Response::new) .map_err(internal_error) } @@ -580,3 +581,8 @@ fn validate_notes(notes: &[generated::digest::Digest]) -> Result, St .collect::>() .map_err(|_| invalid_argument("Digest field is not in the modulus range")) } + +#[instrument(target = COMPONENT, skip_all)] +fn read_block_numbers(block_numbers: &[u32]) -> BTreeSet { + block_numbers.iter().map(|raw_number| BlockNumber::from(*raw_number)).collect() +} diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 17ec2cf45..9b1e5e450 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -10,22 +10,18 @@ use std::{ }; use miden_node_proto::{ - convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, batch::BatchInputs, block::BlockInclusionProof, note::NoteAuthenticationInfo, }, - generated::responses::{ - AccountProofsResponse, AccountStateHeader, GetBlockInputsResponse, StorageSlotMapProof, - }, - AccountInputRecord, NullifierWitness, + generated::responses::{AccountProofsResponse, AccountStateHeader, StorageSlotMapProof}, }; use miden_node_utils::formatting::format_array; use miden_objects::{ account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, - block::{BlockHeader, BlockNumber, ProvenBlock}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness, ProvenBlock}, crypto::{ hash::rpo::RpoDigest, merkle::{ @@ -58,37 +54,6 @@ use crate::{ // STRUCTURES // ================================================================================================ -/// Information needed from the store to validate and build a block -#[derive(Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub account_states: Vec, - - /// The requested nullifiers and their authentication paths - pub nullifiers: Vec, - - /// List of notes 
found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -impl From for GetBlockInputsResponse { - fn from(value: BlockInputs) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_peaks: convert(value.chain_peaks.peaks()), - account_states: convert(value.account_states), - nullifiers: convert(value.nullifiers), - found_unauthenticated_notes: Some(value.found_unauthenticated_notes.into()), - } - } -} - #[derive(Debug)] pub struct TransactionInputs { pub account_hash: RpoDigest, @@ -150,12 +115,14 @@ impl Blockchain { &self.0 } - /// Returns the latest block number and partial mmr. + /// Creates a [`PartialMmr`] at the state of the latest block (i.e. the block's chain root will + /// match the hashed peaks of the returned partial MMR). This MMR will include authentication + /// paths for all blocks in the provided set. pub fn partial_mmr_from_blocks( &self, blocks: &BTreeSet, latest_block_number: BlockNumber, - ) -> Result { + ) -> PartialMmr { // Using latest block as the target forest means we take the state of the MMR one before // the latest block. This is because the latest block will be used as the reference // block of the batch and will be added to the MMR by the batch kernel. @@ -183,7 +150,8 @@ impl Blockchain { .track(block_num, leaf, &path) .expect("filling partial mmr with data from mmr should succeed"); } - Ok(partial_mmr) + + partial_mmr } } @@ -654,7 +622,7 @@ impl State { ( latest_block_num, - inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num), ) }; @@ -783,61 +751,100 @@ impl State { account_ids: &[AccountId], nullifiers: &[Nullifier], unauthenticated_notes: BTreeSet, + reference_blocks: BTreeSet, ) -> Result { + // Get the note inclusion proofs from the DB. + // We do this first so we have to acquire the lock to the state where we have to get the + // note inclusion proof's block inclusion proof. + let unauthenticated_note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_notes) + .await + .map_err(GetBlockInputsError::SelectNoteInclusionProofError)?; + + // The set of blocks that the notes are included in. + let note_proof_reference_blocks = + unauthenticated_note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to prove inclusion for, without duplicates. + let mut blocks = reference_blocks; + blocks.extend(note_proof_reference_blocks); + + // Acquire the lock to the inner state. While we hold the lock, we don't access the DB. let inner = self.inner.read().await; - let latest = self - .db - .select_block_header_by_block_num(None) - .await? 
- .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?; - - // sanity check - if inner.blockchain.chain_tip() != latest.block_num() { - return Err(GetBlockInputsError::IncorrectChainMmrForestNumber { - forest: inner.blockchain.chain_tip().as_usize(), - block_num: latest.block_num(), - }); - } + let latest_block_number = inner.latest_block_num(); - // using current block number gets us the peaks of the chain MMR as of one block ago; - // this is done so that latest.chain_root matches the returned peaks - let chain_peaks = - inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| { - GetBlockInputsError::FailedToGetMmrPeaksForForest { - forest: latest.block_num().as_usize(), - error, - } - })?; - let account_states = account_ids + // The latest block is not yet in the chain MMR, so we can't (and don't need to) prove its + // inclusion in the chain. + blocks.remove(&latest_block_number); + + // Fetch the partial MMR with authentication paths for the set of blocks. + let partial_mmr = inner.blockchain.partial_mmr_from_blocks(&blocks, latest_block_number); + + // Fetch witnesses for all acounts. + let account_witnesses = account_ids .iter() .copied() .map(|account_id| { - let ValuePath { value: account_hash, path: proof } = - inner.account_tree.open(&LeafIndex::new_max_depth(account_id.prefix().into())); - Ok(AccountInputRecord { account_id, account_hash, proof }) + let ValuePath { + value: latest_state_commitment, + path: proof, + } = inner.account_tree.open(&account_id.into()); + (account_id, AccountWitness::new(latest_state_commitment, proof)) }) - .collect::>()?; + .collect::>(); - let nullifiers: Vec = nullifiers + // Fetch witnesses for all nullifiers. We don't check whether the nullifiers are spent or + // not as this is done as part of proposing the block. + let nullifier_witnesses: BTreeMap = nullifiers .iter() + .copied() .map(|nullifier| { - let proof = inner.nullifier_tree.open(nullifier); - - NullifierWitness { nullifier: *nullifier, proof } + let proof = inner.nullifier_tree.open(&nullifier); + (nullifier, NullifierWitness::new(proof)) }) .collect(); - let found_unauthenticated_notes = - self.get_note_authentication_info(unauthenticated_notes).await?; + // Release the lock. + std::mem::drop(inner); - Ok(BlockInputs { - block_header: latest, - chain_peaks, - account_states, - nullifiers, - found_unauthenticated_notes, - }) + // Fetch the block headers for all blocks in the partial MMR plus the latest one which will + // be used as the previous block header of the block being built. + let mut headers = self + .db + .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number))) + .await + .map_err(GetBlockInputsError::SelectBlockHeaderError)?; + + // Find and remove the latest block as we must not add it to the chain MMR, since it is + // not yet in the chain. + let latest_block_header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == latest_block_number).then_some(index) + }) + .expect("DB should have returned the header of the latest block header"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. 
+ let latest_block_header = headers.swap_remove(latest_block_header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block header's block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) } /// Returns data needed by the block producer to verify transactions validity. diff --git a/proto/requests.proto b/proto/requests.proto index f2323c56c..4441ce578 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -78,12 +78,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/proto/responses.proto b/proto/responses.proto index f1dfe5f90..3a0d58d24 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -90,42 +90,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. 
- block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. From dbaa4f9ca7e60582ac137604f959017b6a9dafd7 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:28:59 +0100 Subject: [PATCH 04/32] chore: Use branch that allows empty blocks --- Cargo.lock | 46 ++++++++++++++++---------------- Cargo.toml | 6 ++--- crates/block-producer/Cargo.toml | 4 +-- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e2ae127ad..3546e46ca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -105,9 +105,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "arrayref" @@ -837,7 +837,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1596,9 +1596,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "logos" @@ -1706,7 +1706,7 @@ dependencies = [ [[package]] name = "miden-block-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" dependencies = [ "miden-crypto", "miden-lib", @@ -1795,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" dependencies = [ "miden-assembly", 
"miden-objects", @@ -1992,7 +1992,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2051,7 +2051,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" dependencies = [ "async-trait", "miden-lib", @@ -2068,7 +2068,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-batch-expiration#fa7b5b1728f22d0c1b6264d62d96667002dbdb50" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" dependencies = [ "miden-core", "miden-crypto", @@ -2930,7 +2930,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3089,18 +3089,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", @@ -3109,9 +3109,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -3319,9 +3319,9 @@ dependencies = [ [[package]] name = "target-triple" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" [[package]] name = "tempfile" @@ -3334,7 +3334,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4143,7 +4143,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4375,9 +4375,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index 33694e6ab..bbe3b1295 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,17 +28,17 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-batch-expiration" } +miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 2b904e44d..886952fbd 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,7 +21,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { workspace = true } -miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-batch-expiration" } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-empty-blocks" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } @@ -29,7 +29,7 @@ miden-objects = { workspace = true } miden-processor = { workspace = true } miden-stdlib = { workspace = true } miden-tx = { workspace = true } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-batch-expiration" } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-empty-blocks" } rand = { version = "0.8" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } From 0d1f43c9e02b4cf032724a39a56ac15b1663b5b6 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:31:31 +0100 Subject: [PATCH 05/32] chore: Add changelog entry --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a3e87b2a..865b8f932 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). +- [BREAKING] Update `GetBlockInputs` RPC and use `LocalBlockProver` for block building (#709). 
### Enhancements From afdcd6518bdeb47f13c177a8d0209f15f7f71ab2 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:40:48 +0100 Subject: [PATCH 06/32] chore: Simplify nullifier witness comment --- crates/block-producer/src/block_builder/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index acaecf7fe..dade0d42c 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -126,10 +126,10 @@ impl BlockBuilder { /// - Account witnesses for all accounts updated in the block /// - Nullifier witnesses for all nullifiers created in the block /// - Due to note erasure the set of nullifiers the block creates it not necessarily equal to - /// the union of sets of all nullifiers created in proven batches. However, since we don't - /// yet know which nullifiers the block will actually create, we fetch witnesses for all - /// nullifiers created by batches. If we knew that a certain note will be erased, we would - /// not have to supply a nullifier witness for it. + /// the union of all nullifiers created in proven batches. However, since we don't yet know + /// which nullifiers the block will actually create, we fetch witnesses for all nullifiers + /// created by batches. If we knew that a certain note will be erased, we would not have to + /// supply a nullifier witness for it. #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] async fn get_block_inputs( &self, From d7ba7359742458aace1d915b8015e539fa20de09 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:43:00 +0100 Subject: [PATCH 07/32] chore: Update `created_nullifiers` naming --- Cargo.lock | 10 +++++----- crates/block-producer/src/block_builder/mod.rs | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3546e46ca..c685b4eb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1706,7 +1706,7 @@ dependencies = [ [[package]] name = "miden-block-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" dependencies = [ "miden-crypto", "miden-lib", @@ -1795,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" dependencies = [ "miden-assembly", "miden-objects", @@ -1992,7 +1992,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2051,7 +2051,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" +source = 
"git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" dependencies = [ "async-trait", "miden-lib", @@ -2068,7 +2068,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#d0f63c00de1dd6c74ea7d9953ff4178b9895a247" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" dependencies = [ "miden-core", "miden-crypto", diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index dade0d42c..76c85cdb3 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -154,7 +154,7 @@ impl BlockBuilder { }); let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); let account_ids = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); - let created_nullifiers = batch_iter.flat_map(ProvenBatch::produced_nullifiers); + let created_nullifiers = batch_iter.flat_map(ProvenBatch::created_nullifiers); let inputs = self .store From 8238dea99ef554cfaf916700cf22a3428ff2fc8e Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:45:08 +0100 Subject: [PATCH 08/32] chore: Use consistent `_iter` naming --- crates/block-producer/src/block_builder/mod.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 76c85cdb3..9d7854c49 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -153,14 +153,14 @@ impl BlockBuilder { .filter_map(|note| note.header().map(NoteHeader::id)) }); let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); - let account_ids = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); - let created_nullifiers = batch_iter.flat_map(ProvenBatch::created_nullifiers); + let account_ids_iter = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); + let created_nullifiers_iter = batch_iter.flat_map(ProvenBatch::created_nullifiers); let inputs = self .store .get_block_inputs( - account_ids, - created_nullifiers, + account_ids_iter, + created_nullifiers_iter, unauthenticated_notes_iter, block_references_iter, ) From 5e3c1a68157ab37e77fee9148cbbfb8bfb8e7b99 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 08:45:48 +0100 Subject: [PATCH 09/32] chore: Rename `preimage` var name --- crates/block-producer/src/block_builder/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 9d7854c49..98a1650c0 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -173,9 +173,9 @@ impl BlockBuilder { #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, - preimage: BlockBatchesAndInputs, + batches_inputs: BlockBatchesAndInputs, ) -> Result { - let BlockBatchesAndInputs { batches, inputs } = preimage; + let BlockBatchesAndInputs { batches, inputs } = batches_inputs; // Question: Should we split proposing and proving in two stages for telemetry reasons? 
let proposed_block = From 6e6d72b011c987567201a85a85228e9d80c7e617 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:12:23 +0100 Subject: [PATCH 10/32] feat: Check if highest batch block num is greater than latest block --- .../block-producer/src/block_builder/mod.rs | 2 +- crates/proto/src/domain/nullifier.rs | 2 +- crates/store/src/errors.rs | 29 ++++--------------- crates/store/src/state.rs | 12 +++++++- 4 files changed, 18 insertions(+), 27 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 98a1650c0..6d4023224 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -293,7 +293,7 @@ impl BuiltBlock { span.set_attribute("block.protocol.version", i64::from(header.version())); - // Question: Should this be here? (Moved here because output notes are no longer) + // Question: Should this be here? span.set_attribute( "block.output_notes.count", i64::try_from(self.block.output_notes().count()) diff --git a/crates/proto/src/domain/nullifier.rs b/crates/proto/src/domain/nullifier.rs index c78fd3da0..ef19397ca 100644 --- a/crates/proto/src/domain/nullifier.rs +++ b/crates/proto/src/domain/nullifier.rs @@ -35,7 +35,7 @@ impl TryFrom for Nullifier { } } -// NULLIFIER INPUT RECORD +// NULLIFIER WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index d9ba0110b..97e58ac16 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -41,9 +41,6 @@ pub enum DatabaseError { AccountError(#[from] AccountError), #[error("account delta error")] AccountDeltaError(#[from] AccountDeltaError), - // TODO: Check if needed. - #[error("block error")] - BlockError, #[error("closed channel")] ClosedChannel(#[from] RecvError), #[error("deserialization failed")] @@ -232,31 +229,15 @@ pub enum GetBlockHeaderError { #[derive(Error, Debug)] pub enum GetBlockInputsError { - #[error("account error")] - AccountError(#[from] AccountError), - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("database doesn't have any block header data")] - DbBlockHeaderEmpty, - #[error("failed to get MMR peaks for forest ({forest}): {error}")] - FailedToGetMmrPeaksForForest { forest: usize, error: MmrError }, - #[error("chain MMR forest expected to be 1 less than latest header's block num. 
Chain MMR forest: {forest}, block num: {block_num}")] - IncorrectChainMmrForestNumber { forest: usize, block_num: BlockNumber }, - #[error("note inclusion proof MMR error")] - NoteInclusionMmr(#[from] MmrError), #[error("failed to select note inclusion proofs")] SelectNoteInclusionProofError(#[source] DatabaseError), #[error("failed to select block headers")] SelectBlockHeaderError(#[source] DatabaseError), -} - -impl From for GetBlockInputsError { - fn from(value: GetNoteAuthenticationInfoError) -> Self { - match value { - GetNoteAuthenticationInfoError::DatabaseError(db_err) => db_err.into(), - GetNoteAuthenticationInfoError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), - } - } + #[error("highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}")] + BatchBlockReferenceNewerThanLatestBlock { + highest_block_number: BlockNumber, + latest_block_number: BlockNumber, + }, } #[derive(Error, Debug)] diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 9b1e5e450..7874975f8 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -775,11 +775,21 @@ impl State { let latest_block_number = inner.latest_block_num(); + // If `blocks` is empty, use the latest block number which will never trigger the error. + let highest_block_number = blocks.last().copied().unwrap_or(latest_block_number); + if highest_block_number > latest_block_number { + return Err(GetBlockInputsError::BatchBlockReferenceNewerThanLatestBlock { + highest_block_number, + latest_block_number, + }); + } + // The latest block is not yet in the chain MMR, so we can't (and don't need to) prove its // inclusion in the chain. blocks.remove(&latest_block_number); - // Fetch the partial MMR with authentication paths for the set of blocks. + // Fetch the partial MMR at the state of the latest block with authentication paths for the + // provided set of blocks. let partial_mmr = inner.blockchain.partial_mmr_from_blocks(&blocks, latest_block_number); // Fetch witnesses for all acounts. 
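
The guard introduced in [PATCH 10/32] above rejects a `get_block_inputs` request whose batches reference a block number the store has not seen yet. A minimal, self-contained sketch of that check follows; `u32` stands in for `BlockNumber` and the error enum is a stripped-down stand-in rather than the node's real type.

    use std::collections::BTreeSet;

    // Stand-ins for the node's `BlockNumber` and `GetBlockInputsError` types.
    type BlockNumber = u32;

    #[derive(Debug)]
    enum GetBlockInputsError {
        BatchBlockReferenceNewerThanLatestBlock {
            highest_block_number: BlockNumber,
            latest_block_number: BlockNumber,
        },
    }

    /// Rejects requests whose batches reference a block the store does not know yet.
    fn check_batch_references(
        reference_blocks: &BTreeSet<BlockNumber>,
        latest_block_number: BlockNumber,
    ) -> Result<(), GetBlockInputsError> {
        // `BTreeSet` is ordered, so `last` yields the highest referenced block.
        // An empty set falls back to the latest block, which never errors.
        let highest_block_number =
            reference_blocks.last().copied().unwrap_or(latest_block_number);

        if highest_block_number > latest_block_number {
            return Err(GetBlockInputsError::BatchBlockReferenceNewerThanLatestBlock {
                highest_block_number,
                latest_block_number,
            });
        }
        Ok(())
    }

    fn main() {
        let latest = 100;
        let known: BTreeSet<BlockNumber> = [98, 99, 100].into();
        let unknown: BTreeSet<BlockNumber> = [99, 101].into();

        assert!(check_batch_references(&known, latest).is_ok());
        assert!(check_batch_references(&unknown, latest).is_err());
    }

Keeping the reference blocks in a `BTreeSet` is what makes the `last()` call sufficient: the set is sorted and deduplicated, so its final element is the highest block number any batch refers to.
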
From 5a4136a2f59bbea46bffba76ac3cc223e2b44e3a Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:21:59 +0100 Subject: [PATCH 11/32] chore: Simplify genesis and add safety comment --- crates/store/src/genesis.rs | 11 +++++++---- crates/store/src/state.rs | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index 90783f503..761c29b15 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -1,10 +1,10 @@ use miden_lib::transaction::TransactionKernel; use miden_objects::{ account::{delta::AccountUpdateDetails, Account}, - block::{BlockAccountUpdate, BlockHeader, BlockNumber, ProvenBlock}, - crypto::merkle::{EmptySubtreeRoots, MmrPeaks, SimpleSmt, Smt}, + block::{BlockAccountUpdate, BlockHeader, BlockNoteTree, BlockNumber, ProvenBlock}, + crypto::merkle::{MmrPeaks, SimpleSmt, Smt}, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, ACCOUNT_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, + Digest, ACCOUNT_TREE_DEPTH, }; use crate::errors::GenesisError; @@ -58,13 +58,16 @@ impl GenesisState { MmrPeaks::new(0, Vec::new()).unwrap().hash_peaks(), account_smt.root(), Smt::default().root(), - *EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0), + BlockNoteTree::empty().root(), Digest::default(), TransactionKernel::kernel_root(), Digest::default(), self.timestamp, ); + // SAFETY: Header and accounts should be valid by construction. + // No notes or nullifiers are created at genesis, which is consistent with the above empty + // block note tree root and empty nullifier tree root. Ok(ProvenBlock::new_unchecked(header, accounts, vec![], vec![])) } } diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 7874975f8..283a9c085 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -754,8 +754,8 @@ impl State { reference_blocks: BTreeSet, ) -> Result { // Get the note inclusion proofs from the DB. - // We do this first so we have to acquire the lock to the state where we have to get the - // note inclusion proof's block inclusion proof. + // We do this first so we have to acquire the lock to the state just once. There we need the + // reference blocks of the note proofs to get their authentication paths in the chain MMR. 
let unauthenticated_note_proofs = self .db .select_note_inclusion_proofs(unauthenticated_notes) From d37f955afce7ee34247ab500a57fd4f2ffa21323 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:27:24 +0100 Subject: [PATCH 12/32] chore: Remove unused miden-stdlib --- Cargo.lock | 1 - Cargo.toml | 1 - crates/block-producer/Cargo.toml | 1 - 3 files changed, 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c685b4eb1..2a90e3055 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1885,7 +1885,6 @@ dependencies = [ "miden-node-utils", "miden-objects", "miden-processor", - "miden-stdlib", "miden-tx", "miden-tx-batch-prover", "pretty_assertions", diff --git a/Cargo.toml b/Cargo.toml index bbe3b1295..b4c92643a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,7 +37,6 @@ miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } miden-processor = { version = "0.12" } -miden-stdlib = { version = "0.12", default-features = false } miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } prost = { version = "0.13" } rand = { version = "0.8" } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 886952fbd..65abc437d 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -27,7 +27,6 @@ miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } miden-processor = { workspace = true } -miden-stdlib = { workspace = true } miden-tx = { workspace = true } miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-empty-blocks" } rand = { version = "0.8" } From 639a5a9e175eeb0f18b6df5923da1840a4d1a75e Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:48:29 +0100 Subject: [PATCH 13/32] feat: Remove unused `NoteAuthenticationInfo` --- crates/proto/src/domain/note.rs | 46 +-------------------- crates/proto/src/generated/note.rs | 10 ----- crates/rpc-proto/proto/note.proto | 9 ----- crates/store/src/state.rs | 65 +----------------------------- proto/note.proto | 9 ----- 5 files changed, 3 insertions(+), 136 deletions(-) diff --git a/crates/proto/src/domain/note.rs b/crates/proto/src/domain/note.rs index 14131bc5b..b0c742993 100644 --- a/crates/proto/src/domain/note.rs +++ b/crates/proto/src/domain/note.rs @@ -1,16 +1,10 @@ -use std::collections::{BTreeMap, BTreeSet}; - use miden_objects::{ - note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, - Digest, Felt, + note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, Digest, Felt }; use crate::{ - convert, - domain::block::BlockInclusionProof, errors::{ConversionError, MissingFieldHelper}, generated::note as proto, - try_convert, }; impl TryFrom for NoteMetadata { @@ -89,41 +83,3 @@ impl TryFrom<&proto::NoteInclusionInBlockProof> for (NoteId, NoteInclusionProof) )) } } - -#[derive(Clone, Default, Debug)] -pub struct NoteAuthenticationInfo { - pub block_proofs: Vec, - pub note_proofs: BTreeMap, -} - -impl NoteAuthenticationInfo { - pub fn contains_note(&self, note: &NoteId) -> bool { - self.note_proofs.contains_key(note) - } - - pub fn note_ids(&self) -> BTreeSet { - self.note_proofs.keys().copied().collect() - } -} - -impl From for proto::NoteAuthenticationInfo { - fn from(value: 
NoteAuthenticationInfo) -> Self { - Self { - note_proofs: convert(&value.note_proofs), - block_proofs: convert(value.block_proofs), - } - } -} - -impl TryFrom for NoteAuthenticationInfo { - type Error = ConversionError; - - fn try_from(value: proto::NoteAuthenticationInfo) -> Result { - let result = Self { - block_proofs: try_convert(value.block_proofs)?, - note_proofs: try_convert(&value.note_proofs)?, - }; - - Ok(result) - } -} diff --git a/crates/proto/src/generated/note.rs b/crates/proto/src/generated/note.rs index 77a9bae41..293eaddab 100644 --- a/crates/proto/src/generated/note.rs +++ b/crates/proto/src/generated/note.rs @@ -78,13 +78,3 @@ pub struct NoteSyncRecord { #[prost(message, optional, tag = "4")] pub merkle_path: ::core::option::Option, } -/// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct NoteAuthenticationInfo { - /// Proof of each note's inclusion in a block. - #[prost(message, repeated, tag = "1")] - pub note_proofs: ::prost::alloc::vec::Vec, - /// Proof of each block's inclusion in the chain. - #[prost(message, repeated, tag = "2")] - pub block_proofs: ::prost::alloc::vec::Vec, -} diff --git a/crates/rpc-proto/proto/note.proto b/crates/rpc-proto/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/crates/rpc-proto/proto/note.proto +++ b/crates/rpc-proto/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block. - repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. - repeated block.BlockInclusionProof block_proofs = 2; -} diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 283a9c085..8bca054f4 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -13,8 +13,6 @@ use miden_node_proto::{ domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, batch::BatchInputs, - block::BlockInclusionProof, - note::NoteAuthenticationInfo, }, generated::responses::{AccountProofsResponse, AccountStateHeader, StorageSlotMapProof}, }; @@ -45,8 +43,8 @@ use crate::{ db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ ApplyBlockError, DatabaseError, GetBatchInputsError, GetBlockHeaderError, - GetBlockInputsError, GetNoteAuthenticationInfoError, InvalidBlockError, NoteSyncError, - StateInitializationError, StateSyncError, + GetBlockInputsError, InvalidBlockError, NoteSyncError, StateInitializationError, + StateSyncError, }, nullifier_tree::NullifierTree, COMPONENT, @@ -495,65 +493,6 @@ impl State { self.db.select_notes_by_id(note_ids).await } - /// Queries all the note inclusion proofs matching a certain Note IDs from the database. - pub async fn get_note_authentication_info( - &self, - note_ids: BTreeSet, - ) -> Result { - // First we grab note inclusion proofs for the known notes. These proofs only - // prove that the note was included in a given block. We then also need to prove that - // each of those blocks is included in the chain. - let note_proofs = self.db.select_note_inclusion_proofs(note_ids).await?; - - // The set of blocks that the notes are included in. 
- let blocks = note_proofs - .values() - .map(|proof| proof.location().block_num()) - .collect::>(); - - // Grab the block merkle paths from the inner state. - // - // NOTE: Scoped block to automatically drop the mutex guard asap. - // - // We also avoid accessing the db in the block as this would delay - // dropping the guard. - let (chain_length, merkle_paths) = { - let state = self.inner.read().await; - let chain_length = state.blockchain.chain_length().as_usize(); - - let paths = blocks - .iter() - .map(|&block_num| { - let proof = state.blockchain.open(block_num.as_usize())?.merkle_path; - - Ok::<_, MmrError>((block_num, proof)) - }) - .collect::, MmrError>>()?; - - let chain_length = u32::try_from(chain_length) - .expect("Forest is a chain length so should fit into a u32"); - - (chain_length.into(), paths) - }; - - let headers = self.db.select_block_headers(blocks.into_iter()).await?; - - let headers = headers - .into_iter() - .map(|header| (header.block_num(), header)) - .collect::>(); - - let mut block_proofs = Vec::with_capacity(merkle_paths.len()); - for (block_num, mmr_path) in merkle_paths { - let block_header = - *headers.get(&block_num).ok_or(DatabaseError::BlockNotFoundInDb(block_num))?; - - block_proofs.push(BlockInclusionProof { block_header, mmr_path, chain_length }); - } - - Ok(NoteAuthenticationInfo { block_proofs, note_proofs }) - } - /// Fetches the inputs for a transaction batch from the database. /// /// ## Inputs diff --git a/proto/note.proto b/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/proto/note.proto +++ b/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block. - repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. - repeated block.BlockInclusionProof block_proofs = 2; -} From 96479700662e18bd301ae727725ccdba146315b1 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:50:56 +0100 Subject: [PATCH 14/32] feat: Remove unused `BlockInclusionProof` --- crates/proto/src/domain/block.rs | 41 +---------------------------- crates/proto/src/domain/note.rs | 3 ++- crates/proto/src/generated/block.rs | 13 --------- crates/rpc-proto/proto/block.proto | 12 --------- proto/block.proto | 12 --------- 5 files changed, 3 insertions(+), 78 deletions(-) diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index 43b7f2f1a..ff19518bb 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -1,8 +1,7 @@ use std::collections::BTreeMap; use miden_objects::{ - block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness}, - crypto::merkle::MerklePath, + block::{AccountWitness, BlockHeader, BlockInputs, NullifierWitness}, note::{NoteId, NoteInclusionProof}, transaction::ChainMmr, utils::{Deserializable, Serializable}, @@ -95,44 +94,6 @@ impl TryFrom for BlockHeader { } } -/// Data required to verify a block's inclusion proof. 
-#[derive(Clone, Debug)] -pub struct BlockInclusionProof { - pub block_header: BlockHeader, - pub mmr_path: MerklePath, - pub chain_length: BlockNumber, -} - -impl From for proto::BlockInclusionProof { - fn from(value: BlockInclusionProof) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_path: Some((&value.mmr_path).into()), - chain_length: value.chain_length.as_u32(), - } - } -} - -impl TryFrom for BlockInclusionProof { - type Error = ConversionError; - - fn try_from(value: proto::BlockInclusionProof) -> Result { - let result = Self { - block_header: value - .block_header - .ok_or(proto::BlockInclusionProof::missing_field("block_header"))? - .try_into()?, - mmr_path: (&value - .mmr_path - .ok_or(proto::BlockInclusionProof::missing_field("mmr_path"))?) - .try_into()?, - chain_length: value.chain_length.into(), - }; - - Ok(result) - } -} - // BLOCK INPUTS // ================================================================================================ diff --git a/crates/proto/src/domain/note.rs b/crates/proto/src/domain/note.rs index b0c742993..6c3b7f165 100644 --- a/crates/proto/src/domain/note.rs +++ b/crates/proto/src/domain/note.rs @@ -1,5 +1,6 @@ use miden_objects::{ - note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, Digest, Felt + note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, + Digest, Felt, }; use crate::{ diff --git a/crates/proto/src/generated/block.rs b/crates/proto/src/generated/block.rs index 915dce7ec..1dd74f21c 100644 --- a/crates/proto/src/generated/block.rs +++ b/crates/proto/src/generated/block.rs @@ -36,16 +36,3 @@ pub struct BlockHeader { #[prost(fixed32, tag = "11")] pub timestamp: u32, } -/// Represents a block inclusion proof. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct BlockInclusionProof { - /// Block header associated with the inclusion proof. - #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Merkle path associated with the inclusion proof. - #[prost(message, optional, tag = "2")] - pub mmr_path: ::core::option::Option, - /// The chain length associated with `mmr_path`. - #[prost(fixed32, tag = "3")] - pub chain_length: u32, -} diff --git a/crates/rpc-proto/proto/block.proto b/crates/rpc-proto/proto/block.proto index f9a41a99c..74229793c 100644 --- a/crates/rpc-proto/proto/block.proto +++ b/crates/rpc-proto/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. - fixed32 chain_length = 3; -} diff --git a/proto/block.proto b/proto/block.proto index f9a41a99c..74229793c 100644 --- a/proto/block.proto +++ b/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. 
- fixed32 chain_length = 3; -} From 02c46cac1a584d0a59b04e4da922cd693daf3ad1 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 09:57:05 +0100 Subject: [PATCH 15/32] chore: Remove unused error --- crates/store/src/errors.rs | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 97e58ac16..6abd4afa4 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -73,8 +73,6 @@ pub enum DatabaseError { AccountsNotFoundInDb(Vec), #[error("account {0} is not on the chain")] AccountNotPublic(AccountId), - #[error("block {0} not found")] - BlockNotFoundInDb(BlockNumber), #[error("data corrupted: {0}")] DataCorrupted(String), #[error("SQLite pool interaction failed: {0}")] @@ -93,8 +91,7 @@ impl From for Status { match err { DatabaseError::AccountNotFoundInDb(_) | DatabaseError::AccountsNotFoundInDb(_) - | DatabaseError::AccountNotPublic(_) - | DatabaseError::BlockNotFoundInDb(_) => Status::not_found(err.to_string()), + | DatabaseError::AccountNotPublic(_) => Status::not_found(err.to_string()), _ => Status::internal(err.to_string()), } @@ -260,14 +257,6 @@ pub enum NoteSyncError { MmrError(#[from] MmrError), } -#[derive(Error, Debug)] -pub enum GetNoteAuthenticationInfoError { - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("Mmr error")] - MmrError(#[from] MmrError), -} - #[derive(Error, Debug)] pub enum GetBatchInputsError { #[error("failed to select note inclusion proofs")] From 574fa81238e620fad22807de43f3fefc59246445 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 14:28:09 +0100 Subject: [PATCH 16/32] chore: Rename unknown block ref error variants --- crates/store/src/errors.rs | 4 ++-- crates/store/src/state.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 6abd4afa4..ebca9f753 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -231,7 +231,7 @@ pub enum GetBlockInputsError { #[error("failed to select block headers")] SelectBlockHeaderError(#[source] DatabaseError), #[error("highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}")] - BatchBlockReferenceNewerThanLatestBlock { + UnknownBatchBlockReference { highest_block_number: BlockNumber, latest_block_number: BlockNumber, }, @@ -266,7 +266,7 @@ pub enum GetBatchInputsError { #[error("set of blocks refernced by transactions is empty")] TransactionBlockReferencesEmpty, #[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] - TransactionBlockReferenceNewerThanLatestBlock { + UnknownTransactionBlockReference { highest_block_num: BlockNumber, latest_block_num: BlockNumber, }, diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 8bca054f4..dac16cca7 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -548,7 +548,7 @@ impl State { let highest_block_num = *blocks.last().expect("we should have checked for empty block references"); if highest_block_num > latest_block_num { - return Err(GetBatchInputsError::TransactionBlockReferenceNewerThanLatestBlock { + return Err(GetBatchInputsError::UnknownTransactionBlockReference { highest_block_num, latest_block_num, }); @@ -717,7 +717,7 @@ impl State { // If `blocks` is empty, use the latest block number which will never trigger the error. 
let highest_block_number = blocks.last().copied().unwrap_or(latest_block_number); if highest_block_number > latest_block_number { - return Err(GetBlockInputsError::BatchBlockReferenceNewerThanLatestBlock { + return Err(GetBlockInputsError::UnknownBatchBlockReference { highest_block_number, latest_block_number, }); From 05baf32dbcb68198e3456a90e36c83068ce55e5f Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 14:38:37 +0100 Subject: [PATCH 17/32] chore: Define empty tree / vec next to each other --- crates/store/src/genesis.rs | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index 761c29b15..d78990734 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -3,6 +3,7 @@ use miden_objects::{ account::{delta::AccountUpdateDetails, Account}, block::{BlockAccountUpdate, BlockHeader, BlockNoteTree, BlockNumber, ProvenBlock}, crypto::merkle::{MmrPeaks, SimpleSmt, Smt}, + note::Nullifier, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, Digest, ACCOUNT_TREE_DEPTH, }; @@ -51,14 +52,20 @@ impl GenesisState { (update.account_id().prefix().into(), update.final_state_commitment().into()) }))?; + let empty_nullifiers: Vec = Vec::new(); + let empty_nullifier_tree = Smt::new(); + + let empty_output_notes = Vec::new(); + let empty_block_note_tree = BlockNoteTree::empty(); + let header = BlockHeader::new( self.version, Digest::default(), BlockNumber::GENESIS, MmrPeaks::new(0, Vec::new()).unwrap().hash_peaks(), account_smt.root(), - Smt::default().root(), - BlockNoteTree::empty().root(), + empty_nullifier_tree.root(), + empty_block_note_tree.root(), Digest::default(), TransactionKernel::kernel_root(), Digest::default(), @@ -68,7 +75,12 @@ impl GenesisState { // SAFETY: Header and accounts should be valid by construction. // No notes or nullifiers are created at genesis, which is consistent with the above empty // block note tree root and empty nullifier tree root. - Ok(ProvenBlock::new_unchecked(header, accounts, vec![], vec![])) + Ok(ProvenBlock::new_unchecked( + header, + accounts, + empty_output_notes, + empty_nullifiers, + )) } } From 566bdc0d01c8959bf671235cf1c4f8e514ed7771 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 14:39:23 +0100 Subject: [PATCH 18/32] chore: Add changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 865b8f932..40808d400 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,12 +11,13 @@ - Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). - Instrument the block-producer's block building process (#676). +- Use `LocalBlockProver` for block building (#709). ### Changes - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). -- [BREAKING] Update `GetBlockInputs` RPC and use `LocalBlockProver` for block building (#709). +- [BREAKING] Update `GetBlockInputs` RPC (#709). 
### Enhancements From 25435d3b861248f23d23dc416adae6236bd698c2 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 14:41:01 +0100 Subject: [PATCH 19/32] chore: Remove unused test util code --- crates/block-producer/src/test_utils/block.rs | 26 -------- crates/block-producer/src/test_utils/store.rs | 63 ------------------- 2 files changed, 89 deletions(-) diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 6293f5428..782de1ad3 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -68,32 +68,6 @@ pub async fn build_expected_block_header( ) } -// Note: Commented as it is unused atm. Is it worth fixing it? -// /// Builds the "actual" block header; i.e. the block header built using the Miden VM, used in the -// /// node -// pub async fn build_actual_block_header( -// store: &MockStoreSuccess, -// batches: Vec, -// ) -> BlockHeader { -// let updated_accounts: Vec<_> = -// batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); -// let produced_nullifiers: Vec = -// batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - -// let block_inputs_from_store: BlockInputs = store -// .get_block_inputs( -// updated_accounts.iter().map(|(&account_id, _)| account_id), -// produced_nullifiers.iter(), -// iter::empty(), -// ) -// .await -// .unwrap(); - -// let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - -// BlockProver::new().prove(block_witness).unwrap() -// } - #[derive(Debug)] pub struct MockBlockBuilder { store_accounts: SimpleSmt, diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index edf7cff4b..4d0507e33 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -290,67 +290,4 @@ impl MockStoreSuccess { current_block_height: 0.into(), }) } - - // Note: Commented as it is unused atm. Is it worth fixing it? 
- // pub async fn get_block_inputs( - // &self, - // updated_accounts: impl Iterator + Send, - // produced_nullifiers: impl Iterator + Send, - // notes: impl Iterator + Send, - // ) -> Result { - // let locked_accounts = self.accounts.read().await; - // let locked_produced_nullifiers = self.produced_nullifiers.read().await; - - // let chain_peaks = { - // let locked_chain_mmr = self.chain_mmr.read().await; - // locked_chain_mmr.peaks() - // }; - - // let accounts = { - // updated_accounts - // .map(|account_id| { - // let ValuePath { value: hash, path: proof } = - // locked_accounts.open(&account_id.into()); - - // (account_id, AccountWitness { hash, proof }) - // }) - // .collect() - // }; - - // let nullifiers = produced_nullifiers - // .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) - // .collect(); - - // let locked_notes = self.notes.read().await; - // let note_proofs = notes - // .filter_map(|id| locked_notes.get(id).map(|proof| (*id, proof.clone()))) - // .collect::>(); - - // let locked_headers = self.block_headers.read().await; - // let latest_header = - // *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; - - // let locked_chain_mmr = self.chain_mmr.read().await; - // let chain_length = latest_header.block_num(); - // let block_proofs = note_proofs - // .values() - // .map(|note_proof| { - // let block_num = note_proof.location().block_num(); - // let block_header = *locked_headers.get(&block_num).unwrap(); - // let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; - - // BlockInclusionProof { block_header, mmr_path, chain_length } - // }) - // .collect(); - - // let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; - - // Ok(BlockInputs { - // block_header: latest_header, - // chain_peaks, - // accounts, - // nullifiers, - // found_unauthenticated_notes, - // }) - // } } From 7c3b77dd83dabc6a8f9e47aadaa699a34e9b5e1b Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 14:41:51 +0100 Subject: [PATCH 20/32] chore: Annotate set type --- crates/store/src/state.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index dac16cca7..7f507993b 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -535,7 +535,7 @@ impl State { // Collect all blocks we need to query without duplicates, which is: // - all blocks for which we need to prove note inclusion. // - all blocks referenced by transactions in the batch. - let mut blocks = tx_reference_blocks; + let mut blocks: BTreeSet = tx_reference_blocks; blocks.extend(note_blocks); // Scoped block to automatically drop the read lock guard as soon as we're done. 
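
[PATCH 17/32] above defines the genesis block's empty nullifier list, nullifier tree, output notes, and block note tree as named values next to each other, so the header roots and the empty block body visibly come from the same data. The simplification takes for granted that a freshly built `Smt::new()` commits to the same root as the `Smt::default()` it replaces; the sketch below spells that assumption out. It assumes a crate pinned to the same `miden-objects` revision as this PR, and that the digest type supports `assert_eq!` (`Debug` + `PartialEq`).

    use miden_objects::{block::BlockNoteTree, crypto::merkle::Smt};

    fn main() {
        // A fresh empty SMT should commit to the same root as `Smt::default()`,
        // so swapping one for the other leaves the genesis nullifier root intact.
        assert_eq!(Smt::new().root(), Smt::default().root());

        // Genesis creates no notes, so the header commits to the root of an
        // empty block note tree.
        let _empty_note_tree_root = BlockNoteTree::empty().root();
    }
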
From c48bf3c6cf6f5799a2c49b7d7bc8e5a000885566 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 16:01:34 +0100 Subject: [PATCH 21/32] feat: Move witness fetching to separate method --- crates/store/src/server/api.rs | 2 +- crates/store/src/state.rs | 108 ++++++++++++++++++++------------- 2 files changed, 66 insertions(+), 44 deletions(-) diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index a509e1723..d1993a5b5 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -356,7 +356,7 @@ impl api_server::Api for StoreApi { let unauthenticated_notes = unauthenticated_notes.into_iter().collect(); self.state - .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes, reference_blocks) + .get_block_inputs(account_ids, nullifiers, unauthenticated_notes, reference_blocks) .await .map(GetBlockInputsResponse::from) .map(Response::new) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 7f507993b..388c7f475 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -687,8 +687,8 @@ impl State { /// Returns data needed by the block producer to construct and prove the next block. pub async fn get_block_inputs( &self, - account_ids: &[AccountId], - nullifiers: &[Nullifier], + account_ids: Vec, + nullifiers: Vec, unauthenticated_notes: BTreeSet, reference_blocks: BTreeSet, ) -> Result { @@ -709,7 +709,67 @@ impl State { let mut blocks = reference_blocks; blocks.extend(note_proof_reference_blocks); - // Acquire the lock to the inner state. While we hold the lock, we don't access the DB. + let (latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr) = + self.get_block_inputs_witnesses(&mut blocks, account_ids, nullifiers).await?; + + // Fetch the block headers for all blocks in the partial MMR plus the latest one which will + // be used as the previous block header of the block being built. + let mut headers = self + .db + .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number))) + .await + .map_err(GetBlockInputsError::SelectBlockHeaderError)?; + + // Find and remove the latest block as we must not add it to the chain MMR, since it is + // not yet in the chain. + let latest_block_header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == latest_block_number).then_some(index) + }) + .expect("DB should have returned the header of the latest block header"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. + let latest_block_header = headers.swap_remove(latest_block_header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block header's block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) + } + + /// Get account and nullifier witnesses for the requested account IDs and nullifier as well as + /// the [`PartialMmr`] for the given blocks. The MMR won't contain the latest block and its + /// number is removed from `blocks` and returned separately. 
+ /// + /// This method acquires the lock to the inner state and does not access the DB so we release + /// the lock asap. + async fn get_block_inputs_witnesses( + &self, + blocks: &mut BTreeSet, + account_ids: Vec, + nullifiers: Vec, + ) -> Result< + ( + BlockNumber, + BTreeMap, + BTreeMap, + PartialMmr, + ), + GetBlockInputsError, + > { let inner = self.inner.read().await; let latest_block_number = inner.latest_block_num(); @@ -729,7 +789,7 @@ impl State { // Fetch the partial MMR at the state of the latest block with authentication paths for the // provided set of blocks. - let partial_mmr = inner.blockchain.partial_mmr_from_blocks(&blocks, latest_block_number); + let partial_mmr = inner.blockchain.partial_mmr_from_blocks(blocks, latest_block_number); // Fetch witnesses for all acounts. let account_witnesses = account_ids @@ -755,45 +815,7 @@ impl State { }) .collect(); - // Release the lock. - std::mem::drop(inner); - - // Fetch the block headers for all blocks in the partial MMR plus the latest one which will - // be used as the previous block header of the block being built. - let mut headers = self - .db - .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number))) - .await - .map_err(GetBlockInputsError::SelectBlockHeaderError)?; - - // Find and remove the latest block as we must not add it to the chain MMR, since it is - // not yet in the chain. - let latest_block_header_index = headers - .iter() - .enumerate() - .find_map(|(index, header)| { - (header.block_num() == latest_block_number).then_some(index) - }) - .expect("DB should have returned the header of the latest block header"); - - // The order doesn't matter for ChainMmr::new, so swap remove is fine. - let latest_block_header = headers.swap_remove(latest_block_header_index); - - // SAFETY: This should not error because: - // - we're passing exactly the block headers that we've added to the partial MMR, - // - so none of the block header's block numbers should exceed the chain length of the - // partial MMR, - // - and we've added blocks to a BTreeSet, so there can be no duplicates. - let chain_mmr = ChainMmr::new(partial_mmr, headers) - .expect("partial mmr and block headers should be consistent"); - - Ok(BlockInputs::new( - latest_block_header, - chain_mmr, - account_witnesses, - nullifier_witnesses, - unauthenticated_note_proofs, - )) + Ok((latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr)) } /// Returns data needed by the block producer to verify transactions validity. 
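
[PATCH 21/32] above splits `get_block_inputs` so that everything needing the in-memory state lock lives in `get_block_inputs_witnesses`, and the database queries run only after that helper returns and its read guard is dropped. The sketch below illustrates the lock-scoping pattern in isolation; the types, the placeholder query, and the `tokio` setup are illustrative stand-ins, not the store's actual API.

    use std::collections::BTreeMap;

    use tokio::sync::RwLock;

    // Stand-in for the store's in-memory state (account tree, nullifier tree, chain MMR).
    struct InnerState {
        witnesses: BTreeMap<u32, String>,
    }

    // Confine the read guard to a helper: only in-memory reads happen while it is held.
    async fn collect_witnesses(inner: &RwLock<InnerState>, ids: &[u32]) -> Vec<String> {
        let state = inner.read().await;
        ids.iter()
            .filter_map(|id| state.witnesses.get(id).cloned())
            .collect()
        // The guard is dropped when the helper returns, before any database I/O.
    }

    // Placeholder for an async database query that runs after the lock is released.
    async fn select_block_headers(count: usize) -> Vec<String> {
        (0..count).map(|i| format!("header-{i}")).collect()
    }

    #[tokio::main]
    async fn main() {
        let inner = RwLock::new(InnerState {
            witnesses: BTreeMap::from([(1, "w1".to_string()), (2, "w2".to_string())]),
        });

        // 1. Short critical section: gather witnesses from the in-memory state.
        let witnesses = collect_witnesses(&inner, &[1, 2]).await;

        // 2. Slow work: query the "database" only after the guard is gone, so
        //    writers (e.g. `apply_block`) are not blocked while we wait on I/O.
        let headers = select_block_headers(witnesses.len()).await;
        assert_eq!(headers.len(), 2);
    }
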
From 8d5bf5b3405372495b0e29c3ee812b11165c9641 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 16:28:07 +0100 Subject: [PATCH 22/32] feat: Use telemetry injection ext trait and calc erased notes --- .../block-producer/src/block_builder/mod.rs | 89 +++++++++++++------ 1 file changed, 63 insertions(+), 26 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 6d4023224..f2ecf6573 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -94,8 +94,10 @@ impl BlockBuilder { .inspect(SelectedBlock::inject_telemetry) .then(|selected| self.get_block_inputs(selected)) .inspect_ok(BlockBatchesAndInputs::inject_telemetry) + .and_then(|inputs| self.propose_block(inputs)) + .inspect_ok(ProposedBlock::inject_telemetry) .and_then(|inputs| self.prove_block(inputs)) - .inspect_ok(BuiltBlock::inject_telemetry) + .inspect_ok(ProvenBlock::inject_telemetry) // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot // handle errors after it considers the process complete (which makes sense). .and_then(|proven_block| async { self.inject_failure(proven_block) }) @@ -170,17 +172,24 @@ impl BlockBuilder { Ok(BlockBatchesAndInputs { batches, inputs }) } - #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] - async fn prove_block( + #[instrument(target = COMPONENT, name = "block_builder.propose_block", skip_all, err)] + async fn propose_block( &self, batches_inputs: BlockBatchesAndInputs, - ) -> Result { + ) -> Result { let BlockBatchesAndInputs { batches, inputs } = batches_inputs; - // Question: Should we split proposing and proving in two stages for telemetry reasons? let proposed_block = ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?; + Ok(proposed_block) + } + + #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + async fn prove_block( + &self, + proposed_block: ProposedBlock, + ) -> Result { let proven_block = self .block_prover .prove(proposed_block) @@ -188,17 +197,17 @@ impl BlockBuilder { self.simulate_proving().await; - Ok(BuiltBlock { block: proven_block }) + Ok(proven_block) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - built_block: BuiltBlock, + built_block: ProvenBlock, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&built_block.block) + .apply_block(&built_block) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; @@ -247,10 +256,6 @@ struct BlockBatchesAndInputs { inputs: BlockInputs, } -struct BuiltBlock { - block: ProvenBlock, -} - impl SelectedBlock { fn inject_telemetry(&self) { let span = Span::current(); @@ -269,37 +274,69 @@ impl BlockBatchesAndInputs { i64::try_from(self.inputs.account_witnesses().len()) .expect("less than u32::MAX account updates"), ); + span.set_attribute( + "block.unauthenticated_notes.count", + i64::try_from(self.inputs.unauthenticated_note_proofs().len()) + .expect("less than u32::MAX unauthenticated notes"), + ); + } +} + +trait TelemetryInjector { + fn inject_telemetry(&self); +} + +impl TelemetryInjector for ProposedBlock { + /// Emit the input and output note related attributes. We do this here since this is the + /// earliest point we can set attributes after note erasure was done. 
+ fn inject_telemetry(&self) { + let span = Span::current(); + span.set_attribute( "block.nullifiers.count", - i64::try_from(self.inputs.nullifier_witnesses().len()) - .expect("less than u32::MAX nullifiers"), + u32::try_from(self.created_nullifiers().len()) + .expect("should have less than u32::MAX created nullifiers"), ); + let num_block_created_notes = self + .output_note_batches() + .iter() + .fold(0, |acc, output_notes| acc + output_notes.len()); span.set_attribute( - "block.unauthenticated_notes.count", - i64::try_from(self.inputs.unauthenticated_note_proofs().len()) - .expect("less than u32::MAX dangling notes"), + "block.output_notes.count", + u32::try_from(num_block_created_notes) + .expect("should have less than u32::MAX output notes"), + ); + + let num_batch_created_notes = + self.batches().iter().fold(0, |acc, batch| acc + batch.output_notes().len()); + span.set_attribute( + "block.batches.output_notes.count", + u32::try_from(num_batch_created_notes) + .expect("should have less than u32::MAX erased notes"), + ); + + let num_erased_notes = num_block_created_notes + .checked_sub(num_batch_created_notes) + .expect("block should not create fewer notes than all batches in it"); + span.set_attribute( + "block.erased_notes.count", + u32::try_from(num_erased_notes).expect("should have less than u32::MAX erased notes"), ); } } -impl BuiltBlock { +impl TelemetryInjector for ProvenBlock { fn inject_telemetry(&self) { let span = Span::current(); - let header = self.block.header(); + let header = self.header(); span.set_attribute("block.hash", header.hash()); span.set_attribute("block.sub_hash", header.sub_hash()); span.set_attribute("block.parent_hash", header.prev_hash()); + span.set_attribute("block.timestamp", header.timestamp()); span.set_attribute("block.protocol.version", i64::from(header.version())); - // Question: Should this be here? 
- span.set_attribute( - "block.output_notes.count", - i64::try_from(self.block.output_notes().count()) - .expect("less than u32::MAX output notes"), - ); - span.set_attribute("block.commitments.kernel", header.kernel_root()); span.set_attribute("block.commitments.nullifier", header.nullifier_root()); span.set_attribute("block.commitments.account", header.account_root()); From 5724bd182b28a692339ff98857ec769ad5458b5f Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Fri, 21 Feb 2025 16:36:09 +0100 Subject: [PATCH 23/32] chore: Rename unauthenticated notes count --- crates/block-producer/src/block_builder/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index f2ecf6573..8aad8b4d5 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -275,7 +275,7 @@ impl BlockBatchesAndInputs { .expect("less than u32::MAX account updates"), ); span.set_attribute( - "block.unauthenticated_notes.count", + "block.erased_note_proofs.count", i64::try_from(self.inputs.unauthenticated_note_proofs().len()) .expect("less than u32::MAX unauthenticated notes"), ); From f70eff8cff1c8f0dc5cbc429e8bf0ca7fa9edb34 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 09:48:30 +0100 Subject: [PATCH 24/32] chore: Add comments on `TelemetryInjector` --- crates/block-producer/src/block_builder/mod.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 8aad8b4d5..a39fdbf57 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -282,11 +282,13 @@ impl BlockBatchesAndInputs { } } -trait TelemetryInjector { +/// An extension trait used only locally to implement telemetry injection. +trait TelemetryInjectorExt { + /// Inject [`tracing`] telemetry from self. fn inject_telemetry(&self); } -impl TelemetryInjector for ProposedBlock { +impl TelemetryInjectorExt for ProposedBlock { /// Emit the input and output note related attributes. We do this here since this is the /// earliest point we can set attributes after note erasure was done. 
fn inject_telemetry(&self) { @@ -325,7 +327,7 @@ impl TelemetryInjector for ProposedBlock { } } -impl TelemetryInjector for ProvenBlock { +impl TelemetryInjectorExt for ProvenBlock { fn inject_telemetry(&self) { let span = Span::current(); let header = self.header(); From ba63bc4f6a56388cefe2e0719e08d21138581f0e Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 09:53:12 +0100 Subject: [PATCH 25/32] fix: Num erased notes calculation --- crates/block-producer/src/block_builder/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index a39fdbf57..676abb27e 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -317,9 +317,9 @@ impl TelemetryInjectorExt for ProposedBlock { .expect("should have less than u32::MAX erased notes"), ); - let num_erased_notes = num_block_created_notes - .checked_sub(num_batch_created_notes) - .expect("block should not create fewer notes than all batches in it"); + let num_erased_notes = num_batch_created_notes + .checked_sub(num_block_created_notes) + .expect("all batches in the block should not create fewer notes than the block itself"); span.set_attribute( "block.erased_notes.count", u32::try_from(num_erased_notes).expect("should have less than u32::MAX erased notes"), From ebee91ef3796117ab78eff343acbe1b8751ba8cd Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 10:06:20 +0100 Subject: [PATCH 26/32] chore: Add doc comments to telemetry wrappers --- crates/block-producer/src/block_builder/mod.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 676abb27e..9a24f4aa4 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -247,14 +247,12 @@ impl BlockBuilder { } } +/// A wrapper around batches selected for inlucion in a block, primarily used to be able to inject +/// telemetry in-between the selection and fetching the required [`BlockInputs`]. struct SelectedBlock { block_number: BlockNumber, batches: Vec, } -struct BlockBatchesAndInputs { - batches: Vec, - inputs: BlockInputs, -} impl SelectedBlock { fn inject_telemetry(&self) { @@ -264,6 +262,13 @@ impl SelectedBlock { } } +/// A wrapper around the inputs needed to build a [`ProposedBlock`], primarily used to be able to +/// inject telemetry in-between fetching block inputs and proposing the block. +struct BlockBatchesAndInputs { + batches: Vec, + inputs: BlockInputs, +} + impl BlockBatchesAndInputs { fn inject_telemetry(&self) { let span = Span::current(); From b6a0d73835fc23f47b4d2bb1e1a481afa207b5c7 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 10:22:41 +0100 Subject: [PATCH 27/32] chore: Fix typo in proto file --- crates/proto/src/generated/requests.rs | 4 ++-- crates/rpc-proto/proto/requests.proto | 4 ++-- proto/requests.proto | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index bd67d4e0d..e4dc3e131 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -84,7 +84,7 @@ pub struct GetBlockInputsRequest { /// IDs of all accounts updated in the proposed block for which to retrieve account witnesses. 
#[prost(message, repeated, tag = "1")] pub account_ids: ::prost::alloc::vec::Vec, - /// Nullifiers all notes consumed by the block for which to retrieve witnesses. + /// Nullifiers of all notes consumed by the block for which to retrieve witnesses. /// /// Due to note erasure it will generally not be possible to know the exact set of nullifiers /// a block will create, unless we pre-execute note erasure. So in practice, this set of @@ -95,7 +95,7 @@ pub struct GetBlockInputsRequest { /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, /// Array of block numbers referenced by all batches in the block. diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index 91b53da99..13d4568b1 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -79,7 +79,7 @@ message GetBlockInputsRequest { // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. // // Due to note erasure it will generally not be possible to know the exact set of nullifiers // a block will create, unless we pre-execute note erasure. So in practice, this set of @@ -90,7 +90,7 @@ message GetBlockInputsRequest { // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. repeated digest.Digest unauthenticated_notes = 3; // Array of block numbers referenced by all batches in the block. diff --git a/proto/requests.proto b/proto/requests.proto index 91b53da99..13d4568b1 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -79,7 +79,7 @@ message GetBlockInputsRequest { // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Nullifiers all notes consumed by the block for which to retrieve witnesses. + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. // // Due to note erasure it will generally not be possible to know the exact set of nullifiers // a block will create, unless we pre-execute note erasure. So in practice, this set of @@ -90,7 +90,7 @@ message GetBlockInputsRequest { // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs for which to retrieve note inclusion proofs, **if they exist**. + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. repeated digest.Digest unauthenticated_notes = 3; // Array of block numbers referenced by all batches in the block. 
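
[PATCH 22/32] above adds a `block.erased_notes.count` attribute and [PATCH 25/32] fixes its direction: batches can create notes that are consumed inside the same block and therefore never reach the block's output note set, so the erased count is the batch total minus the block total, never the reverse. A small arithmetic sketch of that calculation, using plain `usize` counts instead of the node's `ProposedBlock` API:

    /// Number of notes erased within a block, given per-batch and per-block
    /// output note counts.
    fn erased_note_count(batch_output_notes: &[usize], block_output_notes: &[usize]) -> usize {
        let created_by_batches: usize = batch_output_notes.iter().sum();
        let kept_by_block: usize = block_output_notes.iter().sum();

        // Erasure only removes notes, so the block can never contain more output
        // notes than its batches produced; `checked_sub` keeps that invariant loud.
        created_by_batches
            .checked_sub(kept_by_block)
            .expect("block should not contain more output notes than its batches created")
    }

    fn main() {
        // Two batches create 3 + 2 notes; one of them is consumed by another batch
        // in the same block and is erased, so only 4 notes reach the block note tree.
        assert_eq!(erased_note_count(&[3, 2], &[4]), 1);
    }
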
From 40041bb848e75fdc46cb86b01200cc0f314ba721 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 10:55:43 +0100 Subject: [PATCH 28/32] chore: Add expect messages on `BlockNoteIndex` --- Cargo.lock | 38 +++++++++---------- crates/block-producer/src/test_utils/block.rs | 2 +- crates/store/src/db/mod.rs | 3 +- crates/store/src/db/sql/mod.rs | 5 ++- crates/store/src/db/tests.rs | 12 +++--- 5 files changed, 31 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a90e3055..7256368a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.14" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3d1b2e905a3a7b00a6141adb0e4c0bb941d11caf55349d863942a1cc44e3c9" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "jobserver", "libc", @@ -811,9 +811,9 @@ checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d" [[package]] name = "either" -version = "1.13.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "ena" @@ -1530,9 +1530,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libloading" @@ -1706,7 +1706,7 @@ dependencies = [ [[package]] name = "miden-block-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" dependencies = [ "miden-crypto", "miden-lib", @@ -1795,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" dependencies = [ "miden-assembly", "miden-objects", @@ -1991,7 +1991,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2050,7 +2050,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" dependencies = [ "async-trait", "miden-lib", @@ -2067,7 +2067,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = 
"git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#ce76f0f097623d85576cd6fea9739df73134c9fc" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" dependencies = [ "miden-core", "miden-crypto", @@ -2137,9 +2137,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3b1c9bd4fe1f0f8b387f6eb9eb3b4a1aa26185e5750efb9140301703f62cd1b" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -2391,9 +2391,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" [[package]] name = "parking_lot" @@ -2789,9 +2789,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ "bitflags", ] @@ -2853,9 +2853,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ring" -version = "0.17.9" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e75ec5e92c4d8aede845126adc388046234541629e76029599ed35a003c7ed24" +checksum = "da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 782de1ad3..7fcf0bd4c 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -156,7 +156,7 @@ pub(crate) fn flatten_output_notes<'a>( ) -> impl Iterator { batches.enumerate().flat_map(|(batch_idx, batch)| { batch.iter().map(move |(note_idx_in_batch, note)| { - (BlockNoteIndex::new(batch_idx, *note_idx_in_batch), note) + (BlockNoteIndex::new(batch_idx, *note_idx_in_batch).unwrap(), note) }) }) } diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index c7c5ba923..3b2027455 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -91,7 +91,8 @@ impl NoteRecord { let note_idx_in_batch = row.get(2)?; // SAFETY: We can assume the batch and note indices stored in the DB are valid so this // should never panic. 
- let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch); + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = row.get_ref(3)?.as_blob()?; let note_id = RpoDigest::read_from_bytes(note_id)?; let note_type = row.get::<_, u8>(4)?.try_into()?; diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 16b6dc419..77400f2f4 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -787,7 +787,8 @@ pub fn select_notes_since_block_by_tag_and_sender( let note_idx_in_batch = row.get(2)?; // SAFETY: We can assume the batch and note indices stored in the DB are valid so this // should never panic. - let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch); + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = read_from_blob_column(row, 3)?; let note_type = row.get::<_, u8>(4)?; let sender = read_from_blob_column(row, 5)?; @@ -881,7 +882,7 @@ pub fn select_note_inclusion_proofs( let note_index = row.get(3)?; // SAFETY: We can assume the batch and note indices stored in the DB are valid so this // should never panic. - let node_index_in_block = BlockNoteIndex::new(batch_index, note_index).leaf_index_value(); + let node_index_in_block = BlockNoteIndex::new(batch_index, note_index).expect("batch and note index from DB should be valid").leaf_index_value(); let merkle_path_data = row.get_ref(4)?.as_blob()?; let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index ac5b73b5c..98faf1f1e 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -180,7 +180,7 @@ fn sql_select_notes() { for i in 0..10 { let note = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, i as usize), + note_index: BlockNoteIndex::new(0, i as usize).unwrap(), note_id: num_to_rpo_digest(u64::from(i)), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -220,7 +220,7 @@ fn sql_select_notes_different_execution_hints() { let note_none = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 0), + note_index: BlockNoteIndex::new(0, 0).unwrap(), note_id: num_to_rpo_digest(0), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -244,7 +244,7 @@ fn sql_select_notes_different_execution_hints() { let note_always = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 1), + note_index: BlockNoteIndex::new(0, 1).unwrap(), note_id: num_to_rpo_digest(1), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -268,7 +268,7 @@ fn sql_select_notes_different_execution_hints() { let note_after_block = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, 2), + note_index: BlockNoteIndex::new(0, 2).unwrap(), note_id: num_to_rpo_digest(2), metadata: NoteMetadata::new( ACCOUNT_ID_OFF_CHAIN_SENDER.try_into().unwrap(), @@ -317,7 +317,7 @@ fn sql_unconsumed_network_notes() { }; let note = NoteRecord { block_num, - note_index: BlockNoteIndex::new(0, i as usize), + note_index: BlockNoteIndex::new(0, i as usize).unwrap(), note_id: num_to_rpo_digest(i), metadata: NoteMetadata::new( account_id, @@ -883,7 +883,7 @@ fn notes() { assert!(res.is_empty()); // test insertion - let note_index = BlockNoteIndex::new(0, 2); + let note_index = BlockNoteIndex::new(0, 2).unwrap(); let note_id = 
num_to_rpo_digest(3); let tag = 5u32; let sender = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); From d9053e1320262868887218497980e0d240c062fc Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 10:58:02 +0100 Subject: [PATCH 29/32] chore: Apply `make format` --- crates/store/src/db/sql/mod.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 77400f2f4..c4f1894ba 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -882,7 +882,9 @@ pub fn select_note_inclusion_proofs( let note_index = row.get(3)?; // SAFETY: We can assume the batch and note indices stored in the DB are valid so this // should never panic. - let node_index_in_block = BlockNoteIndex::new(batch_index, note_index).expect("batch and note index from DB should be valid").leaf_index_value(); + let node_index_in_block = BlockNoteIndex::new(batch_index, note_index) + .expect("batch and note index from DB should be valid") + .leaf_index_value(); let merkle_path_data = row.get_ref(4)?.as_blob()?; let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; From d8f2e43b1a9964eab27de2bc1e8ce949d3745aad Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 12:51:54 +0100 Subject: [PATCH 30/32] chore: Use miden-base `next` branch --- Cargo.lock | 10 +++++----- Cargo.toml | 6 +++--- crates/block-producer/Cargo.toml | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7256368a5..fe8458343 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1706,7 +1706,7 @@ dependencies = [ [[package]] name = "miden-block-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-crypto", "miden-lib", @@ -1795,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-assembly", "miden-objects", @@ -1991,7 +1991,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2050,7 +2050,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "async-trait", "miden-lib", @@ -2067,7 +2067,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=pgackst-empty-blocks#b6d06c32f9ec8e9926a39a35fc7b129b8bbdc9be" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-core", 
"miden-crypto", diff --git a/Cargo.toml b/Cargo.toml index b4c92643a..7d8563b0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,16 +28,16 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-processor = { version = "0.12" } -miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "pgackst-empty-blocks" } +miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 65abc437d..6c354019c 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,14 +21,14 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { workspace = true } -miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-empty-blocks" } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } miden-processor = { workspace = true } miden-tx = { workspace = true } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "pgackst-empty-blocks" } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } rand = { version = "0.8" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } From 3a4eb171acea7819f984ba6605c4316a4f735696 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 14:13:57 +0100 Subject: [PATCH 31/32] chore: Increase recursion limit --- bin/node/src/main.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 599e2a6fc..9cfa0b769 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -1,3 +1,6 @@ +// This is required due to a long chain of and_then in BlockBuilder::build_block causing rust error E0275. 
+#![recursion_limit = "256"] + use std::path::PathBuf; use anyhow::{anyhow, Context}; From 70c8d885d7f8e2e3fe5d01853e8e060f41751d1f Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Mon, 24 Feb 2025 14:26:10 +0100 Subject: [PATCH 32/32] chore: `make format` --- bin/node/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 9cfa0b769..2845b9395 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -1,4 +1,5 @@ -// This is required due to a long chain of and_then in BlockBuilder::build_block causing rust error E0275. +// This is required due to a long chain of and_then in BlockBuilder::build_block causing rust error +// E0275. #![recursion_limit = "256"] use std::path::PathBuf;
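For readers following patch 28 (chore: Add expect messages on `BlockNoteIndex`), the sketch below illustrates the call-site convention those diffs adopt once `BlockNoteIndex::new` becomes fallible: production read paths use `.expect()` with a message documenting why the value is trusted, while tests use a bare `.unwrap()`. This is a minimal, self-contained Rust sketch, not the real miden-objects API: the constants, struct fields, and constructor body are hypothetical stand-ins chosen only to make the example compile; the `.expect`/`.unwrap` pattern and the expect message are the parts taken from the patches above.

// Hypothetical stand-in for the fallible constructor pattern; bounds are assumptions.
const MAX_BATCHES_PER_BLOCK: usize = 64;
const MAX_NOTES_PER_BATCH: usize = 64;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct BlockNoteIndex {
    batch_idx: usize,
    note_idx_in_batch: usize,
}

impl BlockNoteIndex {
    /// Returns `None` when either index is out of bounds, mirroring the
    /// fallible constructor the patch series switches to.
    fn new(batch_idx: usize, note_idx_in_batch: usize) -> Option<Self> {
        (batch_idx < MAX_BATCHES_PER_BLOCK && note_idx_in_batch < MAX_NOTES_PER_BATCH)
            .then_some(Self { batch_idx, note_idx_in_batch })
    }

    /// Flattened position of the note within the block's note tree
    /// (illustrative formula, not the production layout).
    fn leaf_index_value(&self) -> usize {
        self.batch_idx * MAX_NOTES_PER_BATCH + self.note_idx_in_batch
    }
}

fn main() {
    // Production read path: indices come from the database, which is trusted
    // to hold valid values, so a descriptive `expect` documents the invariant.
    let from_db = BlockNoteIndex::new(0, 2)
        .expect("batch and note index from DB should be valid");
    println!("leaf index: {}", from_db.leaf_index_value());

    // Test path: a plain `unwrap()` is acceptable because the inputs are literals.
    let in_test = BlockNoteIndex::new(0, 0).unwrap();
    assert_eq!(in_test.leaf_index_value(), 0);
}

The split mirrors the diffs: store-side readers (`crates/store/src/db/mod.rs`, `crates/store/src/db/sql/mod.rs`) document the DB-validity invariant in the expect message, whereas test fixtures in `crates/store/src/db/tests.rs` and the block-producer test utils simply unwrap.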