diff --git a/CHANGELOG.md b/CHANGELOG.md index e51e27f17..81da63401 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,12 +11,14 @@ - Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). - Instrument the block-producer's block building process (#676). +- Use `LocalBlockProver` for block building (#709). - Initial developer and operator guides covering monitoring (#699). ### Changes - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). +- [BREAKING] Update `GetBlockInputs` RPC (#709). - [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). - [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). diff --git a/Cargo.lock b/Cargo.lock index a7eb2e6ce..fe8458343 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -105,9 +105,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "arrayref" @@ -385,9 +385,9 @@ checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "blake3" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "1230237285e3e10cde447185e8975408ae24deaa67205ce684805c25bc0c7937" dependencies = [ "arrayref", "arrayvec", @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.11" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "jobserver", "libc", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.28" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" +checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" dependencies = [ "clap_builder", "clap_derive", @@ -534,9 +534,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.27" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" dependencies = [ "anstream", "anstyle", @@ -811,9 +811,9 @@ checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d" [[package]] name = "either" -version = "1.13.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "ena" @@ -826,9 +826,9 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] 
name = "errno" @@ -880,6 +880,12 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "fnv" version = "1.0.7" @@ -1046,9 +1052,9 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "h2" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", @@ -1440,15 +1446,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.14.0" @@ -1503,7 +1500,7 @@ dependencies = [ "ena", "itertools 0.11.0", "lalrpop-util", - "petgraph", + "petgraph 0.6.5", "regex", "regex-syntax 0.8.5", "string_cache", @@ -1533,9 +1530,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libloading" @@ -1599,9 +1596,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "logos" @@ -1706,6 +1703,17 @@ dependencies = [ "unicode-width 0.2.0", ] +[[package]] +name = "miden-block-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +dependencies = [ + "miden-crypto", + "miden-lib", + "miden-objects", + "thiserror 2.0.11", +] + [[package]] name = "miden-core" version = "0.12.0" @@ -1728,9 +1736,9 @@ dependencies = [ [[package]] name = "miden-crypto" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1945918276152bd9b8e8434643ad24d4968e075b68a5ed03927b53ac75490a79" +checksum = "1d8f76b64bfbb75705403ec3e2faad6a045544871d9c441758becc55415cfe64" dependencies = [ "blake3", "cc", @@ -1787,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-assembly", "miden-objects", @@ -1870,13 +1878,13 @@ dependencies = [ "futures", "itertools 0.14.0", "miden-air", + "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-test-macro", "miden-node-utils", "miden-objects", "miden-processor", - "miden-stdlib", "miden-tx", "miden-tx-batch-prover", 
"pretty_assertions", @@ -1983,7 +1991,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2042,7 +2050,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "async-trait", "miden-lib", @@ -2059,7 +2067,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-core", "miden-crypto", @@ -2129,9 +2137,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -2300,9 +2308,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" @@ -2383,9 +2391,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" [[package]] name = "parking_lot" @@ -2460,7 +2468,17 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ - "fixedbitset", + "fixedbitset 0.4.2", + "indexmap 2.7.1", +] + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset 0.5.7", "indexmap 2.7.1", ] @@ -2596,9 +2614,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", @@ -2606,16 +2624,16 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = 
"be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck", - "itertools 0.13.0", + "itertools 0.14.0", "log", "multimap", "once_cell", - "petgraph", + "petgraph 0.7.1", "prettyplease", "prost", "prost-types", @@ -2626,12 +2644,12 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.14.0", "proc-macro2", "quote", "syn", @@ -2639,9 +2657,9 @@ dependencies = [ [[package]] name = "prost-reflect" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e92b959d24e05a3e2da1d0beb55b48bc8a97059b8336ea617780bd6addbbfb5a" +checksum = "a7b318f733603136dcc61aa9e77c928d67f87d2436c34ec052ba3f1b5ca219de" dependencies = [ "logos", "miette", @@ -2652,9 +2670,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" dependencies = [ "prost", ] @@ -2771,9 +2789,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ "bitflags", ] @@ -2835,15 +2853,14 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ring" -version = "0.17.8" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", "getrandom 0.2.15", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -2917,9 +2934,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.22" +version = "0.23.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" dependencies = [ "log", "once_cell", @@ -3071,18 +3088,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", @@ -3091,9 +3108,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.139" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -3174,9 +3191,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "smawk" @@ -3301,15 +3318,15 @@ dependencies = [ [[package]] name = "target-triple" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" [[package]] name = "tempfile" -version = "3.16.0" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", @@ -3525,9 +3542,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -3546,9 +3563,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.23" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap 2.7.1", "serde", @@ -3834,9 +3851,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unarray" @@ -3861,9 +3878,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-linebreak" @@ -4357,18 +4374,18 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] [[package]] name = "winter-air" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8fdb702503625f54dcaf9222aa2c7a0b2e868b3eb84b90d1837d68034bf999" +checksum = "827ef2aa5a5ab663936e0a6326286e0fc83321771df0d9ea20c46c72c8baa90d" dependencies = 
[ "libm", "winter-crypto", diff --git a/Cargo.toml b/Cargo.toml index 2c05af39a..7d8563b0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,9 +37,7 @@ miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-processor = { version = "0.12" } -miden-stdlib = { version = "0.12", default-features = false } miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 599e2a6fc..2845b9395 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -1,3 +1,7 @@ +// This is required due to a long chain of and_then in BlockBuilder::build_block causing rust error +// E0275. +#![recursion_limit = "256"] + use std::path::PathBuf; use anyhow::{anyhow, Context}; diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index dabf8d7bd..6c354019c 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,14 +21,14 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { workspace = true } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } miden-processor = { workspace = true } -miden-stdlib = { workspace = true } miden-tx = { workspace = true } -miden-tx-batch-prover = { workspace = true } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } rand = { version = "0.8" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } diff --git a/crates/block-producer/src/block.rs b/crates/block-producer/src/block.rs deleted file mode 100644 index 857a5b018..000000000 --- a/crates/block-producer/src/block.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::collections::BTreeMap; - -use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, - errors::{ConversionError, MissingFieldHelper}, - generated::responses::GetBlockInputsResponse, - AccountInputRecord, NullifierWitness, -}; -use miden_objects::{ - account::AccountId, - block::BlockHeader, - crypto::merkle::{MerklePath, MmrPeaks, SmtProof}, - note::Nullifier, - Digest, -}; - -// BLOCK INPUTS -// ================================================================================================ - -/// Information needed from the store to build a block -#[derive(Clone, Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub accounts: BTreeMap, - - /// The requested nullifiers and their authentication paths - pub nullifiers: BTreeMap, - - /// List of unauthenticated notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -#[derive(Clone, Debug, Default)] -pub struct AccountWitness { - pub hash: Digest, - pub proof: MerklePath, -} - -impl TryFrom for BlockInputs { - type Error = 
ConversionError; - - fn try_from(response: GetBlockInputsResponse) -> Result { - let block_header: BlockHeader = response - .block_header - .ok_or(miden_node_proto::generated::block::BlockHeader::missing_field("block_header"))? - .try_into()?; - - let chain_peaks = { - // setting the number of leaves to the current block number gives us one leaf less than - // what is currently in the chain MMR (i.e., chain MMR with block_num = 1 has 2 leave); - // this is because GetBlockInputs returns the state of the chain MMR as of one block - // ago so that block_header.chain_root matches the hash of MMR peaks. - let num_leaves = block_header.block_num().as_usize(); - - MmrPeaks::new( - num_leaves, - response - .mmr_peaks - .into_iter() - .map(TryInto::try_into) - .collect::>()?, - )? - }; - - let accounts = response - .account_states - .into_iter() - .map(|entry| { - let domain: AccountInputRecord = entry.try_into()?; - let witness = AccountWitness { - hash: domain.account_hash, - proof: domain.proof, - }; - Ok((domain.account_id, witness)) - }) - .collect::, ConversionError>>()?; - - let nullifiers = response - .nullifiers - .into_iter() - .map(|entry| { - let witness: NullifierWitness = entry.try_into()?; - Ok((witness.nullifier, witness.proof)) - }) - .collect::, ConversionError>>()?; - - let found_unauthenticated_notes = response - .found_unauthenticated_notes - .ok_or(GetBlockInputsResponse::missing_field("found_authenticated_notes"))? - .try_into()?; - - Ok(Self { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } -} diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index bbdc23c5a..9a24f4aa4 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,30 +1,23 @@ -use std::{ - collections::BTreeSet, - ops::{Add, Range}, -}; +use std::ops::Range; use futures::FutureExt; +use miden_block_prover::LocalBlockProver; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ - account::AccountId, batch::ProvenBatch, - block::{Block, BlockNumber}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote}, + block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, + note::NoteHeader, + MIN_PROOF_SECURITY_LEVEL, }; use rand::Rng; use tokio::time::Duration; use tracing::{instrument, Span}; use crate::{ - block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; -pub(crate) mod prover; - -use self::prover::{block_witness::BlockWitness, BlockProver}; - // BLOCK BUILDER // ================================================================================================= @@ -39,7 +32,9 @@ pub struct BlockBuilder { pub failure_rate: f64, pub store: StoreClient, - pub block_kernel: BlockProver, + + /// The prover used to prove a proposed block into a proven block. + pub block_prover: LocalBlockProver, } impl BlockBuilder { @@ -49,7 +44,7 @@ impl BlockBuilder { // Note: The range cannot be empty. 
             simulated_proof_time: Duration::ZERO..Duration::from_millis(1),
             failure_rate: 0.0,
-            block_kernel: BlockProver::new(),
+            block_prover: LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL),
             store,
         }
     }
@@ -98,7 +93,9 @@ impl BlockBuilder {
         Self::select_block(mempool)
             .inspect(SelectedBlock::inject_telemetry)
             .then(|selected| self.get_block_inputs(selected))
-            .inspect_ok(BlockSummaryAndInputs::inject_telemetry)
+            .inspect_ok(BlockBatchesAndInputs::inject_telemetry)
+            .and_then(|inputs| self.propose_block(inputs))
+            .inspect_ok(ProposedBlock::inject_telemetry)
             .and_then(|inputs| self.prove_block(inputs))
             .inspect_ok(ProvenBlock::inject_telemetry)
             // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot
@@ -119,67 +116,98 @@ impl BlockBuilder {
         SelectedBlock { block_number, batches }
     }
 
+    /// Fetches block inputs from the store for the [`SelectedBlock`].
+    ///
+    /// For a given set of batches, we need to get the following block inputs from the store:
+    ///
+    /// - Note inclusion proofs for unauthenticated notes (not required to be complete due to the
+    ///   possibility of note erasure)
+    /// - A chain MMR with:
+    ///   - All blocks referenced by batches
+    ///   - All blocks referenced by note inclusion proofs
+    /// - Account witnesses for all accounts updated in the block
+    /// - Nullifier witnesses for all nullifiers created in the block
+    ///   - Due to note erasure, the set of nullifiers the block creates is not necessarily equal to
+    ///     the union of all nullifiers created in proven batches. However, since we don't yet know
+    ///     which nullifiers the block will actually create, we fetch witnesses for all nullifiers
+    ///     created by batches. If we knew that a certain note would be erased, we would not have to
+    ///     supply a nullifier witness for it.
     #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)]
     async fn get_block_inputs(
         &self,
         selected_block: SelectedBlock,
-    ) -> Result<BlockSummaryAndInputs, BuildBlockError> {
+    ) -> Result<BlockBatchesAndInputs, BuildBlockError> {
         let SelectedBlock { block_number: _, batches } = selected_block;
-        let summary = BlockSummary::summarize_batches(&batches);
+
+        let batch_iter = batches.iter();
+
+        let unauthenticated_notes_iter = batch_iter.clone().flat_map(|batch| {
+            // Note: .cloned() shouldn't be necessary but not having it produces an odd lifetime
+            // error in BlockProducer::serve. Not sure if there's a better fix.
Error: + // implementation of `FnOnce` is not general enough + // closure with signature `fn(&InputNoteCommitment) -> miden_objects::note::NoteId` must + // implement `FnOnce<(&InputNoteCommitment,)>` ...but it actually implements + // `FnOnce<(&InputNoteCommitment,)>` + batch + .input_notes() + .iter() + .cloned() + .filter_map(|note| note.header().map(NoteHeader::id)) + }); + let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); + let account_ids_iter = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); + let created_nullifiers_iter = batch_iter.flat_map(ProvenBatch::created_nullifiers); let inputs = self .store .get_block_inputs( - summary.updated_accounts.iter().copied(), - summary.nullifiers.iter(), - summary.dangling_notes.iter(), + account_ids_iter, + created_nullifiers_iter, + unauthenticated_notes_iter, + block_references_iter, ) .await .map_err(BuildBlockError::GetBlockInputsFailed)?; - let missing_notes: Vec<_> = summary - .dangling_notes - .difference(&inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); - } + Ok(BlockBatchesAndInputs { batches, inputs }) + } + + #[instrument(target = COMPONENT, name = "block_builder.propose_block", skip_all, err)] + async fn propose_block( + &self, + batches_inputs: BlockBatchesAndInputs, + ) -> Result { + let BlockBatchesAndInputs { batches, inputs } = batches_inputs; - Ok(BlockSummaryAndInputs { batches, summary, inputs }) + let proposed_block = + ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?; + + Ok(proposed_block) } #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, - preimage: BlockSummaryAndInputs, + proposed_block: ProposedBlock, ) -> Result { - let BlockSummaryAndInputs { batches, summary, inputs } = preimage; - - let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; - - let new_block_header = self.block_kernel.prove(block_header_witness)?; - - let block = Block::new( - new_block_header, - updated_accounts, - summary.output_notes, - summary.nullifiers, - )?; + let proven_block = self + .block_prover + .prove(proposed_block) + .map_err(BuildBlockError::ProveBlockFailed)?; self.simulate_proving().await; - Ok(ProvenBlock { block }) + Ok(proven_block) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - proven_block: ProvenBlock, + built_block: ProvenBlock, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&proven_block.block) + .apply_block(&built_block) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; @@ -219,65 +247,12 @@ impl BlockBuilder { } } -struct BlockSummary { - updated_accounts: BTreeSet, - nullifiers: Vec, - output_notes: Vec>, - dangling_notes: BTreeSet, -} - -impl BlockSummary { - #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] - fn summarize_batches(batches: &[ProvenBatch]) -> Self { - let updated_accounts: BTreeSet = batches - .iter() - .flat_map(ProvenBatch::account_updates) - .map(|(account_id, _)| *account_id) - .collect(); - - let output_notes: Vec<_> = - batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - - let nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - // Populate set of output notes from all batches - let 
output_notes_set: BTreeSet<_> = output_notes
-            .iter()
-            .flat_map(|output_notes| output_notes.iter().map(OutputNote::id))
-            .collect();
-
-        // Build a set of unauthenticated input notes for this block which do not have a
-        // matching output note produced in this block
-        let dangling_notes: BTreeSet<_> = batches
-            .iter()
-            .flat_map(ProvenBatch::input_notes)
-            .filter_map(InputNoteCommitment::header)
-            .map(NoteHeader::id)
-            .filter(|note_id| !output_notes_set.contains(note_id))
-            .collect();
-
-        Self {
-            updated_accounts,
-            nullifiers,
-            output_notes,
-            dangling_notes,
-        }
-    }
-}
-
+/// A wrapper around batches selected for inclusion in a block, primarily used to be able to inject
+/// telemetry in-between the selection and fetching the required [`BlockInputs`].
 struct SelectedBlock {
     block_number: BlockNumber,
     batches: Vec<ProvenBatch>,
 }
-struct BlockSummaryAndInputs {
-    batches: Vec<ProvenBatch>,
-    summary: BlockSummary,
-    inputs: BlockInputs,
-}
-struct ProvenBlock {
-    block: Block,
-}
 
 impl SelectedBlock {
     fn inject_telemetry(&self) {
@@ -287,41 +262,85 @@ impl SelectedBlock {
     }
 }
 
-impl BlockSummaryAndInputs {
+/// A wrapper around the inputs needed to build a [`ProposedBlock`], primarily used to be able to
+/// inject telemetry in-between fetching block inputs and proposing the block.
+struct BlockBatchesAndInputs {
+    batches: Vec<ProvenBatch>,
+    inputs: BlockInputs,
+}
+
+impl BlockBatchesAndInputs {
     fn inject_telemetry(&self) {
         let span = Span::current();
         // SAFETY: We do not expect to have more than u32::MAX of any count per block.
         span.set_attribute(
             "block.updated_accounts.count",
-            i64::try_from(self.summary.updated_accounts.len())
+            i64::try_from(self.inputs.account_witnesses().len())
                 .expect("less than u32::MAX account updates"),
         );
         span.set_attribute(
-            "block.output_notes.count",
-            i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len())))
-                .expect("less than u32::MAX output notes"),
+            "block.erased_note_proofs.count",
+            i64::try_from(self.inputs.unauthenticated_note_proofs().len())
+                .expect("less than u32::MAX unauthenticated notes"),
         );
+    }
+}
+
+/// An extension trait used only locally to implement telemetry injection.
+trait TelemetryInjectorExt {
+    /// Inject [`tracing`] telemetry from self.
+    fn inject_telemetry(&self);
+}
+
+impl TelemetryInjectorExt for ProposedBlock {
+    /// Emit the input and output note related attributes. We do this here since this is the
+    /// earliest point we can set attributes after note erasure was done.
+ fn inject_telemetry(&self) { + let span = Span::current(); + span.set_attribute( "block.nullifiers.count", - i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + u32::try_from(self.created_nullifiers().len()) + .expect("should have less than u32::MAX created nullifiers"), ); + let num_block_created_notes = self + .output_note_batches() + .iter() + .fold(0, |acc, output_notes| acc + output_notes.len()); + span.set_attribute( + "block.output_notes.count", + u32::try_from(num_block_created_notes) + .expect("should have less than u32::MAX output notes"), + ); + + let num_batch_created_notes = + self.batches().iter().fold(0, |acc, batch| acc + batch.output_notes().len()); + span.set_attribute( + "block.batches.output_notes.count", + u32::try_from(num_batch_created_notes) + .expect("should have less than u32::MAX erased notes"), + ); + + let num_erased_notes = num_batch_created_notes + .checked_sub(num_block_created_notes) + .expect("all batches in the block should not create fewer notes than the block itself"); span.set_attribute( - "block.dangling_notes.count", - i64::try_from(self.summary.dangling_notes.len()) - .expect("less than u32::MAX dangling notes"), + "block.erased_notes.count", + u32::try_from(num_erased_notes).expect("should have less than u32::MAX erased notes"), ); } } -impl ProvenBlock { +impl TelemetryInjectorExt for ProvenBlock { fn inject_telemetry(&self) { let span = Span::current(); - let header = self.block.header(); + let header = self.header(); span.set_attribute("block.hash", header.hash()); span.set_attribute("block.sub_hash", header.sub_hash()); span.set_attribute("block.parent_hash", header.prev_hash()); + span.set_attribute("block.timestamp", header.timestamp()); span.set_attribute("block.protocol.version", i64::from(header.version())); diff --git a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm b/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm deleted file mode 100644 index 309501d59..000000000 --- a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm +++ /dev/null @@ -1,244 +0,0 @@ -#! Note: For now, the "block kernel" only computes the account root. Eventually, it will compute -#! the entire block header. -#! -#! Stack inputs: [num_accounts_updated, OLD_ACCOUNT_ROOT, NEW_ACCOUNT_HASH_0, account_id_0, ... , -#! NEW_ACCOUNT_HASH_n, account_id_n] - -use.std::collections::smt -use.std::collections::mmr -use.std::sys - -const.ACCOUNT_TREE_DEPTH=64 -const.BLOCK_NOTES_BATCH_TREE_DEPTH=6 -const.CHAIN_MMR_PTR=1000 - -#! Compute the account root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_accounts_updated, OLD_ACCOUNT_ROOT, [NEW_ACCOUNT_HASH_i, account_id_i]] -#! Outputs: -#! 
Operand stack: [NEW_ACCOUNT_ROOT] -proc.compute_account_root - # move the number of updated accounts and an old account root to the operand stack - adv_push.5 - # OS => [OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - while.true - # num_accounts_updated here serves as a counter, so rename it accordingly - # old account root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # move the account hash to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [account_id_i, [NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # move the account id to the operand stack, push the account tree depth - adv_push.1 push.ACCOUNT_TREE_DEPTH - # OS => [account_tree_depth, account_id_i, ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the note root. -#! -#! Each batch contains a tree of depth 10 for its created notes. The block's created notes tree is -#! created by aggregating up to 2^6 tree roots coming from the batches contained in the block. -#! -#! `SMT_EMPTY_ROOT` must be `E16`, the root of the empty tree of depth 16. If less than 2^6 batches -#! are contained in the block, `E10` is used as the padding value; this is derived from the fact -#! that `SMT_EMPTY_ROOT` is `E16`, and that our tree has depth 6. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_notes_updated, SMT_EMPTY_ROOT, [BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] -#! Outputs: -#! 
Operand stack: [NOTES_ROOT] -proc.compute_note_root - # move the number of updated notes and empty root to the operand stack - adv_push.5 - # OS => [SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - while.true - # num_notes_updated here serves as a counter, so rename it accordingly - # empty root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # move the batch note tree root to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [batch_note_root_idx_i, [BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # move the batch note root index to the operand stack, push the block notes batch tree depth - adv_push.1 push.BLOCK_NOTES_BATCH_TREE_DEPTH - # OS => [batch_tree_depth, batch_note_root_idx_i, ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the nullifier root. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_produced_nullifiers, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, [NULLIFIER_i]] -#! Outputs: -#! Operand stack: [NULLIFIER_ROOT] -proc.compute_nullifier_root - # move the number of produced nullifiers, old root and nullifier value to the operand stack; - # move nullifier value below the root - adv_push.9 swapw - # OS => [OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - # assess if we should loop - dup.8 neq.0 - # OS => [flag, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - while.true - # num_produced_nullifiers here serves as a counter, so rename it accordingly - # old nullifier root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_i]] - - # move the nullifier hash to the operand stack - adv_push.4 - # OS => [NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # dup the nullifier value - dupw.2 - # OS => [NULLIFIER_VALUE, NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - exec.smt::set - # OS => [OLD_VALUE, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # Check that OLD_VALUE == 0 (i.e. that nullifier was indeed not previously produced) - assertz assertz assertz assertz - # OS => [ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # loop counter - movup.8 sub.1 dup movdn.9 neq.0 - # OS => [flag, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - end - - # drop the counter and the nullifier value - swapw dropw movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the chain MMR root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [PREV_BLOCK_HASH_TO_INSERT, PREV_CHAIN_MMR_HASH] -#! Advice map: { -#! 
PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [CHAIN_MMR_ROOT] -proc.compute_chain_mmr_root - # move the previous block hash and chain MMR hash to the operand stack - adv_push.8 - # OS => [PREV_CHAIN_MMR_HASH, PREV_BLOCK_HASH_TO_INSERT] - # AS => [] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_CHAIN_MMR_HASH, chain_mmr_ptr, PREV_BLOCK_HASH_TO_INSERT] - - # load the chain MMR (as of previous block) at memory location CHAIN_MMR_PTR - exec.mmr::unpack - # OS => [PREV_BLOCK_HASH_TO_INSERT] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_BLOCK_HASH_TO_INSERT, chain_mmr_ptr] - - # add PREV_BLOCK_HASH_TO_INSERT to chain MMR - exec.mmr::add - # OS => [] - - # Compute new MMR root - push.CHAIN_MMR_PTR exec.mmr::pack - # OS => [CHAIN_MMR_ROOT] -end - -#! Inputs: -#! Operand stack: [] -#! Advice stack: [, , , ] -#! Advice map: { -#! PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] -begin - exec.compute_account_root mem_storew.0 dropw - # => [, , ] - - exec.compute_note_root mem_storew.4 dropw - # => [, ] - - exec.compute_nullifier_root mem_storew.8 dropw - # => [] - - exec.compute_chain_mmr_root - # => [CHAIN_MMR_ROOT] - - # Load output on stack - padw mem_loadw.8 padw mem_loadw.4 padw mem_loadw.0 - # => [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] - - # truncate the stack - exec.sys::truncate_stack -end diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs deleted file mode 100644 index a0972e541..000000000 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ /dev/null @@ -1,322 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::{BatchAccountUpdate, ProvenBatch}, - block::{BlockAccountUpdate, BlockHeader}, - crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, - note::Nullifier, - transaction::TransactionId, - vm::{AdviceInputs, StackInputs}, - Digest, Felt, BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, ZERO, -}; - -use crate::{ - block::BlockInputs, - errors::{BlockProverError, BuildBlockError}, -}; - -// BLOCK WITNESS -// ================================================================================================= - -/// Provides inputs to the `BlockKernel` so that it can generate the new header. -#[derive(Debug, PartialEq)] -pub struct BlockWitness { - pub(super) updated_accounts: Vec<(AccountId, AccountUpdateWitness)>, - /// (`batch_index`, `created_notes_root`) for batches that contain notes - pub(super) batch_created_notes_roots: BTreeMap, - pub(super) produced_nullifiers: BTreeMap, - pub(super) chain_peaks: MmrPeaks, - pub(super) prev_header: BlockHeader, -} - -impl BlockWitness { - pub fn new( - mut block_inputs: BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(Self, Vec), BuildBlockError> { - // This limit should be enforced by the mempool. 
- assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); - - Self::validate_nullifiers(&block_inputs, batches)?; - - let batch_created_notes_roots = batches - .iter() - .enumerate() - .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| (batch_index, batch.output_notes_tree().root())) - .collect(); - - // Order account updates by account ID and each update's initial state hash. - // - // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = - BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { - updated_accounts - .entry(*account_id) - .or_default() - .insert(update.initial_state_commitment(), update.clone()); - } - - // Build account witnesses. - let mut account_witnesses = Vec::with_capacity(updated_accounts.len()); - let mut block_updates = Vec::with_capacity(updated_accounts.len()); - - for (account_id, mut updates) in updated_accounts { - let (initial_state_hash, proof) = block_inputs - .accounts - .remove(&account_id) - .map(|witness| (witness.hash, witness.proof)) - .ok_or(BuildBlockError::MissingAccountInput(account_id))?; - - let mut details: Option = None; - - // Chronologically chain updates for this account together using the state hashes to - // link them. - let mut transactions = Vec::new(); - let mut current_hash = initial_state_hash; - while !updates.is_empty() { - let update = updates.remove(¤t_hash).ok_or_else(|| { - BuildBlockError::InconsistentAccountStateTransition( - account_id, - current_hash, - updates.keys().copied().collect(), - ) - })?; - - current_hash = update.final_state_commitment(); - let (update_transactions, update_details) = update.into_parts(); - transactions.extend(update_transactions); - - details = Some(match details { - None => update_details, - Some(details) => details.merge(update_details).map_err(|source| { - BuildBlockError::AccountUpdateError { account_id, source } - })?, - }); - } - - account_witnesses.push(( - account_id, - AccountUpdateWitness { - initial_state_hash, - final_state_hash: current_hash, - proof, - transactions: transactions.clone(), - }, - )); - - block_updates.push(BlockAccountUpdate::new( - account_id, - current_hash, - details.expect("Must be some by now"), - transactions, - )); - } - - if !block_inputs.accounts.is_empty() { - return Err(BuildBlockError::ExtraStoreData( - block_inputs.accounts.keys().copied().collect(), - )); - } - - Ok(( - Self { - updated_accounts: account_witnesses, - batch_created_notes_roots, - produced_nullifiers: block_inputs.nullifiers, - chain_peaks: block_inputs.chain_peaks, - prev_header: block_inputs.block_header, - }, - block_updates, - )) - } - - /// Converts [`BlockWitness`] into inputs to the block kernel program - pub(super) fn into_program_inputs( - self, - ) -> Result<(AdviceInputs, StackInputs), BlockProverError> { - let advice_inputs = self.build_advice_inputs()?; - - Ok((advice_inputs, StackInputs::default())) - } - - /// Returns an iterator over all transactions which affected accounts in the block with - /// corresponding account IDs. 
- pub(super) fn transactions(&self) -> impl Iterator + '_ { - self.updated_accounts.iter().flat_map(|(account_id, update)| { - update.transactions.iter().map(move |tx_id| (*tx_id, *account_id)) - }) - } - - // HELPERS - // --------------------------------------------------------------------------------------------- - - /// Validates that the nullifiers returned from the store are the same the produced nullifiers - /// in the batches. Note that validation that the value of the nullifiers is `0` will be - /// done in MASM. - fn validate_nullifiers( - block_inputs: &BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(), BuildBlockError> { - let produced_nullifiers_from_store: BTreeSet = - block_inputs.nullifiers.keys().copied().collect(); - - let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - if produced_nullifiers_from_store == produced_nullifiers_from_batches { - Ok(()) - } else { - let differing_nullifiers: Vec = produced_nullifiers_from_store - .symmetric_difference(&produced_nullifiers_from_batches) - .copied() - .collect(); - - Err(BuildBlockError::InconsistentNullifiers(differing_nullifiers)) - } - } - - /// Builds the advice inputs to the block kernel - fn build_advice_inputs(self) -> Result { - let advice_stack = { - let mut advice_stack = Vec::new(); - - // add account stack inputs to the advice stack - { - let mut account_data = Vec::new(); - let mut num_accounts_updated: u64 = 0; - for (idx, (account_id, account_update)) in self.updated_accounts.iter().enumerate() - { - account_data.extend(account_update.final_state_hash); - account_data.push(account_id.prefix().as_felt()); - - let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); - num_accounts_updated = idx + 1; - } - - // append number of accounts updated - advice_stack.push(num_accounts_updated.try_into().expect( - "updated accounts number is greater than or equal to the field modulus", - )); - - // append initial account root - advice_stack.extend(self.prev_header.account_root()); - - // append the updated accounts data - advice_stack.extend(account_data); - } - - // add notes stack inputs to the advice stack - { - // append the number of updated notes - advice_stack - .push(Felt::try_from(self.batch_created_notes_roots.len() as u64).expect( - "notes roots number is greater than or equal to the field modulus", - )); - - // append the empty root - let empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - advice_stack.extend(*empty_root); - - for (batch_index, batch_created_notes_root) in &self.batch_created_notes_roots { - advice_stack.extend(batch_created_notes_root.iter()); - - let batch_index = Felt::try_from(*batch_index as u64) - .expect("batch index is greater than or equal to the field modulus"); - advice_stack.push(batch_index); - } - } - - // Nullifiers stack inputs - { - let num_produced_nullifiers: Felt = (self.produced_nullifiers.len() as u64) - .try_into() - .expect("nullifiers number is greater than or equal to the field modulus"); - - // append number of nullifiers - advice_stack.push(num_produced_nullifiers); - - // append initial nullifier root - advice_stack.extend(self.prev_header.nullifier_root()); - - // append nullifier value (`[block_num, 0, 0, 0]`) - let block_num = self.prev_header.block_num() + 1; - advice_stack.extend([block_num.into(), ZERO, ZERO, ZERO]); - - for nullifier in self.produced_nullifiers.keys() { - advice_stack.extend(nullifier.inner()); - } - } - - // Chain MMR stack inputs - 
{ - advice_stack.extend(self.prev_header.hash()); - advice_stack.extend(self.chain_peaks.hash_peaks()); - } - - advice_stack - }; - - let merkle_store = { - let mut merkle_store = MerkleStore::default(); - - // add accounts merkle paths - merkle_store - .add_merkle_paths(self.updated_accounts.into_iter().map( - |(account_id, AccountUpdateWitness { initial_state_hash, proof, .. })| { - (account_id.prefix().into(), initial_state_hash, proof) - }, - )) - .map_err(BlockProverError::InvalidMerklePaths)?; - - // add nullifiers merkle paths - merkle_store - .add_merkle_paths(self.produced_nullifiers.iter().map(|(nullifier, proof)| { - // Note: the initial value for all nullifiers in the tree is `[0, 0, 0, 0]` - ( - u64::from(nullifier.most_significant_felt()), - Digest::default(), - proof.path().clone(), - ) - })) - .map_err(BlockProverError::InvalidMerklePaths)?; - - merkle_store - }; - - let advice_map: Vec<_> = self - .produced_nullifiers - .values() - .map(|proof| (proof.leaf().hash(), proof.leaf().to_elements())) - .chain(std::iter::once(mmr_peaks_advice_map_key_value(&self.chain_peaks))) - .collect(); - - let advice_inputs = AdviceInputs::default() - .with_merkle_store(merkle_store) - .with_map(advice_map) - .with_stack(advice_stack); - - Ok(advice_inputs) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(super) struct AccountUpdateWitness { - pub initial_state_hash: Digest, - pub final_state_hash: Digest, - pub proof: MerklePath, - pub transactions: Vec, -} - -// HELPERS -// ================================================================================================= - -// Generates the advice map key/value for Mmr peaks -fn mmr_peaks_advice_map_key_value(peaks: &MmrPeaks) -> (Digest, Vec) { - let mut elements = vec![Felt::new(peaks.num_leaves() as u64), ZERO, ZERO, ZERO]; - elements.extend(peaks.flatten_and_pad_peaks()); - - (peaks.hash_peaks(), elements) -} diff --git a/crates/block-producer/src/block_builder/prover/mod.rs b/crates/block-producer/src/block_builder/prover/mod.rs deleted file mode 100644 index ded93a7d4..000000000 --- a/crates/block-producer/src/block_builder/prover/mod.rs +++ /dev/null @@ -1,132 +0,0 @@ -use std::time::{SystemTime, UNIX_EPOCH}; - -use miden_lib::transaction::TransactionKernel; -use miden_objects::{ - assembly::Assembler, - block::{compute_tx_hash, BlockHeader}, - Digest, -}; -use miden_processor::{execute, DefaultHost, ExecutionOptions, MemAdviceProvider, Program}; -use miden_stdlib::StdLibrary; - -use self::block_witness::BlockWitness; -use crate::errors::{BlockProverError, BuildBlockError}; - -/// The index of the word at which the account root is stored on the output stack. -pub const ACCOUNT_ROOT_WORD_IDX: usize = 0; - -/// The index of the word at which the note root is stored on the output stack. -pub const NOTE_ROOT_WORD_IDX: usize = 4; - -/// The index of the word at which the nullifier root is stored on the output stack. -pub const NULLIFIER_ROOT_WORD_IDX: usize = 8; - -/// The index of the word at which the note root is stored on the output stack. 
-pub const CHAIN_MMR_ROOT_WORD_IDX: usize = 12; - -pub mod block_witness; - -#[cfg(test)] -mod tests; - -const BLOCK_KERNEL_MASM: &str = include_str!("asm/block_kernel.masm"); - -#[derive(Debug)] -pub(crate) struct BlockProver { - kernel: Program, -} - -impl BlockProver { - pub fn new() -> Self { - let account_program = { - let assembler = Assembler::default() - .with_library(StdLibrary::default()) - .expect("failed to load std-lib"); - - assembler - .assemble_program(BLOCK_KERNEL_MASM) - .expect("failed to load account update program") - }; - - Self { kernel: account_program } - } - - // Note: this will eventually all be done in the VM, and also return an `ExecutionProof` - pub fn prove(&self, witness: BlockWitness) -> Result { - let prev_hash = witness.prev_header.hash(); - let block_num = witness.prev_header.block_num() + 1; - let version = witness.prev_header.version(); - - let tx_hash = compute_tx_hash(witness.transactions()); - let (account_root, note_root, nullifier_root, chain_root) = self.compute_roots(witness)?; - - let proof_hash = Digest::default(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("today is expected to be after 1970") - .as_secs() - .try_into() - .expect("timestamp must fit in a `u32`"); - - Ok(BlockHeader::new( - version, - prev_hash, - block_num, - chain_root, - account_root, - nullifier_root, - note_root, - tx_hash, - TransactionKernel::kernel_root(), - proof_hash, - timestamp, - )) - } - - fn compute_roots( - &self, - witness: BlockWitness, - ) -> Result<(Digest, Digest, Digest, Digest), BlockProverError> { - let (advice_inputs, stack_inputs) = witness.into_program_inputs()?; - let mut host = { - let advice_provider = MemAdviceProvider::from(advice_inputs); - - let mut host = DefaultHost::new(advice_provider); - host.load_mast_forest(StdLibrary::default().mast_forest().clone()) - .expect("failed to load mast forest"); - - host - }; - - let execution_output = - execute(&self.kernel, stack_inputs, &mut host, ExecutionOptions::default()) - .map_err(BlockProverError::ProgramExecutionFailed)?; - - let new_account_root = execution_output - .stack_outputs() - .get_stack_word(ACCOUNT_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("account"))?; - - let new_note_root = execution_output - .stack_outputs() - .get_stack_word(NOTE_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("note"))?; - - let new_nullifier_root = execution_output - .stack_outputs() - .get_stack_word(NULLIFIER_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("nullifier"))?; - - let new_chain_mmr_root = execution_output - .stack_outputs() - .get_stack_word(CHAIN_MMR_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("chain mmr"))?; - - Ok(( - new_account_root.into(), - new_note_root.into(), - new_nullifier_root.into(), - new_chain_mmr_root.into(), - )) - } -} diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs deleted file mode 100644 index f020faf62..000000000 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ /dev/null @@ -1,951 +0,0 @@ -use std::{collections::BTreeMap, iter}; - -use assert_matches::assert_matches; -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_objects::{ - account::{ - delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, - }, - batch::ProvenBatch, - block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, - crypto::merkle::{ - EmptySubtreeRoots, 
LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, - }, - note::{NoteExecutionHint, NoteHeader, NoteMetadata, NoteTag, NoteType, Nullifier}, - testing::account_id::{ - ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, - }, - transaction::{OutputNote, ProvenTransaction}, - Felt, BATCH_NOTE_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, ONE, ZERO, -}; - -use self::block_witness::AccountUpdateWitness; -use super::*; -use crate::{ - block::{AccountWitness, BlockInputs}, - test_utils::{ - batch::TransactionBatchConstructor, - block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, - MockProvenTxBuilder, MockStoreSuccessBuilder, - }, -}; - -// BLOCK WITNESS TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of account ids. -/// -/// The store will contain accounts 1 & 2, while the transaction batches will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_account_ids() { - let account_id_1 = AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_2 = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_3 = AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - (account_id_1, AccountWitness::default()), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account( - account_id_3, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert!(block_witness_result.is_err()); -} - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different at least 1 account who's state hash is different. 
-/// -/// Only account 1 will have a different state hash -#[test] -fn block_witness_validation_inconsistent_account_hashes() { - let account_id_1 = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let account_id_2 = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let account_1_hash_store = - Digest::new([Felt::new(1u64), Felt::new(2u64), Felt::new(3u64), Felt::new(4u64)]); - let account_1_hash_batches = - Digest::new([Felt::new(4u64), Felt::new(3u64), Felt::new(2u64), Felt::new(1u64)]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - ( - account_id_1, - AccountWitness { - hash: account_1_hash_store, - proof: MerklePath::default(), - }, - ), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()]); - - let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()]); - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentAccountStateTransition( - account_id, - account_hash_store, - account_hash_batches - )) => { - assert_eq!(account_id, account_id_1); - assert_eq!(account_hash_store, account_1_hash_store); - assert_eq!(account_hash_batches, vec![account_1_hash_batches]); - } - ); -} - -/// Creates two batches which each update the same pair of accounts. -/// -/// The transactions are ordered such that the batches cannot be chronologically ordered -/// themselves: `[tx_x0, tx_y1], [tx_y0, tx_x1]`. This test ensures that the witness is -/// produced correctly as if for a single batch: `[tx_x0, tx_x1, tx_y0, tx_y1]`. 
-#[test] -fn block_witness_multiple_batches_per_account() { - let x_account_id = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let y_account_id = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let x_hashes = [ - Digest::new((0..4).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((4..8).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((8..12).map(Felt::new).collect::>().try_into().unwrap()), - ]; - let y_hashes = [ - Digest::new((12..16).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((16..20).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((20..24).map(Felt::new).collect::>().try_into().unwrap()), - ]; - - let x_txs = [ - MockProvenTxBuilder::with_account(x_account_id, x_hashes[0], x_hashes[1]).build(), - MockProvenTxBuilder::with_account(x_account_id, x_hashes[1], x_hashes[2]).build(), - ]; - let y_txs = [ - MockProvenTxBuilder::with_account(y_account_id, y_hashes[0], y_hashes[1]).build(), - MockProvenTxBuilder::with_account(y_account_id, y_hashes[1], y_hashes[2]).build(), - ]; - - let x_proof = MerklePath::new(vec![Digest::new( - (24..28).map(Felt::new).collect::>().try_into().unwrap(), - )]); - let y_proof = MerklePath::new(vec![Digest::new( - (28..32).map(Felt::new).collect::>().try_into().unwrap(), - )]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let x_witness = AccountWitness { - hash: x_hashes[0], - proof: x_proof.clone(), - }; - let y_witness = AccountWitness { - hash: y_hashes[0], - proof: y_proof.clone(), - }; - let accounts = BTreeMap::from_iter([(x_account_id, x_witness), (y_account_id, y_witness)]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); - let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - let account_witnesses = block_witness.updated_accounts.into_iter().collect::>(); - - let x_expected = AccountUpdateWitness { - initial_state_hash: x_hashes[0], - final_state_hash: *x_hashes.last().unwrap(), - proof: x_proof, - transactions: x_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let y_expected = AccountUpdateWitness { - initial_state_hash: y_hashes[0], - final_state_hash: *y_hashes.last().unwrap(), - proof: y_proof, - transactions: y_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let expected = [(x_account_id, x_expected), (y_account_id, y_expected)].into(); - - assert_eq!(account_witnesses, expected); -} - -// ACCOUNT ROOT TESTS -// ================================================================================================= - -/// Tests that the `BlockProver` computes the proper account root. -/// -/// We assume an initial store with 5 accounts, and all will be updated. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_success() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - let account_final_states = [ - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = account_ids - .iter() - .enumerate() - .map(|(idx, &account_id)| { - MockProvenTxBuilder::with_account( - account_id, - account_initial_states[idx].into(), - account_final_states[idx].into(), - ) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Update SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let block = MockBlockBuilder::new(&store) - .await - .account_updates( - account_ids - .iter() - .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| { - BlockAccountUpdate::new( 
- account_id, - account_hash.into(), - AccountUpdateDetails::Private, - vec![], - ) - }) - .collect(), - ) - .build(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), block.header().account_root()); -} - -/// Test that the current account root is returned if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_empty_batches() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches = Vec::new(); - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), store.account_root().await); -} - -// NOTE ROOT TESTS -// ================================================================================================= - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, and -/// contains no batches -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_batches_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - 
// Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = Vec::new(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, but -/// which contains at least 1 batch. -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_notes_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let batch = ProvenBatch::mocked_from_transactions(vec![]); - vec![batch] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the expected tree when multiple notes were created across -/// many batches. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_success() { - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let notes_created: Vec = [ - Digest::from([Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)]), - Digest::from([Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)]), - Digest::from([Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)]), - ] - .into_iter() - .zip(account_ids.iter()) - .map(|(note_digest, &account_id)| { - NoteHeader::new( - note_digest.into(), - NoteMetadata::new( - account_id, - NoteType::Private, - NoteTag::for_local_use_case(0u16, 0u16).unwrap(), - NoteExecutionHint::none(), - ONE, - ) - .unwrap(), - ) - }) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = notes_created - .iter() - .zip(account_ids.iter()) - .map(|(note, &account_id)| { - let note = OutputNote::Header(*note); - MockProvenTxBuilder::with_account(account_id, Digest::default(), Digest::default()) - .output_notes(vec![note]) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create block note tree to get new root - // --------------------------------------------------------------------------------------------- - - // The current logic is hardcoded to a depth of 6 - // Specifically, we assume the block has up to 2^6 batches, and each batch up to 2^10 created - // notes, where each note is stored at depth 10 in the batch tree. 
- #[allow(clippy::items_after_statements, reason = "assert belongs to this section")] - const _: () = assert!(BLOCK_NOTE_TREE_DEPTH - BATCH_NOTE_TREE_DEPTH == 6); - - // The first 2 txs were put in the first batch; the 3rd was put in the second - let note_tree = BlockNoteTree::with_entries([ - ( - BlockNoteIndex::new(0, 0).unwrap(), - notes_created[0].id(), - *notes_created[0].metadata(), - ), - ( - BlockNoteIndex::new(0, 1).unwrap(), - notes_created[1].id(), - *notes_created[1].metadata(), - ), - ( - BlockNoteIndex::new(1, 0).unwrap(), - notes_created[2].id(), - *notes_created[2].metadata(), - ), - ]) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.note_root(), note_tree.root()); -} - -// NULLIFIER ROOT TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of nullifiers. -/// -/// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. -#[test] -fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let nullifier_1 = batches[0].produced_nullifiers().next().unwrap(); - let nullifier_2 = batches[1].produced_nullifiers().next().unwrap(); - let nullifier_3 = - Nullifier::from([101_u32.into(), 102_u32.into(), 103_u32.into(), 104_u32.into()]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, hash)| { - (account_id, AccountWitness { hash, proof: MerklePath::default() }) - }) - .collect(); - - let nullifiers = BTreeMap::from_iter(vec![ - ( - nullifier_2, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_2.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ( - nullifier_3, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_3.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentNullifiers(nullifiers)) => { - assert_eq!(nullifiers, vec![nullifier_1, nullifier_3]); - } - ); -} - -/// Tests that the block kernel returns the expected nullifier tree when no nullifiers are present -/// in the transaction -#[tokio::test] -async fn compute_nullifier_root_empty_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).build(); - - 
ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let nullifier_smt = Smt::new(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -/// Tests that the block kernel returns the expected nullifier tree when multiple nullifiers are -/// present in the transaction -#[tokio::test] -async fn compute_nullifier_root_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - let nullifiers = [ - batches[0].produced_nullifiers().next().unwrap(), - batches[1].produced_nullifiers().next().unwrap(), - ]; - - // Set up store - // --------------------------------------------------------------------------------------------- - let initial_block_num = BlockNumber::from(42); - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()) - .initial_block_num(initial_block_num) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), nullifiers.iter(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - - // Note that the block number in store 
is 42; the nullifiers get added to the next block (i.e. - // block number 43) - let nullifier_smt = - Smt::with_entries(nullifiers.into_iter().map(|nullifier| { - (nullifier.inner(), [(initial_block_num + 1).into(), ZERO, ZERO, ZERO]) - })) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -// CHAIN MMR ROOT TESTS -// ================================================================================================= - -/// Test that the chain mmr root is as expected if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_empty_mmr() { - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to non-empty MMR (1 peak), and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_1_peak() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - mmr.add(Digest::default()); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to an MMR with 17 peaks, and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_17_peaks() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - for _ in 0..(2_u32.pow(17) - 1) { - mmr.add(Digest::default()); - } - - assert_eq!(mmr.peaks().peaks().len(), 17); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 02bc01568..6c9de3e37 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -1,15 +1,13 @@ +use miden_block_prover::ProvenBlockError; use miden_node_proto::errors::ConversionError; use miden_node_utils::formatting::format_opt; use miden_objects::{ - account::AccountId, block::BlockNumber, - crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, BlockError, Digest, ProposedBatchError, + Digest, ProposedBatchError, ProposedBlockError, }; -use miden_processor::ExecutionError; -use miden_tx_batch_prover::errors::BatchProveError; +use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -143,20 +141,7 @@ pub enum BuildBatchError { ProposeBatchError(#[source] ProposedBatchError), #[error("failed to prove proposed transaction batch")] - ProveBatchError(#[source] BatchProveError), -} - -// Block prover errors -// 
================================================================================================= - -#[derive(Debug, Error)] -pub enum BlockProverError { - #[error("received invalid merkle path")] - InvalidMerklePaths(#[source] MerkleError), - #[error("program execution failed")] - ProgramExecutionFailed(#[source] ExecutionError), - #[error("failed to retrieve {0} root from stack outputs")] - InvalidRootOutput(&'static str), + ProveBatchError(#[source] ProvenBatchError), } // Block building errors @@ -164,31 +149,14 @@ pub enum BlockProverError { #[derive(Debug, Error)] pub enum BuildBlockError { - #[error("failed to compute new block")] - BlockProverFailed(#[from] BlockProverError), #[error("failed to apply block to store")] StoreApplyBlockFailed(#[source] StoreError), #[error("failed to get block inputs from store")] GetBlockInputsFailed(#[source] StoreError), - #[error("block inputs from store did not contain data for account {0}")] - MissingAccountInput(AccountId), - #[error("block inputs from store contained extra data for accounts {0:?}")] - ExtraStoreData(Vec), - #[error("account {0} with state {1} cannot transaction to remaining states {2:?}")] - InconsistentAccountStateTransition(AccountId, Digest, Vec), - #[error( - "block inputs from store and transaction batches produced different nullifiers: {0:?}" - )] - InconsistentNullifiers(Vec), - #[error("unauthenticated transaction notes not found in the store or in outputs of other transactions in the block: {0:?}")] - UnauthenticatedNotesNotFound(Vec), - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - #[error("block construction failed")] - BlockConstructionError(#[from] BlockError), + #[error("failed to propose block")] + ProposeBlockFailed(#[source] ProposedBlockError), + #[error("failed to prove block")] + ProveBlockFailed(#[source] ProvenBlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
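The net effect of the `errors.rs` changes is that block building now fails in one of two clearly separated stages: proposing the block from the store-provided inputs and the selected batches (surfacing `ProposedBlockError`), or proving the proposed block via the `miden-block-prover` crate (surfacing `ProvenBlockError`). The sketch below shows how the new `BuildBlockError` variants would wrap those sources; the `propose`/`prove` closures are hypothetical stand-ins for the builder internals, which are not part of this diff.

```rust
// Sketch only: how the new `BuildBlockError` variants wrap the two failure
// sources introduced by the propose -> prove split. The `propose` and `prove`
// closures are hypothetical stand-ins for the real block-builder internals.
use miden_block_prover::ProvenBlockError;
use miden_objects::{block::ProvenBlock, ProposedBlockError};

use crate::errors::BuildBlockError;

fn build_block<B>(
    propose: impl FnOnce() -> Result<B, ProposedBlockError>,
    prove: impl FnOnce(B) -> Result<ProvenBlock, ProvenBlockError>,
) -> Result<ProvenBlock, BuildBlockError> {
    // Stage 1: assemble and validate the proposed block from the block inputs
    // and the selected batches.
    let proposed = propose().map_err(BuildBlockError::ProposeBlockFailed)?;
    // Stage 2: prove the proposed block; proving failures map to `ProveBlockFailed`.
    prove(proposed).map_err(BuildBlockError::ProveBlockFailed)
}
```

Store failures around this pipeline keep their own variants: fetching inputs maps to `GetBlockInputsFailed`, and persisting the proven block maps to `StoreApplyBlockFailed`.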
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/lib.rs b/crates/block-producer/src/lib.rs index 1cb3b62c9..8aa594232 100644 --- a/crates/block-producer/src/lib.rs +++ b/crates/block-producer/src/lib.rs @@ -10,7 +10,6 @@ mod errors; mod mempool; mod store; -pub mod block; pub mod config; pub mod server; diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 3d8009cda..9d4ad4d40 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -22,7 +22,7 @@ use miden_node_proto::{ use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, - block::{Block, BlockHeader, BlockNumber}, + block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, @@ -32,7 +32,7 @@ use miden_processor::crypto::RpoDigest; use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; -use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; +use crate::{errors::StoreError, COMPONENT}; // TRANSACTION INPUTS // ================================================================================================ @@ -197,13 +197,15 @@ impl StoreClient { pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, + created_nullifiers: impl Iterator + Send, + unauthenticated_notes: impl Iterator + Send, + reference_blocks: impl Iterator + Send, ) -> Result { let request = tonic::Request::new(GetBlockInputsRequest { account_ids: updated_accounts.map(Into::into).collect(), - nullifiers: produced_nullifiers.map(digest::Digest::from).collect(), - unauthenticated_notes: notes.map(digest::Digest::from).collect(), + nullifiers: created_nullifiers.map(digest::Digest::from).collect(), + unauthenticated_notes: unauthenticated_notes.map(digest::Digest::from).collect(), + reference_blocks: reference_blocks.map(|block_num| block_num.as_u32()).collect(), }); let store_response = self.inner.clone().get_block_inputs(request).await?.into_inner(); @@ -228,7 +230,7 @@ impl StoreClient { } #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { let request = tonic::Request::new(ApplyBlockRequest { block: block.to_bytes() }); self.inner.clone().apply_block(request).await.map(|_| ()).map_err(Into::into) diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 37c2041fb..8e4371a3f 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,7 +1,7 @@ use std::collections::BTreeMap; use miden_objects::{ - batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, + batch::{BatchAccountUpdate, BatchId, ProvenBatch}, block::BlockNumber, transaction::{InputNotes, ProvenTransaction}, Digest, @@ -56,16 +56,12 @@ impl TransactionBatchConstructor for ProvenBatch { output_notes.extend(tx.output_notes().iter().cloned()); } - ProvenBatch::new( + ProvenBatch::new_unchecked( BatchId::from_transactions(txs.into_iter()), Digest::default(), BlockNumber::GENESIS, account_updates, InputNotes::new_unchecked(input_notes), - 
BatchNoteTree::with_contiguous_leaves( - output_notes.iter().map(|x| (x.id(), x.metadata())), - ) - .unwrap(), output_notes, BlockNumber::from(u32::MAX), ) diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 03ea004f3..7fcf0bd4c 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,8 +1,9 @@ -use std::iter; - use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, NoteBatch}, + block::{ + BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, OutputNoteBatch, + ProvenBlock, + }, crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, transaction::OutputNote, @@ -10,10 +11,6 @@ use miden_objects::{ }; use super::MockStoreSuccess; -use crate::{ - block::BlockInputs, - block_builder::prover::{block_witness::BlockWitness, BlockProver}, -}; /// Constructs the block we expect to be built given the store state, and a set of transaction /// batches to be applied @@ -71,31 +68,6 @@ pub async fn build_expected_block_header( ) } -/// Builds the "actual" block header; i.e. the block header built using the Miden VM, used in the -/// node -pub async fn build_actual_block_header( - store: &MockStoreSuccess, - batches: Vec, -) -> BlockHeader { - let updated_accounts: Vec<_> = - batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); - let produced_nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - let block_inputs_from_store: BlockInputs = store - .get_block_inputs( - updated_accounts.iter().map(|(&account_id, _)| account_id), - produced_nullifiers.iter(), - iter::empty(), - ) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - BlockProver::new().prove(block_witness).unwrap() -} - #[derive(Debug)] pub struct MockBlockBuilder { store_accounts: SimpleSmt, @@ -103,7 +75,7 @@ pub struct MockBlockBuilder { last_block_header: BlockHeader, updated_accounts: Option>, - created_notes: Option>, + created_notes: Option>, produced_nullifiers: Option>, } @@ -140,7 +112,7 @@ impl MockBlockBuilder { } #[must_use] - pub fn created_notes(mut self, created_notes: Vec) -> Self { + pub fn created_notes(mut self, created_notes: Vec) -> Self { self.created_notes = Some(created_notes); self @@ -153,7 +125,7 @@ impl MockBlockBuilder { self } - pub fn build(self) -> Block { + pub fn build(self) -> ProvenBlock { let created_notes = self.created_notes.unwrap_or_default(); let header = BlockHeader::new( @@ -170,28 +142,27 @@ impl MockBlockBuilder { 1, ); - Block::new( + ProvenBlock::new_unchecked( header, self.updated_accounts.unwrap_or_default(), created_notes, self.produced_nullifiers.unwrap_or_default(), ) - .unwrap() } } pub(crate) fn flatten_output_notes<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> impl Iterator { batches.enumerate().flat_map(|(batch_idx, batch)| { - batch.iter().enumerate().map(move |(note_idx_in_batch, note)| { - (BlockNoteIndex::new(batch_idx, note_idx_in_batch).unwrap(), note) + batch.iter().map(move |(note_idx_in_batch, note)| { + (BlockNoteIndex::new(batch_idx, *note_idx_in_batch).unwrap(), note) }) }) } pub(crate) fn note_created_smt_from_note_batches<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> BlockNoteTree { let note_leaf_iterator = flatten_output_notes(batches).map(|(index, note)| (index, note.id(), *note.metadata())); @@ -201,6 +172,8 @@ pub(crate) fn 
note_created_smt_from_note_batches<'a>( pub(crate) fn block_output_notes<'a>( batches: impl Iterator + Clone, -) -> Vec> { - batches.map(|batch| batch.output_notes().to_vec()).collect() +) -> Vec { + batches + .map(|batch| batch.output_notes().iter().cloned().enumerate().collect()) + .collect() } diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index af404acd9..4d0507e33 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -3,12 +3,11 @@ use std::{ num::NonZeroU32, }; -use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockHeader, BlockNumber, NoteBatch}, - crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, - note::{NoteId, NoteInclusionProof, Nullifier}, + block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, + crypto::merkle::{Mmr, SimpleSmt, Smt}, + note::{NoteId, NoteInclusionProof}, transaction::ProvenTransaction, ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; @@ -16,7 +15,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, test_utils::block::{ @@ -28,7 +26,7 @@ use crate::{ #[derive(Debug)] pub struct MockStoreSuccessBuilder { accounts: Option>, - notes: Option>, + notes: Option>, produced_nullifiers: Option>, chain_mmr: Option, block_num: Option, @@ -76,7 +74,10 @@ impl MockStoreSuccessBuilder { } #[must_use] - pub fn initial_notes<'a>(mut self, notes: impl Iterator + Clone) -> Self { + pub fn initial_notes<'a>( + mut self, + notes: impl Iterator + Clone, + ) -> Self { self.notes = Some(notes.cloned().collect()); self @@ -191,7 +192,7 @@ impl MockStoreSuccess { locked_accounts.root() } - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { // Intentionally, we take and hold both locks, to prevent calls to `get_tx_inputs()` from // going through while we're updating the store's data structure let mut locked_accounts = self.accounts.write().await; @@ -206,7 +207,7 @@ impl MockStoreSuccess { debug_assert_eq!(locked_accounts.root(), header.account_root()); // update nullifiers - for nullifier in block.nullifiers() { + for nullifier in block.created_nullifiers() { locked_produced_nullifiers .insert(nullifier.inner(), [header.block_num().into(), ZERO, ZERO, ZERO]); } @@ -219,11 +220,11 @@ impl MockStoreSuccess { } // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); // update notes let mut locked_notes = self.notes.write().await; - for (note_index, note) in block.notes() { + for (note_index, note) in block.output_notes() { locked_notes.insert( note.id(), NoteInclusionProof::new( @@ -289,66 +290,4 @@ impl MockStoreSuccess { current_block_height: 0.into(), }) } - - pub async fn get_block_inputs( - &self, - updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, - ) -> Result { - let locked_accounts = self.accounts.read().await; - let locked_produced_nullifiers = self.produced_nullifiers.read().await; - - let chain_peaks = { - let locked_chain_mmr = self.chain_mmr.read().await; - locked_chain_mmr.peaks() - }; - - let accounts = { - updated_accounts - .map(|account_id| { - let ValuePath { value: hash, path: proof } = - locked_accounts.open(&account_id.into()); - - 
(account_id, AccountWitness { hash, proof }) - }) - .collect() - }; - - let nullifiers = produced_nullifiers - .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) - .collect(); - - let locked_notes = self.notes.read().await; - let note_proofs = notes - .filter_map(|id| locked_notes.get(id).map(|proof| (*id, proof.clone()))) - .collect::>(); - - let locked_headers = self.block_headers.read().await; - let latest_header = - *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; - - let locked_chain_mmr = self.chain_mmr.read().await; - let chain_length = latest_header.block_num(); - let block_proofs = note_proofs - .values() - .map(|note_proof| { - let block_num = note_proof.location().block_num(); - let block_header = *locked_headers.get(&block_num).unwrap(); - let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; - - BlockInclusionProof { block_header, mmr_path, chain_length } - }) - .collect(); - - let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; - - Ok(BlockInputs { - block_header: latest_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } } diff --git a/crates/proto/src/domain/account.rs b/crates/proto/src/domain/account.rs index 3d6680065..36fadc4a0 100644 --- a/crates/proto/src/domain/account.rs +++ b/crates/proto/src/domain/account.rs @@ -149,49 +149,45 @@ impl TryInto for proto::requests::get_account_proofs_reques } } -// ACCOUNT INPUT RECORD +// ACCOUNT WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct AccountInputRecord { +pub struct AccountWitnessRecord { pub account_id: AccountId, - pub account_hash: Digest, + pub initial_state_commitment: Digest, pub proof: MerklePath, } -impl From for proto::responses::AccountBlockInputRecord { - fn from(from: AccountInputRecord) -> Self { +impl From for proto::responses::AccountWitness { + fn from(from: AccountWitnessRecord) -> Self { Self { account_id: Some(from.account_id.into()), - account_hash: Some(from.account_hash.into()), + initial_state_commitment: Some(from.initial_state_commitment.into()), proof: Some(Into::into(&from.proof)), } } } -impl TryFrom for AccountInputRecord { +impl TryFrom for AccountWitnessRecord { type Error = ConversionError; fn try_from( - account_input_record: proto::responses::AccountBlockInputRecord, + account_witness_record: proto::responses::AccountWitness, ) -> Result { Ok(Self { - account_id: account_input_record + account_id: account_witness_record .account_id - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_id - )))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_id)))? .try_into()?, - account_hash: account_input_record - .account_hash - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_hash - )))? + initial_state_commitment: account_witness_record + .initial_state_commitment + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_hash)))? .try_into()?, - proof: account_input_record + proof: account_witness_record .proof .as_ref() - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!(proof)))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(proof)))? 
.try_into()?, }) } diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index fa7e4bcfb..ff19518bb 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -1,11 +1,18 @@ +use std::collections::BTreeMap; + use miden_objects::{ - block::{BlockHeader, BlockNumber}, - crypto::merkle::MerklePath, + block::{AccountWitness, BlockHeader, BlockInputs, NullifierWitness}, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, }; use crate::{ errors::{ConversionError, MissingFieldHelper}, - generated::block as proto, + generated::{ + block as proto, note::NoteInclusionInBlockProof, responses::GetBlockInputsResponse, + }, + AccountWitnessRecord, NullifierWitnessRecord, }; // BLOCK HEADER @@ -87,40 +94,97 @@ impl TryFrom for BlockHeader { } } -/// Data required to verify a block's inclusion proof. -#[derive(Clone, Debug)] -pub struct BlockInclusionProof { - pub block_header: BlockHeader, - pub mmr_path: MerklePath, - pub chain_length: BlockNumber, -} +// BLOCK INPUTS +// ================================================================================================ -impl From for proto::BlockInclusionProof { - fn from(value: BlockInclusionProof) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_path: Some((&value.mmr_path).into()), - chain_length: value.chain_length.as_u32(), +impl From for GetBlockInputsResponse { + fn from(inputs: BlockInputs) -> Self { + let ( + prev_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + ) = inputs.into_parts(); + + GetBlockInputsResponse { + latest_block_header: Some(prev_block_header.into()), + account_witnesses: account_witnesses + .into_iter() + .map(|(id, witness)| { + let (initial_state_commitment, proof) = witness.into_parts(); + AccountWitnessRecord { + account_id: id, + initial_state_commitment, + proof, + } + .into() + }) + .collect(), + nullifier_witnesses: nullifier_witnesses + .into_iter() + .map(|(nullifier, witness)| { + let proof = witness.into_proof(); + NullifierWitnessRecord { nullifier, proof }.into() + }) + .collect(), + chain_mmr: chain_mmr.to_bytes(), + unauthenticated_note_proofs: unauthenticated_note_proofs + .iter() + .map(NoteInclusionInBlockProof::from) + .collect(), } } } -impl TryFrom for BlockInclusionProof { +impl TryFrom for BlockInputs { type Error = ConversionError; - fn try_from(value: proto::BlockInclusionProof) -> Result { - let result = Self { - block_header: value - .block_header - .ok_or(proto::BlockInclusionProof::missing_field("block_header"))? - .try_into()?, - mmr_path: (&value - .mmr_path - .ok_or(proto::BlockInclusionProof::missing_field("mmr_path"))?) - .try_into()?, - chain_length: value.chain_length.into(), - }; + fn try_from(response: GetBlockInputsResponse) -> Result { + let latest_block_header: BlockHeader = response + .latest_block_header + .ok_or(proto::BlockHeader::missing_field("block_header"))? 
+ .try_into()?; + + let account_witnesses = response + .account_witnesses + .into_iter() + .map(|entry| { + let witness_record: AccountWitnessRecord = entry.try_into()?; + Ok(( + witness_record.account_id, + AccountWitness::new( + witness_record.initial_state_commitment, + witness_record.proof, + ), + )) + }) + .collect::, ConversionError>>()?; + + let nullifier_witnesses = response + .nullifier_witnesses + .into_iter() + .map(|entry| { + let witness: NullifierWitnessRecord = entry.try_into()?; + Ok((witness.nullifier, NullifierWitness::new(witness.proof))) + }) + .collect::, ConversionError>>()?; - Ok(result) + let unauthenticated_note_proofs = response + .unauthenticated_note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?; + + let chain_mmr = ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?; + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) } } diff --git a/crates/proto/src/domain/note.rs b/crates/proto/src/domain/note.rs index 14131bc5b..6c3b7f165 100644 --- a/crates/proto/src/domain/note.rs +++ b/crates/proto/src/domain/note.rs @@ -1,16 +1,11 @@ -use std::collections::{BTreeMap, BTreeSet}; - use miden_objects::{ note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, Digest, Felt, }; use crate::{ - convert, - domain::block::BlockInclusionProof, errors::{ConversionError, MissingFieldHelper}, generated::note as proto, - try_convert, }; impl TryFrom for NoteMetadata { @@ -89,41 +84,3 @@ impl TryFrom<&proto::NoteInclusionInBlockProof> for (NoteId, NoteInclusionProof) )) } } - -#[derive(Clone, Default, Debug)] -pub struct NoteAuthenticationInfo { - pub block_proofs: Vec, - pub note_proofs: BTreeMap, -} - -impl NoteAuthenticationInfo { - pub fn contains_note(&self, note: &NoteId) -> bool { - self.note_proofs.contains_key(note) - } - - pub fn note_ids(&self) -> BTreeSet { - self.note_proofs.keys().copied().collect() - } -} - -impl From for proto::NoteAuthenticationInfo { - fn from(value: NoteAuthenticationInfo) -> Self { - Self { - note_proofs: convert(&value.note_proofs), - block_proofs: convert(value.block_proofs), - } - } -} - -impl TryFrom for NoteAuthenticationInfo { - type Error = ConversionError; - - fn try_from(value: proto::NoteAuthenticationInfo) -> Result { - let result = Self { - block_proofs: try_convert(value.block_proofs)?, - note_proofs: try_convert(&value.note_proofs)?, - }; - - Ok(result) - } -} diff --git a/crates/proto/src/domain/nullifier.rs b/crates/proto/src/domain/nullifier.rs index 482183a0f..ef19397ca 100644 --- a/crates/proto/src/domain/nullifier.rs +++ b/crates/proto/src/domain/nullifier.rs @@ -35,40 +35,36 @@ impl TryFrom for Nullifier { } } -// NULLIFIER INPUT RECORD +// NULLIFIER WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct NullifierWitness { +pub struct NullifierWitnessRecord { pub nullifier: Nullifier, pub proof: SmtProof, } -impl TryFrom for NullifierWitness { +impl TryFrom for NullifierWitnessRecord { type Error = ConversionError; fn try_from( - nullifier_input_record: proto::responses::NullifierBlockInputRecord, + nullifier_witness_record: proto::responses::NullifierWitness, ) -> Result { Ok(Self { - nullifier: nullifier_input_record + nullifier: nullifier_witness_record .nullifier - 
.ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - nullifier - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(nullifier)))? .try_into()?, - proof: nullifier_input_record + proof: nullifier_witness_record .opening - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - opening - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(opening)))? .try_into()?, }) } } -impl From for proto::responses::NullifierBlockInputRecord { - fn from(value: NullifierWitness) -> Self { +impl From for proto::responses::NullifierWitness { + fn from(value: NullifierWitnessRecord) -> Self { Self { nullifier: Some(value.nullifier.into()), opening: Some(value.proof.into()), diff --git a/crates/proto/src/generated/block.rs b/crates/proto/src/generated/block.rs index 915dce7ec..1dd74f21c 100644 --- a/crates/proto/src/generated/block.rs +++ b/crates/proto/src/generated/block.rs @@ -36,16 +36,3 @@ pub struct BlockHeader { #[prost(fixed32, tag = "11")] pub timestamp: u32, } -/// Represents a block inclusion proof. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct BlockInclusionProof { - /// Block header associated with the inclusion proof. - #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Merkle path associated with the inclusion proof. - #[prost(message, optional, tag = "2")] - pub mmr_path: ::core::option::Option, - /// The chain length associated with `mmr_path`. - #[prost(fixed32, tag = "3")] - pub chain_length: u32, -} diff --git a/crates/proto/src/generated/note.rs b/crates/proto/src/generated/note.rs index 77a9bae41..293eaddab 100644 --- a/crates/proto/src/generated/note.rs +++ b/crates/proto/src/generated/note.rs @@ -78,13 +78,3 @@ pub struct NoteSyncRecord { #[prost(message, optional, tag = "4")] pub merkle_path: ::core::option::Option, } -/// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct NoteAuthenticationInfo { - /// Proof of each note's inclusion in a block. - #[prost(message, repeated, tag = "1")] - pub note_proofs: ::prost::alloc::vec::Vec, - /// Proof of each block's inclusion in the chain. - #[prost(message, repeated, tag = "2")] - pub block_proofs: ::prost::alloc::vec::Vec, -} diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index 9194e084f..e4dc3e131 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -81,15 +81,26 @@ pub struct SyncNoteRequest { /// Returns data required to prove the next block. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockInputsRequest { - /// ID of the account against which a transaction is executed. + /// IDs of all accounts updated in the proposed block for which to retrieve account witnesses. #[prost(message, repeated, tag = "1")] pub account_ids: ::prost::alloc::vec::Vec, - /// Set of nullifiers consumed by this transaction. + /// Nullifiers of all notes consumed by the block for which to retrieve witnesses. + /// + /// Due to note erasure it will generally not be possible to know the exact set of nullifiers + /// a block will create, unless we pre-execute note erasure. So in practice, this set of + /// nullifiers will be the set of nullifiers of all proven batches in the block, which is a + /// superset of the nullifiers the block may create. 
+ /// + /// However, if it is known that a certain note will be erased, it would not be necessary to + /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs to be checked for existence in the database. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, + /// Array of block numbers referenced by all batches in the block. + #[prost(fixed32, repeated, tag = "4")] + pub reference_blocks: ::prost::alloc::vec::Vec, } /// Returns the inputs for a transaction batch. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index ac764735d..f0fd0d8ac 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -83,24 +83,25 @@ pub struct SyncNoteResponse { } /// An account returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct AccountBlockInputRecord { +pub struct AccountWitness { /// The account ID. #[prost(message, optional, tag = "1")] pub account_id: ::core::option::Option, - /// The latest account hash, zero hash if the account doesn't exist. + /// The latest account state commitment used as the initial state of the requested block. + /// This will be the zero digest if the account doesn't exist. #[prost(message, optional, tag = "2")] - pub account_hash: ::core::option::Option, - /// Merkle path to verify the account's inclusion in the MMR. + pub initial_state_commitment: ::core::option::Option, + /// Merkle path to verify the account's inclusion in the account tree. #[prost(message, optional, tag = "3")] pub proof: ::core::option::Option, } /// A nullifier returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct NullifierBlockInputRecord { - /// The nullifier ID. +pub struct NullifierWitness { + /// The nullifier. #[prost(message, optional, tag = "1")] pub nullifier: ::core::option::Option, - /// Merkle path to verify the nullifier's inclusion in the MMR. + /// The SMT proof to verify the nullifier's inclusion in the nullifier tree. #[prost(message, optional, tag = "2")] pub opening: ::core::option::Option, } @@ -109,21 +110,24 @@ pub struct NullifierBlockInputRecord { pub struct GetBlockInputsResponse { /// The latest block header. #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Peaks of the above block's mmr, The `forest` value is equal to the block number. + pub latest_block_header: ::core::option::Option, + /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + /// the store**. #[prost(message, repeated, tag = "2")] - pub mmr_peaks: ::prost::alloc::vec::Vec, - /// The hashes of the requested accounts and their authentication paths. - #[prost(message, repeated, tag = "3")] - pub account_states: ::prost::alloc::vec::Vec, - /// The requested nullifiers and their authentication paths. - #[prost(message, repeated, tag = "4")] - pub nullifiers: ::prost::alloc::vec::Vec, - /// The list of requested notes which were found in the database. 
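For reference, a hedged sketch of how a caller might populate the reworked request using the generated types above. The helper name is ours, and the conversion of domain values into the generated `AccountId`/`Digest` messages is assumed to have happened already:

```rust
use miden_node_proto::generated::{account, digest, requests::GetBlockInputsRequest};

/// Assemble a `GetBlockInputsRequest` from already-converted proto values.
fn block_inputs_request(
    account_ids: Vec<account::AccountId>,
    nullifiers: Vec<digest::Digest>,
    unauthenticated_notes: Vec<digest::Digest>,
    reference_blocks: Vec<u32>,
) -> GetBlockInputsRequest {
    GetBlockInputsRequest {
        account_ids,
        nullifiers,
        unauthenticated_notes,
        // New field: block numbers referenced by the batches in the block.
        reference_blocks,
    }
}
```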
- #[prost(message, optional, tag = "5")] - pub found_unauthenticated_notes: ::core::option::Option< - super::note::NoteAuthenticationInfo, + pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< + super::note::NoteInclusionInBlockProof, >, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the requested blocks + /// referenced by the batches in the block. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, + /// The state commitments of the requested accounts and their authentication paths. + #[prost(message, repeated, tag = "4")] + pub account_witnesses: ::prost::alloc::vec::Vec, + /// The requested nullifiers and their authentication paths. + #[prost(message, repeated, tag = "5")] + pub nullifier_witnesses: ::prost::alloc::vec::Vec, } /// Represents the result of getting batch inputs. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/lib.rs b/crates/proto/src/lib.rs index 9290fc739..ca950f020 100644 --- a/crates/proto/src/lib.rs +++ b/crates/proto/src/lib.rs @@ -8,8 +8,8 @@ pub mod generated; // ================================================================================================ pub use domain::{ - account::{AccountInputRecord, AccountState}, + account::{AccountState, AccountWitnessRecord}, convert, - nullifier::NullifierWitness, + nullifier::NullifierWitnessRecord, try_convert, }; diff --git a/crates/rpc-proto/proto/block.proto b/crates/rpc-proto/proto/block.proto index f9a41a99c..74229793c 100644 --- a/crates/rpc-proto/proto/block.proto +++ b/crates/rpc-proto/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. - fixed32 chain_length = 3; -} diff --git a/crates/rpc-proto/proto/note.proto b/crates/rpc-proto/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/crates/rpc-proto/proto/note.proto +++ b/crates/rpc-proto/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block. - repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. - repeated block.BlockInclusionProof block_proofs = 2; -} diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index bf9fd557a..13d4568b1 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -76,12 +76,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. 
+ // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index ad1f353a5..3163f993c 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -87,42 +87,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. 
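Because note inclusion proofs are returned only for notes the store actually knows about, the block producer can recover the notes that remain unauthenticated by a simple set difference. A sketch under that assumption (the helper name is ours):

```rust
use std::collections::{BTreeMap, BTreeSet};

use miden_objects::note::{NoteId, NoteInclusionProof};

/// Notes that were requested but for which the store returned no inclusion
/// proof; these must still be treated as unauthenticated when the block is built.
fn still_unauthenticated(
    requested: &BTreeSet<NoteId>,
    proofs: &BTreeMap<NoteId, NoteInclusionProof>,
) -> BTreeSet<NoteId> {
    requested.iter().filter(|note_id| !proofs.contains_key(*note_id)).copied().collect()
}
```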
+ repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 6df767f9e..3b2027455 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -11,7 +11,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::{AccountDelta, AccountId}, - block::{Block, BlockHeader, BlockNoteIndex, BlockNumber}, + block::{BlockHeader, BlockNoteIndex, BlockNumber, ProvenBlock}, crypto::{hash::rpo::RpoDigest, merkle::MerklePath, utils::Deserializable}, note::{NoteId, NoteInclusionProof, NoteMetadata, Nullifier}, transaction::TransactionId, @@ -87,7 +87,12 @@ impl NoteRecord { /// ensure ordering is correct. fn from_row(row: &rusqlite::Row<'_>) -> Result { let block_num = read_block_number(row, 0)?; - let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = row.get_ref(3)?.as_blob()?; let note_id = RpoDigest::read_from_bytes(note_id)?; let note_type = row.get::<_, u8>(4)?.try_into()?; @@ -454,7 +459,7 @@ impl Db { &self, allow_acquire: oneshot::Sender<()>, acquire_done: oneshot::Receiver<()>, - block: Block, + block: ProvenBlock, notes: Vec<(NoteRecord, Option)>, ) -> Result<()> { self.pool @@ -469,7 +474,7 @@ impl Db { &transaction, &block.header(), ¬es, - block.nullifiers(), + block.created_nullifiers(), block.updated_accounts(), )?; diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 25cb00d8c..c4f1894ba 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -783,7 +783,12 @@ pub fn select_notes_since_block_by_tag_and_sender( let mut res = Vec::new(); while let Some(row) = rows.next()? { let block_num = read_block_number(row, 0)?; - let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = read_from_blob_column(row, 3)?; let note_type = row.get::<_, u8>(4)?; let sender = read_from_blob_column(row, 5)?; @@ -875,7 +880,11 @@ pub fn select_note_inclusion_proofs( let batch_index = row.get(2)?; let note_index = row.get(3)?; - let node_index_in_block = BlockNoteIndex::new(batch_index, note_index)?.leaf_index_value(); + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. 
+ let node_index_in_block = BlockNoteIndex::new(batch_index, note_index) + .expect("batch and note index from DB should be valid") + .leaf_index_value(); let merkle_path_data = row.get_ref(4)?.as_blob()?; let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 2c6fa9e36..ebca9f753 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -11,7 +11,7 @@ use miden_objects::{ }, note::Nullifier, transaction::OutputNote, - AccountDeltaError, AccountError, BlockError, NoteError, + AccountDeltaError, AccountError, NoteError, }; use rusqlite::types::FromSqlError; use thiserror::Error; @@ -41,8 +41,6 @@ pub enum DatabaseError { AccountError(#[from] AccountError), #[error("account delta error")] AccountDeltaError(#[from] AccountDeltaError), - #[error("block error")] - BlockError(#[from] BlockError), #[error("closed channel")] ClosedChannel(#[from] RecvError), #[error("deserialization failed")] @@ -75,8 +73,6 @@ pub enum DatabaseError { AccountsNotFoundInDb(Vec), #[error("account {0} is not on the chain")] AccountNotPublic(AccountId), - #[error("block {0} not found")] - BlockNotFoundInDb(BlockNumber), #[error("data corrupted: {0}")] DataCorrupted(String), #[error("SQLite pool interaction failed: {0}")] @@ -95,8 +91,7 @@ impl From for Status { match err { DatabaseError::AccountNotFoundInDb(_) | DatabaseError::AccountsNotFoundInDb(_) - | DatabaseError::AccountNotPublic(_) - | DatabaseError::BlockNotFoundInDb(_) => Status::not_found(err.to_string()), + | DatabaseError::AccountNotPublic(_) => Status::not_found(err.to_string()), _ => Status::internal(err.to_string()), } @@ -136,8 +131,9 @@ pub enum GenesisError { // --------------------------------------------------------------------------------------------- #[error("database error")] DatabaseError(#[from] DatabaseError), + // TODO: Check if needed. #[error("block error")] - BlockError(#[from] BlockError), + BlockError, #[error("merkle error")] MerkleError(#[from] MerkleError), #[error("failed to deserialize genesis file")] @@ -230,27 +226,15 @@ pub enum GetBlockHeaderError { #[derive(Error, Debug)] pub enum GetBlockInputsError { - #[error("account error")] - AccountError(#[from] AccountError), - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("database doesn't have any block header data")] - DbBlockHeaderEmpty, - #[error("failed to get MMR peaks for forest ({forest}): {error}")] - FailedToGetMmrPeaksForForest { forest: usize, error: MmrError }, - #[error("chain MMR forest expected to be 1 less than latest header's block num. 
Chain MMR forest: {forest}, block num: {block_num}")] - IncorrectChainMmrForestNumber { forest: usize, block_num: BlockNumber }, - #[error("note inclusion proof MMR error")] - NoteInclusionMmr(#[from] MmrError), -} - -impl From for GetBlockInputsError { - fn from(value: GetNoteAuthenticationInfoError) -> Self { - match value { - GetNoteAuthenticationInfoError::DatabaseError(db_err) => db_err.into(), - GetNoteAuthenticationInfoError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), - } - } + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), + #[error("highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}")] + UnknownBatchBlockReference { + highest_block_number: BlockNumber, + latest_block_number: BlockNumber, + }, } #[derive(Error, Debug)] @@ -273,14 +257,6 @@ pub enum NoteSyncError { MmrError(#[from] MmrError), } -#[derive(Error, Debug)] -pub enum GetNoteAuthenticationInfoError { - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("Mmr error")] - MmrError(#[from] MmrError), -} - #[derive(Error, Debug)] pub enum GetBatchInputsError { #[error("failed to select note inclusion proofs")] @@ -290,7 +266,7 @@ pub enum GetBatchInputsError { #[error("set of blocks refernced by transactions is empty")] TransactionBlockReferencesEmpty, #[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] - TransactionBlockReferenceNewerThanLatestBlock { + UnknownTransactionBlockReference { highest_block_num: BlockNumber, latest_block_num: BlockNumber, }, diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index de128879c..d78990734 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -1,10 +1,11 @@ use miden_lib::transaction::TransactionKernel; use miden_objects::{ account::{delta::AccountUpdateDetails, Account}, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNumber}, - crypto::merkle::{EmptySubtreeRoots, MmrPeaks, SimpleSmt, Smt}, + block::{BlockAccountUpdate, BlockHeader, BlockNoteTree, BlockNumber, ProvenBlock}, + crypto::merkle::{MmrPeaks, SimpleSmt, Smt}, + note::Nullifier, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, ACCOUNT_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, + Digest, ACCOUNT_TREE_DEPTH, }; use crate::errors::GenesisError; @@ -26,7 +27,7 @@ impl GenesisState { } /// Returns the block header and the account SMT - pub fn into_block(self) -> Result { + pub fn into_block(self) -> Result { let accounts: Vec = self .accounts .iter() @@ -51,21 +52,35 @@ impl GenesisState { (update.account_id().prefix().into(), update.final_state_commitment().into()) }))?; + let empty_nullifiers: Vec = Vec::new(); + let empty_nullifier_tree = Smt::new(); + + let empty_output_notes = Vec::new(); + let empty_block_note_tree = BlockNoteTree::empty(); + let header = BlockHeader::new( self.version, Digest::default(), BlockNumber::GENESIS, MmrPeaks::new(0, Vec::new()).unwrap().hash_peaks(), account_smt.root(), - Smt::default().root(), - *EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0), + empty_nullifier_tree.root(), + empty_block_note_tree.root(), Digest::default(), TransactionKernel::kernel_root(), Digest::default(), self.timestamp, ); - Block::new(header, accounts, vec![], vec![]).map_err(Into::into) + // 
SAFETY: Header and accounts should be valid by construction. + // No notes or nullifiers are created at genesis, which is consistent with the above empty + // block note tree root and empty nullifier tree root. + Ok(ProvenBlock::new_unchecked( + header, + accounts, + empty_output_notes, + empty_nullifiers, + )) } } diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index be0ea560d..f69c8779a 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -29,7 +29,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::AccountId, - block::{Block, BlockNumber}, + block::{BlockNumber, ProvenBlock}, crypto::hash::rpo::RpoDigest, note::{NoteId, Nullifier}, utils::{Deserializable, Serializable}, @@ -304,7 +304,7 @@ impl api_server::Api for StoreApi { debug!(target: COMPONENT, ?request); - let block = Block::read_from_bytes(&request.block).map_err(|err| { + let block = ProvenBlock::read_from_bytes(&request.block).map_err(|err| { Status::invalid_argument(format!("Block deserialization error: {err}")) })?; @@ -315,8 +315,8 @@ impl api_server::Api for StoreApi { block_num, block_hash = %block.hash(), account_count = block.updated_accounts().len(), - note_count = block.notes().count(), - nullifier_count = block.nullifiers().len(), + note_count = block.output_notes().count(), + nullifier_count = block.created_nullifiers().len(), ); self.state.apply_block(block).await?; @@ -338,15 +338,16 @@ impl api_server::Api for StoreApi { ) -> Result, Status> { let request = request.into_inner(); - let nullifiers = validate_nullifiers(&request.nullifiers)?; let account_ids = read_account_ids(&request.account_ids)?; + let nullifiers = validate_nullifiers(&request.nullifiers)?; let unauthenticated_notes = validate_notes(&request.unauthenticated_notes)?; + let reference_blocks = read_block_numbers(&request.reference_blocks); let unauthenticated_notes = unauthenticated_notes.into_iter().collect(); self.state - .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes) + .get_block_inputs(account_ids, nullifiers, unauthenticated_notes, reference_blocks) .await - .map(Into::into) + .map(GetBlockInputsResponse::from) .map(Response::new) .map_err(internal_error) } @@ -569,3 +570,8 @@ fn validate_notes(notes: &[generated::digest::Digest]) -> Result, St .collect::>() .map_err(|_| invalid_argument("Digest field is not in the modulus range")) } + +#[instrument(target = COMPONENT, skip_all)] +fn read_block_numbers(block_numbers: &[u32]) -> BTreeSet { + block_numbers.iter().map(|raw_number| BlockNumber::from(*raw_number)).collect() +} diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index c6b9e1ac9..d1ec2b48f 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -10,22 +10,16 @@ use std::{ }; use miden_node_proto::{ - convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, batch::BatchInputs, - block::BlockInclusionProof, - note::NoteAuthenticationInfo, }, - generated::responses::{ - AccountProofsResponse, AccountStateHeader, GetBlockInputsResponse, StorageSlotMapProof, - }, - AccountInputRecord, NullifierWitness, + generated::responses::{AccountProofsResponse, AccountStateHeader, StorageSlotMapProof}, }; use miden_node_utils::formatting::format_array; use miden_objects::{ account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, - block::{Block, BlockHeader, BlockNumber}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness, ProvenBlock}, crypto::{ 
hash::rpo::RpoDigest, merkle::{ @@ -49,8 +43,8 @@ use crate::{ db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ ApplyBlockError, DatabaseError, GetBatchInputsError, GetBlockHeaderError, - GetBlockInputsError, GetNoteAuthenticationInfoError, InvalidBlockError, NoteSyncError, - StateInitializationError, StateSyncError, + GetBlockInputsError, InvalidBlockError, NoteSyncError, StateInitializationError, + StateSyncError, }, nullifier_tree::NullifierTree, COMPONENT, @@ -58,37 +52,6 @@ use crate::{ // STRUCTURES // ================================================================================================ -/// Information needed from the store to validate and build a block -#[derive(Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub account_states: Vec, - - /// The requested nullifiers and their authentication paths - pub nullifiers: Vec, - - /// List of notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -impl From for GetBlockInputsResponse { - fn from(value: BlockInputs) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_peaks: convert(value.chain_peaks.peaks()), - account_states: convert(value.account_states), - nullifiers: convert(value.nullifiers), - found_unauthenticated_notes: Some(value.found_unauthenticated_notes.into()), - } - } -} - #[derive(Debug)] pub struct TransactionInputs { pub account_hash: RpoDigest, @@ -150,12 +113,14 @@ impl Blockchain { &self.0 } - /// Returns the latest block number and partial mmr. + /// Creates a [`PartialMmr`] at the state of the latest block (i.e. the block's chain root will + /// match the hashed peaks of the returned partial MMR). This MMR will include authentication + /// paths for all blocks in the provided set. pub fn partial_mmr_from_blocks( &self, blocks: &BTreeSet, latest_block_number: BlockNumber, - ) -> Result { + ) -> PartialMmr { // Using latest block as the target forest means we take the state of the MMR one before // the latest block. This is because the latest block will be used as the reference // block of the batch and will be added to the MMR by the batch kernel. @@ -183,7 +148,8 @@ impl Blockchain { .track(block_num, leaf, &path) .expect("filling partial mmr with data from mmr should succeed"); } - Ok(partial_mmr) + + partial_mmr } } @@ -268,12 +234,12 @@ impl State { /// released. // TODO: This span is logged in a root span, we should connect it to the parent span. 
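The updated doc comment on `partial_mmr_from_blocks` above states that the returned MMR is at the state of the latest block, i.e. hashing its peaks reproduces that block's chain root. A hedged consistency check, assuming the usual `peaks`, `hash_peaks` and `chain_root` accessors (only `hash_peaks` appears verbatim elsewhere in this diff):

```rust
use miden_objects::{block::BlockHeader, crypto::merkle::PartialMmr};

/// True when the partial MMR is at the state promised by `partial_mmr_from_blocks`:
/// hashing its peaks yields the chain root recorded in the latest block header.
fn partial_mmr_matches_latest(partial_mmr: &PartialMmr, latest: &BlockHeader) -> bool {
    partial_mmr.peaks().hash_peaks() == latest.chain_root()
}
```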
#[instrument(target = COMPONENT, skip_all, err)] - pub async fn apply_block(&self, block: Block) -> Result<(), ApplyBlockError> { + pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; let header = block.header(); - let tx_hash = block.compute_tx_hash(); + let tx_hash = BlockHeader::compute_tx_commitment(block.transactions()); if header.tx_hash() != tx_hash { return Err(InvalidBlockError::InvalidBlockTxHash { expected: tx_hash, @@ -324,7 +290,7 @@ impl State { // nullifiers can be produced only once let duplicate_nullifiers: Vec<_> = block - .nullifiers() + .created_nullifiers() .iter() .filter(|&n| inner.nullifier_tree.get_block_num(n).is_some()) .copied() @@ -343,7 +309,7 @@ impl State { // compute update for nullifier tree let nullifier_tree_update = inner.nullifier_tree.compute_mutations( - block.nullifiers().iter().map(|nullifier| (*nullifier, block_num)), + block.created_nullifiers().iter().map(|nullifier| (*nullifier, block_num)), ); if nullifier_tree_update.root() != header.nullifier_root() { @@ -373,13 +339,13 @@ impl State { }; // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); if note_tree.root() != header.note_root() { return Err(InvalidBlockError::NewBlockInvalidNoteRoot.into()); } let notes = block - .notes() + .output_notes() .map(|(note_index, note)| { let (details, nullifier) = match note { OutputNote::Full(note) => (Some(note.to_bytes()), Some(note.nullifier())), @@ -532,65 +498,6 @@ impl State { self.db.select_notes_by_id(note_ids).await } - /// Queries all the note inclusion proofs matching a certain Note IDs from the database. - pub async fn get_note_authentication_info( - &self, - note_ids: BTreeSet, - ) -> Result { - // First we grab note inclusion proofs for the known notes. These proofs only - // prove that the note was included in a given block. We then also need to prove that - // each of those blocks is included in the chain. - let note_proofs = self.db.select_note_inclusion_proofs(note_ids).await?; - - // The set of blocks that the notes are included in. - let blocks = note_proofs - .values() - .map(|proof| proof.location().block_num()) - .collect::>(); - - // Grab the block merkle paths from the inner state. - // - // NOTE: Scoped block to automatically drop the mutex guard asap. - // - // We also avoid accessing the db in the block as this would delay - // dropping the guard. 
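The `apply_block` change above recomputes the transaction commitment from the proven block's transactions (previously `block.compute_tx_hash()`). Factored into a small sketch that uses only accessors appearing in this diff:

```rust
use miden_objects::block::{BlockHeader, ProvenBlock};

/// Check that the transaction commitment recorded in the header matches the one
/// recomputed from the block's transactions, mirroring the check in `apply_block`.
fn tx_commitment_matches(block: &ProvenBlock) -> bool {
    let expected = BlockHeader::compute_tx_commitment(block.transactions());
    block.header().tx_hash() == expected
}
```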
- let (chain_length, merkle_paths) = { - let state = self.inner.read().await; - let chain_length = state.blockchain.chain_length().as_usize(); - - let paths = blocks - .iter() - .map(|&block_num| { - let proof = state.blockchain.open(block_num.as_usize())?.merkle_path; - - Ok::<_, MmrError>((block_num, proof)) - }) - .collect::, MmrError>>()?; - - let chain_length = u32::try_from(chain_length) - .expect("Forest is a chain length so should fit into a u32"); - - (chain_length.into(), paths) - }; - - let headers = self.db.select_block_headers(blocks.into_iter()).await?; - - let headers = headers - .into_iter() - .map(|header| (header.block_num(), header)) - .collect::<BTreeMap<_, _>>(); - - let mut block_proofs = Vec::with_capacity(merkle_paths.len()); - for (block_num, mmr_path) in merkle_paths { - let block_header = - *headers.get(&block_num).ok_or(DatabaseError::BlockNotFoundInDb(block_num))?; - - block_proofs.push(BlockInclusionProof { block_header, mmr_path, chain_length }); - } - - Ok(NoteAuthenticationInfo { block_proofs, note_proofs }) - } - /// Fetches the inputs for a transaction batch from the database. /// /// ## Inputs @@ -633,7 +540,7 @@ impl State { // Collect all blocks we need to query without duplicates, which is: // - all blocks for which we need to prove note inclusion. // - all blocks referenced by transactions in the batch. - let mut blocks = tx_reference_blocks; + let mut blocks: BTreeSet<BlockNumber> = tx_reference_blocks; blocks.extend(note_blocks); // Scoped block to automatically drop the read lock guard as soon as we're done. @@ -646,7 +553,7 @@ impl State { let highest_block_num = *blocks.last().expect("we should have checked for empty block references"); if highest_block_num > latest_block_num { - return Err(GetBatchInputsError::TransactionBlockReferenceNewerThanLatestBlock { + return Err(GetBatchInputsError::UnknownTransactionBlockReference { highest_block_num, latest_block_num, }); @@ -659,7 +566,7 @@ impl State { ( latest_block_num, - inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num), ) }; @@ -779,64 +686,135 @@ impl State { /// Returns data needed by the block producer to construct and prove the next block. pub async fn get_block_inputs( &self, - account_ids: &[AccountId], - nullifiers: &[Nullifier], + account_ids: Vec<AccountId>, + nullifiers: Vec<Nullifier>, unauthenticated_notes: BTreeSet<NoteId>, + reference_blocks: BTreeSet<BlockNumber>, ) -> Result<BlockInputs, GetBlockInputsError> { - let inner = self.inner.read().await; + // Get the note inclusion proofs from the DB. + // We do this first so that we only have to acquire the lock on the state once. We need the + // reference blocks of the note proofs to get their authentication paths in the chain MMR. + let unauthenticated_note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_notes) + .await + .map_err(GetBlockInputsError::SelectNoteInclusionProofError)?; - let latest = self + // The set of blocks that the notes are included in. + let note_proof_reference_blocks = + unauthenticated_note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to prove inclusion for, without duplicates.
+ let mut blocks = reference_blocks; + blocks.extend(note_proof_reference_blocks); + + let (latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr) = + self.get_block_inputs_witnesses(&mut blocks, account_ids, nullifiers).await?; + + // Fetch the block headers for all blocks in the partial MMR plus the latest one which will + // be used as the previous block header of the block being built. + let mut headers = self .db - .select_block_header_by_block_num(None) - .await? - .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?; + .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number))) + .await + .map_err(GetBlockInputsError::SelectBlockHeaderError)?; + + // Find and remove the latest block as we must not add it to the chain MMR, since it is + // not yet in the chain. + let latest_block_header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == latest_block_number).then_some(index) + }) + .expect("DB should have returned the latest block header"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. + let latest_block_header = headers.swap_remove(latest_block_header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block header's block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) + } + + /// Get account and nullifier witnesses for the requested account IDs and nullifiers as well as + /// the [`PartialMmr`] for the given blocks. The MMR won't contain the latest block and its + /// number is removed from `blocks` and returned separately. + /// + /// This method acquires the lock on the inner state and does not access the DB, so the lock + /// is released as soon as possible. + async fn get_block_inputs_witnesses( + &self, + blocks: &mut BTreeSet<BlockNumber>, + account_ids: Vec<AccountId>, + nullifiers: Vec<Nullifier>, + ) -> Result< + ( + BlockNumber, + BTreeMap<AccountId, AccountWitness>, + BTreeMap<Nullifier, NullifierWitness>, + PartialMmr, + ), + GetBlockInputsError, + > { + let inner = self.inner.read().await; + + let latest_block_number = inner.latest_block_num(); - // sanity check - if inner.blockchain.chain_tip() != latest.block_num() { - return Err(GetBlockInputsError::IncorrectChainMmrForestNumber { - forest: inner.blockchain.chain_tip().as_usize(), - block_num: latest.block_num(), + // If `blocks` is empty, use the latest block number which will never trigger the error.
+ let highest_block_number = blocks.last().copied().unwrap_or(latest_block_number); + if highest_block_number > latest_block_number { + return Err(GetBlockInputsError::UnknownBatchBlockReference { + highest_block_number, + latest_block_number, }); } - // using current block number gets us the peaks of the chain MMR as of one block ago; - // this is done so that latest.chain_root matches the returned peaks - let chain_peaks = - inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| { - GetBlockInputsError::FailedToGetMmrPeaksForForest { - forest: latest.block_num().as_usize(), - error, - } - })?; - let account_states = account_ids + // The latest block is not yet in the chain MMR, so we can't (and don't need to) prove its + // inclusion in the chain. + blocks.remove(&latest_block_number); + + // Fetch the partial MMR at the state of the latest block with authentication paths for the + // provided set of blocks. + let partial_mmr = inner.blockchain.partial_mmr_from_blocks(blocks, latest_block_number); + + // Fetch witnesses for all accounts. + let account_witnesses = account_ids .iter() .copied() .map(|account_id| { - let ValuePath { value: account_hash, path: proof } = - inner.account_tree.open(&LeafIndex::new_max_depth(account_id.prefix().into())); - Ok(AccountInputRecord { account_id, account_hash, proof }) + let ValuePath { + value: latest_state_commitment, + path: proof, + } = inner.account_tree.open(&account_id.into()); + (account_id, AccountWitness::new(latest_state_commitment, proof)) }) - .collect::>()?; + .collect::<BTreeMap<_, _>>(); - let nullifiers: Vec<NullifierWitness> = nullifiers + // Fetch witnesses for all nullifiers. We don't check whether the nullifiers are spent or + // not as this is done as part of proposing the block. + let nullifier_witnesses: BTreeMap<Nullifier, NullifierWitness> = nullifiers .iter() + .copied() .map(|nullifier| { - let proof = inner.nullifier_tree.open(nullifier); - - NullifierWitness { nullifier: *nullifier, proof } + let proof = inner.nullifier_tree.open(&nullifier); + (nullifier, NullifierWitness::new(proof)) }) .collect(); - let found_unauthenticated_notes = - self.get_note_authentication_info(unauthenticated_notes).await?; - - Ok(BlockInputs { - block_header: latest, - chain_peaks, - account_states, - nullifiers, - found_unauthenticated_notes, - }) + Ok((latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr)) } /// Returns data needed by the block producer to verify transactions validity. diff --git a/proto/block.proto b/proto/block.proto index f9a41a99c..74229793c 100644 --- a/proto/block.proto +++ b/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. - fixed32 chain_length = 3; -} diff --git a/proto/note.proto b/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/proto/note.proto +++ b/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block.
- repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. - repeated block.BlockInclusionProof block_proofs = 2; -} diff --git a/proto/requests.proto b/proto/requests.proto index bf9fd557a..13d4568b1 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -76,12 +76,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/proto/responses.proto b/proto/responses.proto index ad1f353a5..3163f993c 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -87,42 +87,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. 
- repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs.
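Since the chain MMR now crosses the wire as opaque bytes, both sides only need to agree on the `Serializable`/`Deserializable` encoding. A sketch of the decode path used by the block producer, mirroring the domain conversion earlier in this diff; the module paths for `ChainMmr` and `ConversionError` are assumptions:

```rust
use miden_node_proto::errors::ConversionError;
use miden_objects::{transaction::ChainMmr, utils::Deserializable};

/// Decode the `chain_mmr` bytes of a `GetBlockInputsResponse` back into a `ChainMmr`,
/// mapping failures to the same conversion error used elsewhere in this change.
fn decode_chain_mmr(bytes: &[u8]) -> Result<ChainMmr, ConversionError> {
    ChainMmr::read_from_bytes(bytes)
        .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))
}
```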