From 8437e82f7a346f08a1bfa34cc171d2a4ff95dfde Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Thu, 23 Jan 2025 17:48:08 -0800 Subject: [PATCH 01/27] chore: increment crate versions to v0.8.0 and MSRV to 1.84 --- CHANGELOG.md | 4 ++++ Cargo.lock | 18 +++++++++--------- Cargo.toml | 14 +++++++------- README.md | 4 ++-- rust-toolchain.toml | 2 +- 5 files changed, 23 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ed656eaf..1824a4dbb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Changes + +- [BREAKING] Updated minimum Rust version to 1.84. + ## v0.7.0 (2025-01-23) ### Enhancements diff --git a/Cargo.lock b/Cargo.lock index a1363a3c7..9009ea63b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -1481,7 +1481,7 @@ dependencies = [ [[package]] name = "miden-faucet" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "axum", @@ -1574,7 +1574,7 @@ dependencies = [ [[package]] name = "miden-node" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "clap", @@ -1595,7 +1595,7 @@ dependencies = [ [[package]] name = "miden-node-block-producer" -version = "0.7.0" +version = "0.8.0" dependencies = [ "assert_matches", "async-trait", @@ -1623,7 +1623,7 @@ dependencies = [ [[package]] name = "miden-node-proto" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "hex", @@ -1640,7 +1640,7 @@ dependencies = [ [[package]] name = "miden-node-rpc" -version = "0.7.0" +version = "0.8.0" dependencies = [ "miden-node-proto", "miden-node-utils", @@ -1656,7 +1656,7 @@ dependencies = [ [[package]] name = "miden-node-store" -version = "0.7.0" +version = "0.8.0" dependencies = [ "assert_matches", "deadpool-sqlite", @@ -1685,7 +1685,7 @@ dependencies = [ [[package]] name = "miden-node-utils" -version = "0.7.0" +version = "0.8.0" 
dependencies = [ "anyhow", "figment", @@ -1751,7 +1751,7 @@ dependencies = [ [[package]] name = "miden-rpc-proto" -version = "0.7.0" +version = "0.8.0" [[package]] name = "miden-stdlib" diff --git a/Cargo.toml b/Cargo.toml index 92dbb1847..ff800d2d3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,8 +15,8 @@ resolver = "2" [workspace.package] edition = "2021" -rust-version = "1.82" -version = "0.7.0" +rust-version = "1.84" +version = "0.8.0" license = "MIT" authors = ["Miden contributors"] homepage = "https://polygon.technology/polygon-miden" @@ -28,12 +28,12 @@ readme = "README.md" assert_matches = { version = "1.5" } miden-air = { version = "0.12" } miden-lib = { version = "0.7" } -miden-node-block-producer = { path = "crates/block-producer", version = "0.7" } -miden-node-proto = { path = "crates/proto", version = "0.7" } -miden-node-rpc = { path = "crates/rpc", version = "0.7" } -miden-node-store = { path = "crates/store", version = "0.7" } +miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } +miden-node-proto = { path = "crates/proto", version = "0.8" } +miden-node-rpc = { path = "crates/rpc", version = "0.8" } +miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } -miden-node-utils = { path = "crates/utils", version = "0.7" } +miden-node-utils = { path = "crates/utils", version = "0.8" } miden-objects = { version = "0.7" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } diff --git a/README.md b/README.md index 6ea1de0c4..390488218 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xPolygonMiden/miden-node/blob/main/LICENSE) [![test](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml) 
-[![RUST_VERSION](https://img.shields.io/badge/rustc-1.82+-lightgray.svg)](https://www.rust-lang.org/tools/install) +[![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![crates.io](https://img.shields.io/crates/v/miden-node)](https://crates.io/crates/miden-node) This repository holds the Miden node; that is, the software which processes transactions and creates blocks for the Miden rollup. @@ -58,7 +58,7 @@ sudo dpkg -i $package_name.deb ### Install using `cargo` -Install Rust version **1.82** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). +Install Rust version **1.84** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). Depending on the platform, you may need to install additional libraries. For example, on Ubuntu 22.04 the following command ensures that all required libraries are installed. diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 6ad542bbc..217b108a4 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.82" +channel = "1.84" components = ["rustfmt", "rust-src", "clippy"] profile = "minimal" From 44394e4a5b8c9f4313c1059486a4e474a1741a5d Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Sun, 26 Jan 2025 21:57:14 +1300 Subject: [PATCH 02/27] ci: fmt check for TOML files (#645) --- .github/workflows/lint.yml | 10 +++++ .taplo.toml | 6 +++ Cargo.toml | 75 ++++++++++++++++---------------- Makefile | 12 ++++- bin/faucet/Cargo.toml | 62 +++++++++++++------------- bin/node/Cargo.toml | 48 ++++++++++---------- config/genesis.toml | 10 ++--- config/miden-faucet.toml | 10 ++--- config/miden-node.toml | 6 +-- crates/block-producer/Cargo.toml | 66 ++++++++++++++-------------- crates/proto/Cargo.toml | 36 +++++++-------- crates/rpc-proto/Cargo.toml | 24 +++++----- crates/rpc/Cargo.toml | 36 
+++++++-------- crates/store/Cargo.toml | 50 ++++++++++----------- crates/test-macro/Cargo.toml | 22 +++++----- crates/utils/Cargo.toml | 44 +++++++++---------- rust-toolchain.toml | 6 +-- rustfmt.toml | 34 +++++++-------- 18 files changed, 292 insertions(+), 265 deletions(-) create mode 100644 .taplo.toml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5c898fde8..756c80f08 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -44,6 +44,16 @@ jobs: - name: Clippy run: make clippy + toml: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: taiki-e/install-action@v2 + with: + tool: taplo-cli + - run: make toml-check + doc: name: doc runs-on: ubuntu-latest diff --git a/.taplo.toml b/.taplo.toml new file mode 100644 index 000000000..b735451f6 --- /dev/null +++ b/.taplo.toml @@ -0,0 +1,6 @@ +[formatting] +align_entries = true +column_width = 120 +reorder_arrays = true +reorder_inline_tables = true +reorder_keys = true diff --git a/Cargo.toml b/Cargo.toml index ff800d2d3..ccf2199df 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,62 +1,63 @@ [workspace] members = [ - "bin/node", "bin/faucet", + "bin/node", "crates/block-producer", "crates/proto", - "crates/rpc-proto", "crates/rpc", + "crates/rpc-proto", "crates/store", - "crates/utils", "crates/test-macro", + "crates/utils", ] resolver = "2" [workspace.package] -edition = "2021" +authors = ["Miden contributors"] +edition = "2021" +exclude = [".github/"] +homepage = "https://polygon.technology/polygon-miden" +license = "MIT" +readme = "README.md" +repository = "https://github.com/0xPolygonMiden/miden-node" rust-version = "1.84" -version = "0.8.0" -license = "MIT" -authors = ["Miden contributors"] -homepage = "https://polygon.technology/polygon-miden" -repository = "https://github.com/0xPolygonMiden/miden-node" -exclude = [".github/"] -readme = "README.md" +version = "0.8.0" [workspace.dependencies] -assert_matches = { version = "1.5" 
} -miden-air = { version = "0.12" } -miden-lib = { version = "0.7" } +assert_matches = { version = "1.5" } +miden-air = { version = "0.12" } +miden-lib = { version = "0.7" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } -miden-node-proto = { path = "crates/proto", version = "0.8" } -miden-node-rpc = { path = "crates/rpc", version = "0.8" } -miden-node-store = { path = "crates/store", version = "0.8" } -miden-node-test-macro = { path = "crates/test-macro" } -miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { version = "0.7" } -miden-processor = { version = "0.12" } -miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { version = "0.7" } -prost = { version = "0.13" } -rand = { version = "0.8" } -thiserror = { version = "2.0", default-features = false } -tokio = { version = "1.40", features = ["rt-multi-thread"] } -tokio-stream = { version = "0.1" } -tonic = { version = "0.12" } -tracing = { version = "0.1" } -tracing-subscriber = { version = "0.3", features = ["fmt", "json", "env-filter"] } +miden-node-proto = { path = "crates/proto", version = "0.8" } +miden-node-rpc = { path = "crates/rpc", version = "0.8" } +miden-node-store = { path = "crates/store", version = "0.8" } +miden-node-test-macro = { path = "crates/test-macro" } +miden-node-utils = { path = "crates/utils", version = "0.8" } +miden-objects = { version = "0.7" } +miden-processor = { version = "0.12" } +miden-stdlib = { version = "0.12", default-features = false } +miden-tx = { version = "0.7" } +prost = { version = "0.13" } +rand = { version = "0.8" } +thiserror = { version = "2.0", default-features = false } +tokio = { version = "1.40", features = ["rt-multi-thread"] } +tokio-stream = { version = "0.1" } +tonic = { version = "0.12" } +tracing = { version = "0.1" } +tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } # Lints are set to warn for development, which are promoted to errors in 
CI. [workspace.lints.clippy] # Pedantic lints are set to a lower priority which allows lints in the group to be selectively enabled. pedantic = { level = "warn", priority = -1 } -cast_possible_truncation = "allow" # Overly many instances especially regarding indices. -ignored_unit_patterns = "allow" # Stylistic choice. + +cast_possible_truncation = "allow" # Overly many instances especially regarding indices. +ignored_unit_patterns = "allow" # Stylistic choice. large_types_passed_by_value = "allow" # Triggered by BlockHeader being Copy + 334 bytes. -missing_errors_doc = "allow" # TODO: fixup and enable this. -missing_panics_doc = "allow" # TODO: fixup and enable this. -module_name_repetitions = "allow" # Many triggers, and is a stylistic choice. -must_use_candidate = "allow" # This marks many fn's which isn't helpful. +missing_errors_doc = "allow" # TODO: fixup and enable this. +missing_panics_doc = "allow" # TODO: fixup and enable this. +module_name_repetitions = "allow" # Many triggers, and is a stylistic choice. +must_use_candidate = "allow" # This marks many fn's which isn't helpful. should_panic_without_expect = "allow" # We don't care about the specific panic message. # End of pedantic lints. 
diff --git a/Makefile b/Makefile index b8f6bc3c6..920390135 100644 --- a/Makefile +++ b/Makefile @@ -31,8 +31,18 @@ format-check: ## Runs Format using nightly toolchain but only in check mode cargo +nightly fmt --all --check +.PHONY: toml +toml: ## Runs Format for all TOML files + taplo fmt + + +.PHONY: toml-check +toml-check: ## Runs Format for all TOML files but only in check mode + taplo fmt --check --verbose + + .PHONY: lint -lint: format fix clippy ## Runs all linting tasks at once (Clippy, fixing, formatting) +lint: format fix clippy toml ## Runs all linting tasks at once (Clippy, fixing, formatting) # --- docs ---------------------------------------------------------------------------------------- diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index cce8844fd..ce942f121 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -1,44 +1,44 @@ [package] -name = "miden-faucet" -version.workspace = true -description = "Miden node token faucet" -readme = "README.md" -keywords = ["miden", "node", "faucet"] -edition.workspace = true +authors.workspace = true +description = "Miden node token faucet" +edition.workspace = true +homepage.workspace = true +keywords = ["faucet", "miden", "node"] +license.workspace = true +name = "miden-faucet" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -anyhow = "1.0" -axum = { version = "0.7", features = ["tokio"] } -clap = { version = "4.5", features = ["derive", "string"] } -http = "1.1" -http-body-util = "0.1" -miden-lib = { workspace = true } +anyhow = "1.0" +axum = { version = "0.7", features = ["tokio"] } +clap = { version = "4.5", features = ["derive", "string"] } +http = "1.1" +http-body-util = "0.1" +miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { 
workspace = true } -miden-objects = { workspace = true } -miden-tx = { workspace = true, features = ["concurrent"] } -mime = "0.3" -rand = { workspace = true } -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -static-files = "0.2" -thiserror = { workspace = true } -tokio = { workspace = true, features = ["fs"] } -toml = { version = "0.8" } -tonic = { workspace = true } -tower = "0.5" -tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-tx = { workspace = true, features = ["concurrent"] } +mime = "0.3" +rand = { workspace = true } +rand_chacha = "0.3" +serde = { version = "1.0", features = ["derive"] } +static-files = "0.2" +thiserror = { workspace = true } +tokio = { workspace = true, features = ["fs"] } +toml = { version = "0.8" } +tonic = { workspace = true } +tower = "0.5" +tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } +tracing = { workspace = true } [build-dependencies] # Required to inject build metadata. 
miden-node-utils = { workspace = true, features = ["vergen"] } -static-files = "0.2" +static-files = "0.2" diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 37eda9a12..0f1e505fc 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -1,15 +1,15 @@ [package] -name = "miden-node" -version.workspace = true -description = "Miden node binary" -readme.workspace = true -keywords = ["miden", "node"] -edition.workspace = true +authors.workspace = true +description = "Miden node binary" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node"] +license.workspace = true +name = "miden-node" +readme.workspace = true +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -18,23 +18,23 @@ workspace = true tracing-forest = ["miden-node-block-producer/tracing-forest"] [dependencies] -anyhow = { version = "1.0" } -clap = { version = "4.5", features = ["derive", "string"] } -miden-lib = { workspace = true } +anyhow = { version = "1.0" } +clap = { version = "4.5", features = ["derive", "string"] } +miden-lib = { workspace = true } miden-node-block-producer = { workspace = true } -miden-node-rpc = { workspace = true } -miden-node-store = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -rand = { workspace = true } -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros"] } -toml = { version = "0.8" } -tracing = { workspace = true } +miden-node-rpc = { workspace = true } +miden-node-store = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +rand = { workspace = true } +rand_chacha = "0.3" +serde = { version = "1.0", features = ["derive"] } +tokio = { workspace = true, features = ["macros", 
"net", "rt-multi-thread"] } +toml = { version = "0.8" } +tracing = { workspace = true } [dev-dependencies] -figment = { version = "0.10", features = ["toml", "env", "test"] } +figment = { version = "0.10", features = ["env", "test", "toml"] } miden-node-utils = { workspace = true, features = ["tracing-forest"] } [build-dependencies] diff --git a/config/genesis.toml b/config/genesis.toml index 8c74a4dd3..52d98d142 100644 --- a/config/genesis.toml +++ b/config/genesis.toml @@ -1,11 +1,11 @@ # This is an example genesis input file for the Miden node. -version = 1 timestamp = 1672531200 +version = 1 [[accounts]] -type = "BasicFungibleFaucet" +auth_scheme = "RpoFalcon512" +decimals = 12 +max_supply = 1000000 storage_mode = "public" -auth_scheme = "RpoFalcon512" token_symbol = "POL" -decimals = 12 -max_supply = 1000000 +type = "BasicFungibleFaucet" diff --git a/config/miden-faucet.toml b/config/miden-faucet.toml index 8124e33db..6005ef6a5 100644 --- a/config/miden-faucet.toml +++ b/config/miden-faucet.toml @@ -1,5 +1,5 @@ -endpoint = { host = "localhost", port = 8080 } -node_url = "http://localhost:57291" -timeout_ms = 10000 -asset_amount_options = [100, 500, 1000] -faucet_account_path = "accounts/faucet.mac" +asset_amount_options = [100, 1000, 500] +endpoint = { host = "localhost", port = 8080 } +faucet_account_path = "accounts/faucet.mac" +node_url = "http://localhost:57291" +timeout_ms = 10000 diff --git a/config/miden-node.toml b/config/miden-node.toml index b4b133c19..d260f189e 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -13,7 +13,7 @@ endpoint = { host = "0.0.0.0", port = 57291 } [store] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-store', 1)) % 2**16 -endpoint = { host = "localhost", port = 28943 } +blockstore_dir = "/opt/miden/blocks" database_filepath = "/opt/miden/miden-store.sqlite3" -genesis_filepath = "/opt/miden/genesis.dat" -blockstore_dir = "/opt/miden/blocks" +endpoint = { host = "localhost", port = 28943 } 
+genesis_filepath = "/opt/miden/genesis.dat" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 27d52be8a..e48af5ee8 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -1,15 +1,15 @@ [package] -name = "miden-node-block-producer" -version.workspace = true -description = "Miden node's block producer component" -readme = "README.md" -keywords = ["miden", "node", "block-producer"] -edition.workspace = true +authors.workspace = true +description = "Miden node's block producer component" +edition.workspace = true +homepage.workspace = true +keywords = ["block-producer", "miden", "node"] +license.workspace = true +name = "miden-node-block-producer" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -18,31 +18,31 @@ workspace = true tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] -async-trait = { version = "0.1" } -itertools = { version = "0.13" } -miden-lib = { workspace = true } +async-trait = { version = "0.1" } +itertools = { version = "0.13" } +miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-processor = { workspace = true } -miden-stdlib = { workspace = true } -miden-tx = { workspace = true } -rand = { version = "0.8" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros", "sync", "time"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-processor = { workspace = true } +miden-stdlib = { workspace = true } +miden-tx = { workspace = true } +rand = { version = 
"0.8" } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } [dev-dependencies] -assert_matches = { workspace = true} -miden-air = { workspace = true } -miden-lib = { workspace = true, features = ["testing"] } +assert_matches = { workspace = true } +miden-air = { workspace = true } +miden-lib = { workspace = true, features = ["testing"] } miden-node-test-macro = { path = "../test-macro" } -miden-objects = { workspace = true, features = ["testing"] } -miden-tx = { workspace = true, features = ["testing"] } -pretty_assertions = "1.4" -rand_chacha = { version = "0.3", default-features = false } -tokio = { workspace = true, features = ["test-util"] } -winterfell = { version = "0.11" } +miden-objects = { workspace = true, features = ["testing"] } +miden-tx = { workspace = true, features = ["testing"] } +pretty_assertions = "1.4" +rand_chacha = { version = "0.3", default-features = false } +tokio = { workspace = true, features = ["test-util"] } +winterfell = { version = "0.11" } diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index 445a93a78..84cf5ae3e 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -1,33 +1,33 @@ [package] -name = "miden-node-proto" -version.workspace = true -description = "Miden node message definitions (Store, Block Producer and RPC)" -readme = "README.md" -keywords = ["miden", "node", "protobuf", "rpc"] -edition.workspace = true +authors.workspace = true +description = "Miden node message definitions (Store, Block Producer and RPC)" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "protobuf", "rpc"] +license.workspace = true +name = "miden-node-proto" +readme = "README.md" +repository.workspace = true rust-version.workspace = true 
-license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -hex = { version = "0.4" } +hex = { version = "0.4" } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -prost = { workspace = true } -thiserror = { workspace = true } -tonic = { workspace = true } +miden-objects = { workspace = true } +prost = { workspace = true } +thiserror = { workspace = true } +tonic = { workspace = true } [dev-dependencies] proptest = { version = "1.5" } [build-dependencies] -anyhow = { version = "1.0" } -prost = { workspace = true } +anyhow = { version = "1.0" } +prost = { workspace = true } prost-build = { version = "0.13" } -protox = { version = "0.7" } +protox = { version = "0.7" } tonic-build = { version = "0.12" } diff --git a/crates/rpc-proto/Cargo.toml b/crates/rpc-proto/Cargo.toml index a9015ed67..d4ee41367 100644 --- a/crates/rpc-proto/Cargo.toml +++ b/crates/rpc-proto/Cargo.toml @@ -1,20 +1,20 @@ [package] -name = "miden-rpc-proto" -version.workspace = true -description = "Miden node RPC message definitions" -readme = "README.md" -keywords = ["miden", "node", "protobuf", "rpc"] -categories = ["no-std::no-alloc"] -edition.workspace = true +authors.workspace = true +categories = ["no-std::no-alloc"] +description = "Miden node RPC message definitions" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "protobuf", "rpc"] +license.workspace = true +name = "miden-rpc-proto" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [features] default = ["std"] -std = [] +std = [] diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 3dfba5246..593915098 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ 
-1,15 +1,15 @@ [package] -name = "miden-node-rpc" -version.workspace = true -description = "Miden node's front-end RPC server" -readme = "README.md" -keywords = ["miden", "node", "rpc"] -edition.workspace = true +authors.workspace = true +description = "Miden node's front-end RPC server" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "rpc"] +license.workspace = true +name = "miden-node-rpc" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true @@ -17,14 +17,14 @@ workspace = true [dependencies] miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-tx = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -tokio = { workspace = true, features = ["rt-multi-thread", "net", "macros"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tonic-web = { version = "0.12" } -tracing = { workspace = true } +miden-objects = { workspace = true } +miden-tx = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tonic-web = { version = "0.12" } +tracing = { workspace = true } [dev-dependencies] miden-node-utils = { workspace = true, features = ["tracing-forest"] } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 8eec38f27..f987304b7 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -1,36 +1,36 @@ [package] -name = "miden-node-store" -version.workspace = true -description = "Miden node's state store component" -readme = "README.md" -keywords = ["miden", "node", "store"] -edition.workspace = true +authors.workspace = 
true +description = "Miden node's state store component" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "store"] +license.workspace = true +name = "miden-node-store" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [dependencies] -deadpool-sqlite = { version = "0.9.0", features = ["rt_tokio_1"] } -hex = { version = "0.4" } -miden-lib = { workspace = true } -miden-node-proto = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -rusqlite = { version = "0.32.1", features = ["array", "buildtime_bindgen", "bundled"] } +deadpool-sqlite = { version = "0.9.0", features = ["rt_tokio_1"] } +hex = { version = "0.4" } +miden-lib = { workspace = true } +miden-node-proto = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +rusqlite = { version = "0.32.1", features = ["array", "buildtime_bindgen", "bundled"] } rusqlite_migration = { version = "1.3" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["fs", "net", "macros", "rt-multi-thread"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["fs", "macros", "net", "rt-multi-thread"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } [dev-dependencies] -assert_matches = { workspace = true} +assert_matches = { workspace = true } miden-node-utils = { workspace = true, features = ["tracing-forest"] } -miden-objects = { workspace = true, features = ["testing"] } +miden-objects = 
{ workspace = true, features = ["testing"] } diff --git a/crates/test-macro/Cargo.toml b/crates/test-macro/Cargo.toml index 4f1b3b15b..b07307cfc 100644 --- a/crates/test-macro/Cargo.toml +++ b/crates/test-macro/Cargo.toml @@ -1,22 +1,22 @@ [package] -name = "miden-node-test-macro" -version = "0.1.0" -description = "Miden node's test macro" -readme = "README.md" -keywords = ["miden", "node", "utils", "macro"] -edition.workspace = true +authors.workspace = true +description = "Miden node's test macro" +edition.workspace = true +homepage.workspace = true +keywords = ["macro", "miden", "node", "utils"] +license.workspace = true +name = "miden-node-test-macro" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version = "0.1.0" [lints] workspace = true [dependencies] quote = { version = "1.0" } -syn = { version = "2.0" , features = ["full", "extra-traits"]} +syn = { version = "2.0", features = ["extra-traits", "full"] } [lib] proc-macro = true diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 001f0d463..13474ae79 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -1,36 +1,36 @@ [package] -name = "miden-node-utils" -version.workspace = true -description = "Miden node's shared utilities" -readme = "README.md" -keywords = ["miden", "node", "utils"] -edition.workspace = true +authors.workspace = true +description = "Miden node's shared utilities" +edition.workspace = true +homepage.workspace = true +keywords = ["miden", "node", "utils"] +license.workspace = true +name = "miden-node-utils" +readme = "README.md" +repository.workspace = true rust-version.workspace = true -license.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true +version.workspace = true [lints] workspace = true [features] # Enables depedencies intended for build script generation of 
version metadata. -vergen = ["dep:vergen", "dep:vergen-gitcl"] +vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] -anyhow = { version = "1.0" } -figment = { version = "0.10", features = ["toml", "env"] } -itertools = { version = "0.12" } -miden-objects = { workspace = true } -rand = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tonic = { workspace = true } -tracing = { workspace = true } -tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } +anyhow = { version = "1.0" } +figment = { version = "0.10", features = ["env", "toml"] } +itertools = { version = "0.12" } +miden-objects = { workspace = true } +rand = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tonic = { workspace = true } +tracing = { workspace = true } +tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. 
-vergen = { "version" = "9.0", optional = true } +vergen = { "version" = "9.0", optional = true } vergen-gitcl = { version = "1.0", features = ["cargo", "rustc"], optional = true } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 217b108a4..31c2f2645 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.84" -components = ["rustfmt", "rust-src", "clippy"] -profile = "minimal" +channel = "1.84" +components = ["clippy", "rust-src", "rustfmt"] +profile = "minimal" diff --git a/rustfmt.toml b/rustfmt.toml index 59ee9ac84..20bb95358 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,18 +1,18 @@ -edition = "2021" -array_width = 80 -attr_fn_like_width = 80 -chain_width = 80 -comment_width = 100 -condense_wildcard_suffixes = true -fn_call_width = 80 -group_imports = "StdExternalCrate" -imports_granularity = "Crate" -newline_style = "Unix" -match_block_trailing_comma = true -single_line_if_else_max_width = 60 +array_width = 80 +attr_fn_like_width = 80 +chain_width = 80 +comment_width = 100 +condense_wildcard_suffixes = true +edition = "2021" +fn_call_width = 80 +group_imports = "StdExternalCrate" +imports_granularity = "Crate" +match_block_trailing_comma = true +newline_style = "Unix" +single_line_if_else_max_width = 60 single_line_let_else_max_width = 60 -struct_lit_width = 40 -struct_variant_width = 40 -use_field_init_shorthand = true -use_try_shorthand = true -wrap_comments = true +struct_lit_width = 40 +struct_variant_width = 40 +use_field_init_shorthand = true +use_try_shorthand = true +wrap_comments = true From 485082828ffa72c3cf72f4cae28c435673ce691c Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Tue, 28 Jan 2025 20:00:49 +1300 Subject: [PATCH 03/27] chore: update itertools and axum (#646) --- Cargo.lock | 73 +++++++++++++++++++++++++++++--- Cargo.toml | 1 + bin/faucet/Cargo.toml | 2 +- crates/block-producer/Cargo.toml | 2 +- crates/utils/Cargo.toml | 2 +- 5 
files changed, 71 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9009ea63b..c76165203 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -197,16 +197,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", "bytes", "futures-util", "http", "http-body", "http-body-util", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core 0.5.0", + "bytes", + "form_urlencoded", + "futures-util", + "http", + "http-body", + "http-body-util", "hyper", "hyper-util", "itoa", - "matchit", + "matchit 0.8.4", "memchr", "mime", "percent-encoding", @@ -242,6 +269,25 @@ dependencies = [ "sync_wrapper", "tower-layer", "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", "tracing", ] @@ -1203,6 +1249,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.14" @@ -1403,6 +1458,12 @@ version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "memchr" version = "2.7.4" @@ -1484,7 +1545,7 @@ name = "miden-faucet" version = "0.8.0" dependencies = [ "anyhow", - "axum", + "axum 0.8.1", "clap", "http", "http-body-util", @@ -1599,7 +1660,7 @@ version = "0.8.0" dependencies = [ "assert_matches", "async-trait", - "itertools 0.13.0", + "itertools 0.14.0", "miden-air", "miden-lib", "miden-node-proto", @@ -1689,7 +1750,7 @@ version = "0.8.0" dependencies = [ "anyhow", "figment", - "itertools 0.12.1", + "itertools 0.14.0", "miden-objects", "rand", "serde", @@ -3063,7 +3124,7 @@ checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" dependencies = [ "async-stream", "async-trait", - "axum", + "axum 0.7.9", "base64", "bytes", "h2", diff --git a/Cargo.toml b/Cargo.toml index ccf2199df..33c7a64b3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ version = "0.8.0" [workspace.dependencies] assert_matches = { version = "1.5" } +itertools = { version = "0.14" } miden-air = { version = "0.12" } miden-lib = { version = "0.7" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index ce942f121..16cf9b188 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -16,7 +16,7 @@ workspace = true [dependencies] anyhow = "1.0" -axum = { version = "0.7", features = ["tokio"] } +axum = { version = "0.8", features = ["tokio"] } clap = { version = "4.5", features = ["derive", "string"] } http = "1.1" http-body-util = "0.1" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index e48af5ee8..d9b15f3cf 100644 --- 
a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -19,7 +19,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] async-trait = { version = "0.1" } -itertools = { version = "0.13" } +itertools = { workspace = true } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 13474ae79..78c921c51 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,7 +21,7 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } -itertools = { version = "0.12" } +itertools = { workspace = true } miden-objects = { workspace = true } rand = { workspace = true } serde = { version = "1.0", features = ["derive"] } From 8e361c38b4799d06042f57170fc95ee282a19a0c Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Tue, 28 Jan 2025 21:25:31 +1300 Subject: [PATCH 04/27] ci: add workspace-lints to Makefile and lint.yml (#648) --- .github/workflows/lint.yml | 11 +++++++++++ Makefile | 7 ++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 756c80f08..e4ac4540a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -54,6 +54,17 @@ jobs: tool: taplo-cli - run: make toml-check + workspace-lints: + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - uses: actions/checkout@v4 + - uses: taiki-e/install-action@v2 + with: + tool: cargo-workspace-lints + - run: | + make workspace-check + doc: name: doc runs-on: ubuntu-latest diff --git a/Makefile b/Makefile index 920390135..5a6d691b0 100644 --- a/Makefile +++ b/Makefile @@ -41,8 +41,13 @@ toml-check: ## Runs Format for all TOML files but only in check mode taplo fmt --check --verbose +.PHONY: workspace-check +workspace-check: ## 
Runs a check that all packages have `lints.workspace = true` + cargo workspace-lints + + .PHONY: lint -lint: format fix clippy toml ## Runs all linting tasks at once (Clippy, fixing, formatting) +lint: format fix clippy toml workspace-check ## Runs all linting tasks at once (Clippy, fixing, formatting, workspace) # --- docs ---------------------------------------------------------------------------------------- From 5b55f309a1975d24b990ac3dc599cdbb8ddd05d6 Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Thu, 30 Jan 2025 22:33:28 +1300 Subject: [PATCH 05/27] chore: remove Endpoint and Protocol types (#654) --- CHANGELOG.md | 1 + Cargo.lock | 291 ++++++++++++++++++++++++++++ Cargo.toml | 1 + bin/faucet/Cargo.toml | 1 + bin/faucet/src/client.rs | 2 +- bin/faucet/src/config.rs | 38 ++-- bin/faucet/src/main.rs | 8 +- bin/node/Cargo.toml | 1 + bin/node/src/config.rs | 39 ++-- config/miden-node.toml | 6 +- crates/block-producer/Cargo.toml | 1 + crates/block-producer/src/config.rs | 38 ++-- crates/block-producer/src/server.rs | 5 +- crates/rpc/Cargo.toml | 1 + crates/rpc/src/config.rs | 41 ++-- crates/rpc/src/server/api.rs | 9 +- crates/rpc/src/server/mod.rs | 5 +- crates/store/Cargo.toml | 1 + crates/store/src/config.rs | 31 ++- crates/store/src/server/mod.rs | 5 +- crates/utils/src/config.rs | 61 +----- 21 files changed, 441 insertions(+), 145 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d89a0cd53..ac87a64ad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Changes - [BREAKING] Updated minimum Rust version to 1.84. +- [BREAKING] `Endpoint` configuration simplified to a single string (#654). 
## v0.7.2 (2025-01-29) diff --git a/Cargo.lock b/Cargo.lock index 264d733be..f08cd3eff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -782,6 +782,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dissimilar" version = "1.0.9" @@ -1184,12 +1195,151 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + 
"utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + [[package]] name = "indenter" version = "0.3.3" @@ -1393,6 +1543,12 @@ version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -1578,6 +1734,7 @@ dependencies = [ "tower 0.5.2", "tower-http 0.6.2", "tracing", + "url", ] [[package]] @@ -1664,6 +1821,7 @@ dependencies = [ "tokio", "toml", "tracing", + "url", ] [[package]] @@ -1691,6 +1849,7 @@ dependencies = [ "tokio-stream", "tonic", "tracing", + "url", "winterfell", ] @@ -1725,6 +1884,7 @@ dependencies = [ "tonic", "tonic-web", "tracing", + "url", ] [[package]] @@ -1746,6 +1906,7 @@ dependencies = [ "tokio-stream", "tonic", "tracing", + "url", ] [[package]] @@ -2813,6 +2974,12 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "static-files" version = "0.2.4" @@ -2890,6 +3057,17 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "target-triple" version = "0.1.3" @@ -3043,6 +3221,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version = "1.43.0" @@ -3439,6 +3627,30 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -3996,12 +4208,48 @@ dependencies = [ "bitflags", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "yansi" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -4022,3 +4270,46 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index 33c7a64b3..bdfa58e34 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ tokio-stream = { version = "0.1" } tonic = { version = "0.12" } tracing = { version = "0.1" } 
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } +url = { version = "2.5", features = ["serde"] } # Lints are set to warn for development, which are promoted to errors in CI. [workspace.lints.clippy] diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 16cf9b188..83194cc0f 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -37,6 +37,7 @@ tonic = { workspace = true } tower = "0.5" tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } tracing = { workspace = true } +url = { workspace = true } [build-dependencies] # Required to inject build metadata. diff --git a/bin/faucet/src/client.rs b/bin/faucet/src/client.rs index 44a9ee19a..faf9b17b5 100644 --- a/bin/faucet/src/client.rs +++ b/bin/faucet/src/client.rs @@ -200,7 +200,7 @@ impl FaucetClient { pub async fn initialize_faucet_client( config: &FaucetConfig, ) -> Result<(ApiClient, BlockHeader, ChainMmr), ClientError> { - let endpoint = tonic::transport::Endpoint::try_from(config.node_url.clone()) + let endpoint = tonic::transport::Endpoint::try_from(config.node_url.to_string()) .context("Failed to parse node URL from configuration file")? 
.timeout(Duration::from_millis(config.timeout_ms)); diff --git a/bin/faucet/src/config.rs b/bin/faucet/src/config.rs index 24eb6a96a..20b49b277 100644 --- a/bin/faucet/src/config.rs +++ b/bin/faucet/src/config.rs @@ -3,10 +3,9 @@ use std::{ path::PathBuf, }; -use miden_node_utils::config::{ - Endpoint, Protocol, DEFAULT_FAUCET_SERVER_PORT, DEFAULT_NODE_RPC_PORT, -}; +use miden_node_utils::config::{DEFAULT_FAUCET_SERVER_PORT, DEFAULT_NODE_RPC_PORT}; use serde::{Deserialize, Serialize}; +use url::Url; // Faucet config // ================================================================================================ @@ -20,10 +19,10 @@ pub const DEFAULT_RPC_TIMEOUT_MS: u64 = 10000; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct FaucetConfig { - /// Endpoint of the faucet - pub endpoint: Endpoint, + /// Endpoint of the faucet in the format `:` + pub endpoint: Url, /// Node RPC gRPC endpoint in the format `http://[:]` - pub node_url: String, + pub node_url: Url, /// Timeout for RPC requests in milliseconds pub timeout_ms: u64, /// Possible options on the amount of asset that should be dispersed on each faucet request @@ -44,15 +43,30 @@ impl Display for FaucetConfig { impl Default for FaucetConfig { fn default() -> Self { Self { - endpoint: Endpoint { - host: "0.0.0.0".to_string(), - port: DEFAULT_FAUCET_SERVER_PORT, - protocol: Protocol::Http, - }, - node_url: Endpoint::localhost(DEFAULT_NODE_RPC_PORT).to_string(), + endpoint: Url::parse(format!("http://0.0.0.0:{DEFAULT_FAUCET_SERVER_PORT}").as_str()) + .unwrap(), + node_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_NODE_RPC_PORT}").as_str()) + .unwrap(), timeout_ms: DEFAULT_RPC_TIMEOUT_MS, asset_amount_options: vec![100, 500, 1000], faucet_account_path: DEFAULT_FAUCET_ACCOUNT_PATH.into(), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::FaucetConfig; + + #[tokio::test] + async fn 
default_faucet_config() { + // Default does not panic + let config = FaucetConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 35dd9be65..f7ea8c91f 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -123,9 +123,11 @@ async fn main() -> anyhow::Result<()> { ) .with_state(faucet_state); - let listener = TcpListener::bind((config.endpoint.host.as_str(), config.endpoint.port)) - .await - .context("Failed to bind TCP listener")?; + let socket_addr = config.endpoint.socket_addrs(|| None)?.into_iter().next().ok_or( + anyhow::anyhow!("Couldn't get any socket addrs for endpoint: {}", config.endpoint), + )?; + let listener = + TcpListener::bind(socket_addr).await.context("Failed to bind TCP listener")?; info!(target: COMPONENT, endpoint = %config.endpoint, "Server started"); diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 0f1e505fc..8487908e5 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -32,6 +32,7 @@ serde = { version = "1.0", features = ["derive"] } tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread"] } toml = { version = "0.8" } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] figment = { version = "0.10", features = ["env", "test", "toml"] } diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index 21a018920..4fef3981f 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -1,8 +1,8 @@ use miden_node_block_producer::config::BlockProducerConfig; use miden_node_rpc::config::RpcConfig; use miden_node_store::config::StoreConfig; -use miden_node_utils::config::Endpoint; use serde::{Deserialize, Serialize}; +use url::Url; /// Node top-level configuration. 
#[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] @@ -17,7 +17,7 @@ pub struct NodeConfig { #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] struct NormalizedRpcConfig { - endpoint: Endpoint, + endpoint: Url, } /// A specialized variant of [`BlockProducerConfig`] with redundant fields within [`NodeConfig`] @@ -25,7 +25,7 @@ struct NormalizedRpcConfig { #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] struct NormalizedBlockProducerConfig { - endpoint: Endpoint, + endpoint: Url, verify_tx_proofs: bool, } @@ -56,14 +56,14 @@ impl NodeConfig { let block_producer = BlockProducerConfig { endpoint: block_producer.endpoint, - store_url: store.endpoint_url(), + store_url: store.endpoint.clone(), verify_tx_proofs: block_producer.verify_tx_proofs, }; let rpc = RpcConfig { endpoint: rpc.endpoint, - store_url: store.endpoint_url(), - block_producer_url: block_producer.endpoint_url(), + store_url: store.endpoint.clone(), + block_producer_url: block_producer.endpoint.clone(), }; (block_producer, rpc, store) @@ -74,7 +74,8 @@ impl NodeConfig { mod tests { use figment::Jail; use miden_node_store::config::StoreConfig; - use miden_node_utils::config::{load_config, Endpoint, Protocol}; + use miden_node_utils::config::load_config; + use url::Url; use super::NodeConfig; use crate::{ @@ -89,14 +90,14 @@ mod tests { NODE_CONFIG_FILE_PATH, r#" [block_producer] - endpoint = { host = "127.0.0.1", port = 8080 } + endpoint = "http://127.0.0.1:8080" verify_tx_proofs = true [rpc] - endpoint = { host = "127.0.0.1", port = 8080, protocol = "Http" } + endpoint = "http://127.0.0.1:8080" [store] - endpoint = { host = "127.0.0.1", port = 8080, protocol = "Https" } + endpoint = "https://127.0.0.1:8080" database_filepath = "local.sqlite3" genesis_filepath = "genesis.dat" blockstore_dir = "blocks" @@ -109,26 +110,14 @@ mod tests 
{ config, NodeConfig { block_producer: NormalizedBlockProducerConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::default() - }, + endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), verify_tx_proofs: true }, rpc: NormalizedRpcConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::Http - }, + endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), }, store: StoreConfig { - endpoint: Endpoint { - host: "127.0.0.1".to_string(), - port: 8080, - protocol: Protocol::Https - }, + endpoint: Url::parse("https://127.0.0.1:8080").unwrap(), database_filepath: "local.sqlite3".into(), genesis_filepath: "genesis.dat".into(), blockstore_dir: "blocks".into() diff --git a/config/miden-node.toml b/config/miden-node.toml index d260f189e..4356ce516 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -2,18 +2,18 @@ [block_producer] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-block-producer', 1)) % 2**16 -endpoint = { host = "localhost", port = 48046 } +endpoint = "http://127.0.0.1:48046" # enables or disables the verification of transaction proofs before they are accepted into the # transaction queue. 
verify_tx_proofs = true [rpc] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-rpc', 1)) % 2**16 -endpoint = { host = "0.0.0.0", port = 57291 } +endpoint = "http://0.0.0.0:57291" [store] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-store', 1)) % 2**16 blockstore_dir = "/opt/miden/blocks" database_filepath = "/opt/miden/miden-store.sqlite3" -endpoint = { host = "localhost", port = 28943 } +endpoint = "http://127.0.0.1:28943" genesis_filepath = "/opt/miden/genesis.dat" diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index d9b15f3cf..d01bfebeb 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -34,6 +34,7 @@ tokio = { workspace = true, features = ["macros", "net", "rt-multi-th tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/block-producer/src/config.rs b/crates/block-producer/src/config.rs index e95b9fe8b..0ad19741b 100644 --- a/crates/block-producer/src/config.rs +++ b/crates/block-producer/src/config.rs @@ -1,7 +1,8 @@ use std::fmt::{Display, Formatter}; -use miden_node_utils::config::{Endpoint, DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_STORE_PORT}; +use miden_node_utils::config::{DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_STORE_PORT}; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -10,10 +11,10 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct BlockProducerConfig { - pub endpoint: Endpoint, + pub endpoint: Url, /// Store gRPC endpoint in the format `http://[:]`. 
- pub store_url: String, + pub store_url: Url, /// Enable or disable the verification of transaction proofs before they are accepted into the /// transaction queue. @@ -24,12 +25,6 @@ pub struct BlockProducerConfig { pub verify_tx_proofs: bool, } -impl BlockProducerConfig { - pub fn endpoint_url(&self) -> String { - self.endpoint.to_string() - } -} - impl Display for BlockProducerConfig { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( @@ -42,9 +37,30 @@ impl Display for BlockProducerConfig { impl Default for BlockProducerConfig { fn default() -> Self { Self { - endpoint: Endpoint::localhost(DEFAULT_BLOCK_PRODUCER_PORT), - store_url: Endpoint::localhost(DEFAULT_STORE_PORT).to_string(), + endpoint: Url::parse( + format!("http://127.0.0.1:{DEFAULT_BLOCK_PRODUCER_PORT}").as_str(), + ) + .unwrap(), + store_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), verify_tx_proofs: true, } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::BlockProducerConfig; + + #[tokio::test] + async fn default_block_producer_config() { + // Default does not panic + let config = BlockProducerConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 5c7a42ec5..2eaf0e506 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, net::ToSocketAddrs}; +use std::collections::HashMap; use miden_node_proto::generated::{ block_producer::api_server, requests::SubmitProvenTransactionRequest, @@ -66,8 +66,9 @@ impl BlockProducer { let rpc_listener = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) 
.map_err(ApiError::EndpointToSocketFailed)? + .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string())) .map(TcpListener::bind)? diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 593915098..51670ff91 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -25,6 +25,7 @@ tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tonic-web = { version = "0.12" } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] miden-node-utils = { workspace = true, features = ["tracing-forest"] } diff --git a/crates/rpc/src/config.rs b/crates/rpc/src/config.rs index 07dac0778..a3b3bb2a9 100644 --- a/crates/rpc/src/config.rs +++ b/crates/rpc/src/config.rs @@ -1,9 +1,10 @@ use std::fmt::{Display, Formatter}; use miden_node_utils::config::{ - Endpoint, Protocol, DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_NODE_RPC_PORT, DEFAULT_STORE_PORT, + DEFAULT_BLOCK_PRODUCER_PORT, DEFAULT_NODE_RPC_PORT, DEFAULT_STORE_PORT, }; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -11,11 +12,11 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct RpcConfig { - pub endpoint: Endpoint, + pub endpoint: Url, /// Store gRPC endpoint in the format `http://[:]`. - pub store_url: String, + pub store_url: Url, /// Block producer gRPC endpoint in the format `http://[:]`. 
- pub block_producer_url: String, + pub block_producer_url: Url, } impl RpcConfig { @@ -36,13 +37,31 @@ impl Display for RpcConfig { impl Default for RpcConfig { fn default() -> Self { Self { - endpoint: Endpoint { - host: "0.0.0.0".to_string(), - port: DEFAULT_NODE_RPC_PORT, - protocol: Protocol::default(), - }, - store_url: Endpoint::localhost(DEFAULT_STORE_PORT).to_string(), - block_producer_url: Endpoint::localhost(DEFAULT_BLOCK_PRODUCER_PORT).to_string(), + endpoint: Url::parse(format!("http://0.0.0.0:{DEFAULT_NODE_RPC_PORT}").as_str()) + .unwrap(), + store_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), + block_producer_url: Url::parse( + format!("http://127.0.0.1:{DEFAULT_BLOCK_PRODUCER_PORT}").as_str(), + ) + .unwrap(), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::RpcConfig; + + #[tokio::test] + async fn default_rpc_config() { + // Default does not panic + let config = RpcConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index 609b87435..976ed4fc2 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -41,14 +41,15 @@ pub struct RpcApi { impl RpcApi { pub(super) async fn from_config(config: &RpcConfig) -> Result { - let store = store_client::ApiClient::connect(config.store_url.clone()).await?; - info!(target: COMPONENT, store_endpoint = config.store_url, "Store client initialized"); + let store = store_client::ApiClient::connect(config.store_url.to_string()).await?; + info!(target: COMPONENT, store_endpoint = config.store_url.as_str(), "Store client initialized"); let block_producer = - block_producer_client::ApiClient::connect(config.block_producer_url.clone()).await?; + 
block_producer_client::ApiClient::connect(config.block_producer_url.to_string()) + .await?; info!( target: COMPONENT, - block_producer_endpoint = config.block_producer_url, + block_producer_endpoint = config.block_producer_url.as_str(), "Block producer client initialized", ); diff --git a/crates/rpc/src/server/mod.rs b/crates/rpc/src/server/mod.rs index dcef246b7..d728eab45 100644 --- a/crates/rpc/src/server/mod.rs +++ b/crates/rpc/src/server/mod.rs @@ -1,5 +1,3 @@ -use std::net::ToSocketAddrs; - use api::RpcApi; use miden_node_proto::generated::rpc::api_server; use miden_node_utils::errors::ApiError; @@ -33,8 +31,9 @@ impl Rpc { let addr = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) .map_err(ApiError::EndpointToSocketFailed)? + .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string()))?; diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index f987304b7..b1fdf2610 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -29,6 +29,7 @@ tokio = { workspace = true, features = ["fs", "macros", "net", "rt- tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/store/src/config.rs b/crates/store/src/config.rs index 3cac20a77..3a065bcf3 100644 --- a/crates/store/src/config.rs +++ b/crates/store/src/config.rs @@ -3,8 +3,9 @@ use std::{ path::PathBuf, }; -use miden_node_utils::config::{Endpoint, DEFAULT_STORE_PORT}; +use miden_node_utils::config::DEFAULT_STORE_PORT; use serde::{Deserialize, Serialize}; +use url::Url; // Main config // ================================================================================================ @@ -13,7 +14,7 @@ use serde::{Deserialize, Serialize}; #[serde(deny_unknown_fields)] pub struct StoreConfig { /// Defines the listening socket. 
- pub endpoint: Endpoint, + pub endpoint: Url, /// `SQLite` database file pub database_filepath: PathBuf, /// Genesis file @@ -22,12 +23,6 @@ pub struct StoreConfig { pub blockstore_dir: PathBuf, } -impl StoreConfig { - pub fn endpoint_url(&self) -> String { - self.endpoint.to_string() - } -} - impl Display for StoreConfig { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_fmt(format_args!( @@ -41,10 +36,28 @@ impl Default for StoreConfig { fn default() -> Self { const NODE_STORE_DIR: &str = "./"; Self { - endpoint: Endpoint::localhost(DEFAULT_STORE_PORT), + endpoint: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) + .unwrap(), database_filepath: PathBuf::from(NODE_STORE_DIR.to_string() + "miden-store.sqlite3"), genesis_filepath: PathBuf::from(NODE_STORE_DIR.to_string() + "genesis.dat"), blockstore_dir: PathBuf::from(NODE_STORE_DIR.to_string() + "blocks"), } } } + +#[cfg(test)] +mod tests { + use tokio::net::TcpListener; + + use super::StoreConfig; + + #[tokio::test] + async fn default_store_config() { + // Default does not panic + let config = StoreConfig::default(); + // Default can bind + let socket_addrs = config.endpoint.socket_addrs(|| None).unwrap(); + let socket_addr = socket_addrs.into_iter().next().unwrap(); + let _listener = TcpListener::bind(socket_addr).await.unwrap(); + } +} diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index f73c9e9fe..6ef429d08 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -1,4 +1,4 @@ -use std::{net::ToSocketAddrs, sync::Arc}; +use std::sync::Arc; use miden_node_proto::generated::store::api_server; use miden_node_utils::errors::ApiError; @@ -44,8 +44,9 @@ impl Store { let addr = config .endpoint - .to_socket_addrs() + .socket_addrs(|| None) .map_err(ApiError::EndpointToSocketFailed)? 
+ .into_iter() .next() .ok_or_else(|| ApiError::AddressResolutionFailed(config.endpoint.to_string()))?; diff --git a/crates/utils/src/config.rs b/crates/utils/src/config.rs index 5c70315df..4f79c6c49 100644 --- a/crates/utils/src/config.rs +++ b/crates/utils/src/config.rs @@ -1,73 +1,16 @@ -use std::{ - fmt::{Display, Formatter}, - io, - net::{SocketAddr, ToSocketAddrs}, - path::Path, - vec, -}; +use std::path::Path; use figment::{ providers::{Format, Toml}, Figment, }; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; pub const DEFAULT_NODE_RPC_PORT: u16 = 57291; pub const DEFAULT_BLOCK_PRODUCER_PORT: u16 = 48046; pub const DEFAULT_STORE_PORT: u16 = 28943; pub const DEFAULT_FAUCET_SERVER_PORT: u16 = 8080; -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize, Default)] -pub enum Protocol { - #[default] - Http, - Https, -} -/// The `(host, port)` pair for the server's listening socket. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Serialize, Deserialize)] -pub struct Endpoint { - /// Host used by the store. - pub host: String, - /// Port number used by the store. - pub port: u16, - /// Protocol type: http or https. 
- #[serde(default)] - pub protocol: Protocol, -} - -impl Endpoint { - pub fn localhost(port: u16) -> Self { - Endpoint { - host: "localhost".to_string(), - port, - protocol: Protocol::default(), - } - } -} - -impl ToSocketAddrs for Endpoint { - type Iter = vec::IntoIter; - fn to_socket_addrs(&self) -> io::Result { - (self.host.as_ref(), self.port).to_socket_addrs() - } -} - -impl Display for Endpoint { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let Endpoint { protocol, host, port } = self; - f.write_fmt(format_args!("{protocol}://{host}:{port}")) - } -} - -impl Display for Protocol { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - Protocol::Http => f.write_str("http"), - Protocol::Https => f.write_str("https"), - } - } -} - /// Loads the user configuration. /// /// This function will look for the configuration file at the provided path. If the path is From ab0701dd0eae8d8dd7dc7499332f2478cd36021a Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Mon, 3 Feb 2025 09:03:21 +0200 Subject: [PATCH 06/27] feat: open-telemetry exporter (#660) --- CHANGELOG.md | 4 + Cargo.lock | 269 ++++++++++++++++++++++++++++++++++++ bin/node/src/main.rs | 14 +- crates/utils/Cargo.toml | 26 ++-- crates/utils/src/logging.rs | 73 +++++++++- 5 files changed, 371 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ac87a64ad..c9f5ba057 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Enhancements + +- Add an optional open-telemetry trace exporter (#659). + ### Changes - [BREAKING] Updated minimum Rust version to 1.84. 
diff --git a/Cargo.lock b/Cargo.lock index f08cd3eff..2d8e3495c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -574,6 +574,16 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -915,12 +925,34 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + [[package]] name = "futures-io" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.31" @@ -940,10 +972,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-core", + "futures-macro", "futures-sink", "futures-task", "pin-project-lite", "pin-utils", + "slab", ] [[package]] @@ -1925,12 +1959,16 @@ dependencies = [ "figment", "itertools 0.14.0", "miden-objects", + "opentelemetry", + 
"opentelemetry-otlp", + "opentelemetry_sdk", "rand", "serde", "thiserror 2.0.11", "tonic", "tracing", "tracing-forest", + "tracing-opentelemetry", "tracing-subscriber", "vergen", "vergen-gitcl", @@ -2249,6 +2287,78 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "opentelemetry" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab70038c28ed37b97d8ed414b6429d343a8bbf44c9f79ec854f3a643029ba6d7" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 1.0.69", + "tracing", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" +dependencies = [ + "async-trait", + "futures-core", + "http", + "opentelemetry", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "thiserror 1.0.69", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" +dependencies = [ + "async-trait", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "opentelemetry", + "percent-encoding", + "rand", + "serde_json", + 
"thiserror 1.0.69", + "tokio", + "tokio-stream", + "tracing", +] + [[package]] name = "overload" version = "0.1.1" @@ -2707,6 +2817,21 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rusqlite" version = "0.32.1" @@ -2774,6 +2899,59 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustls" +version = "0.23.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.19" @@ -2807,6 +2985,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "scoped-tls" version = "1.0.1" @@ -2819,6 +3006,29 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "0.9.0" @@ -3019,6 +3229,12 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "supports-color" version = "3.0.2" @@ -3258,6 +3474,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.17" @@ -3337,8 +3563,11 @@ dependencies = [ "percent-encoding", "pin-project", "prost", + "rustls-native-certs", + "rustls-pemfile", "socket2", "tokio", + "tokio-rustls", "tokio-stream", "tower 0.4.13", "tower-layer", @@ -3517,6 +3746,24 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + [[package]] name = "tracing-serde" version = "0.2.0" @@ -3627,6 +3874,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.5.4" @@ -3825,6 +4078,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "winapi" version = "0.3.9" @@ -4292,6 +4555,12 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + [[package]] name = "zerovec" version = "0.10.4" diff --git 
a/bin/node/src/main.rs b/bin/node/src/main.rs index 998b54461..1697f61f7 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -37,6 +37,9 @@ pub enum Command { #[arg(short, long, value_name = "FILE", default_value = NODE_CONFIG_FILE_PATH)] config: PathBuf, + + #[arg(long = "open-telemetry", default_value_t = false)] + open_telemetry: bool, }, /// Generates a genesis file and associated account files based on a specified genesis input @@ -82,12 +85,17 @@ pub enum StartCommand { #[tokio::main] async fn main() -> anyhow::Result<()> { - miden_node_utils::logging::setup_logging()?; - let cli = Cli::parse(); + // Open telemetry exporting is only valid for running the node. + let open_telemetry = match &cli.command { + Command::Start { open_telemetry, .. } => *open_telemetry, + _ => false, + }; + miden_node_utils::logging::setup_tracing(open_telemetry)?; + match &cli.command { - Command::Start { command, config } => match command { + Command::Start { command, config, .. } => match command { StartCommand::Node => { let config = load_config(config).context("Loading configuration file")?; start_node(config).await diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 78c921c51..c37684c8f 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -19,17 +19,21 @@ workspace = true vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] -anyhow = { version = "1.0" } -figment = { version = "0.10", features = ["env", "toml"] } -itertools = { workspace = true } -miden-objects = { workspace = true } -rand = { workspace = true } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tonic = { workspace = true } -tracing = { workspace = true } -tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-subscriber = { workspace = true } +anyhow = { version = "1.0" } +figment = { version = "0.10", features = ["env", "toml"] } +itertools = { workspace = true } +miden-objects = { workspace = 
true } +opentelemetry = "0.27" +opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } +opentelemetry_sdk = { version = "0.27", features = ["rt-tokio"] } +rand = { workspace = true } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tonic = { workspace = true } +tracing = { workspace = true } +tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } +tracing-opentelemetry = "0.28" +tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. vergen = { "version" = "9.0", optional = true } diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index dc58d7e88..069685271 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,6 +1,19 @@ use anyhow::Result; +use opentelemetry::trace::TracerProvider as _; +use opentelemetry_otlp::WithTonicConfig; use tracing::subscriber::{self, Subscriber}; -use tracing_subscriber::EnvFilter; +use tracing_opentelemetry::OpenTelemetryLayer; +use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; + +/// Configures tracing and optionally enables an open-telemetry OTLP exporter. 
+/// +/// The open-telemetry configuration is controlled via environment variables as defined in the +/// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) +pub fn setup_tracing(enable_otel: bool) -> Result<()> { + let otel_layer = enable_otel.then_some(open_telemetry_layer()); + let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); + tracing::subscriber::set_global_default(subscriber).map_err(Into::into) +} pub fn setup_logging() -> Result<()> { subscriber::set_global_default(subscriber())?; @@ -8,6 +21,64 @@ pub fn setup_logging() -> Result<()> { Ok(()) } +fn open_telemetry_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber + Sync + Send, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + let exporter = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_tls_config(tonic::transport::ClientTlsConfig::new().with_native_roots()) + .build() + .unwrap(); + + let tracer = opentelemetry_sdk::trace::TracerProvider::builder() + .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + .build(); + + let tracer = tracer.tracer("tracing-otel-subscriber"); + OpenTelemetryLayer::new(tracer).boxed() +} + +#[cfg(not(feature = "tracing-forest"))] +fn stdout_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + use tracing_subscriber::fmt::format::FmtSpan; + + tracing_subscriber::fmt::layer() + .pretty() + .compact() + .with_level(true) + .with_file(true) + .with_line_number(true) + .with_target(true) + .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) + .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { + // axum logs rejections from built-in extracts on the trace level, so we enable this + // manually. 
+ "info,axum::rejection=trace".into() + })) + .boxed() +} + +#[cfg(feature = "tracing-forest")] +fn stdout_layer() -> Box + Send + Sync + 'static> +where + S: Subscriber, + for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, +{ + tracing_forest::ForestLayer::default() + .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { + // axum logs rejections from built-in extracts on the trace level, so we enable this + // manually. + "info,axum::rejection=trace".into() + })) + .boxed() +} + #[cfg(not(feature = "tracing-forest"))] pub fn subscriber() -> impl Subscriber + core::fmt::Debug { use tracing_subscriber::fmt::format::FmtSpan; From 4215341319cc1ab3d662d593e3d035c8b79bc011 Mon Sep 17 00:00:00 2001 From: Bobbin Threadbare Date: Mon, 3 Feb 2025 11:46:24 -0800 Subject: [PATCH 07/27] chore: fix typos --- crates/block-producer/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/block-producer/README.md b/crates/block-producer/README.md index ef6fb337b..d46377796 100644 --- a/crates/block-producer/README.md +++ b/crates/block-producer/README.md @@ -1,9 +1,9 @@ # Miden block producer -Contains code definining the [Miden node's block-producer](/README.md#architecture) component. It is responsible for +Contains code defining the [Miden node's block-producer](/README.md#architecture) component. It is responsible for ordering transactions into blocks and submitting these for inclusion in the blockchain. -It serves a small [gRPC](htts://grpc.io) API which the node's RPC component uses to submit new transactions. In turn, +It serves a small [gRPC](https://grpc.io) API which the node's RPC component uses to submit new transactions. In turn, the `block-producer` uses the store's gRPC API to submit blocks and query chain state. For more information on the installation and operation of this component, please see the [node's readme](../../README.md). 
From 87b4a70e300f532497d83eb4cad1f886a6753383 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Tue, 4 Feb 2025 10:40:16 +0100 Subject: [PATCH 08/27] Use batch prover from miden-base (#659) * feat: Add tx-batch prover crate as dependency * feat: Implement Batch Inputs retrieval from store * feat: Replace `TransactionBatch` in tests with `ProvenBatch` * chore: Rebuild proto files * chore: Add changelog entry * chore: Import proto types as `proto::` * chore: Modify changelog entry * feat: Remove outdated `GetNoteAuthenticationInfoRequest` * feat: Handle errors in `get_batch_inputs` * feat: Handle errors during batch building * chore: Update usages of `AccountFile` * chore: More updates of `AccountFile` * feat: Use latest block num method on `InnerState` * chore: Pin to miden-base current next branch --- CHANGELOG.md | 4 + Cargo.lock | 98 ++-- Cargo.toml | 7 +- bin/faucet/src/client.rs | 4 +- bin/faucet/src/main.rs | 4 +- bin/node/src/commands/genesis/mod.rs | 8 +- crates/block-producer/Cargo.toml | 35 +- .../block-producer/src/batch_builder/batch.rs | 453 ------------------ .../block-producer/src/batch_builder/mod.rs | 69 ++- .../block-producer/src/block_builder/mod.rs | 23 +- .../src/block_builder/prover/block_witness.rs | 26 +- .../src/block_builder/prover/tests.rs | 103 ++-- .../block-producer/src/domain/transaction.rs | 9 + crates/block-producer/src/errors.rs | 28 +- .../block-producer/src/mempool/batch_graph.rs | 64 +-- crates/block-producer/src/mempool/mod.rs | 22 +- crates/block-producer/src/mempool/tests.rs | 29 +- crates/block-producer/src/store/mod.rs | 22 +- crates/block-producer/src/test_utils/batch.rs | 66 ++- crates/block-producer/src/test_utils/block.rs | 23 +- crates/block-producer/src/test_utils/mod.rs | 5 + .../src/test_utils/proven_tx.rs | 1 + crates/block-producer/src/test_utils/store.rs | 15 +- crates/proto/src/domain/batch.rs | 53 ++ crates/proto/src/domain/mod.rs | 1 + crates/proto/src/errors.rs | 16 +- 
crates/proto/src/generated/requests.rs | 17 +- crates/proto/src/generated/responses.rs | 22 +- crates/proto/src/generated/store.rs | 42 +- crates/rpc-proto/proto/requests.proto | 14 +- crates/rpc-proto/proto/responses.proto | 20 +- crates/rpc-proto/proto/store.proto | 4 +- crates/rpc/src/server/api.rs | 2 +- crates/store/src/errors.rs | 25 +- crates/store/src/server/api.rs | 85 ++-- crates/store/src/state.rs | 155 +++++- proto/requests.proto | 14 +- proto/responses.proto | 20 +- proto/store.proto | 4 +- 39 files changed, 752 insertions(+), 860 deletions(-) delete mode 100644 crates/block-producer/src/batch_builder/batch.rs create mode 100644 crates/proto/src/domain/batch.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index c9f5ba057..1637ec794 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,10 @@ - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). +### Enhancements + +- Prove transaction batches using Rust batch prover reference implementation (#659). 
+ ## v0.7.2 (2025-01-29) ### Fixes diff --git a/Cargo.lock b/Cargo.lock index 2d8e3495c..5e6116ea9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -160,9 +160,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", @@ -425,9 +425,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "camino" @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.10" +version = "1.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" dependencies = [ "jobserver", "libc", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.27" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" dependencies = [ "clap_builder", "clap_derive", @@ -546,9 +546,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.24" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" 
dependencies = [ "heck", "proc-macro2", @@ -677,9 +677,9 @@ dependencies = [ [[package]] name = "deadpool" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" +checksum = "5ed5957ff93768adf7a65ab167a17835c3d2c3c50d084fe305174c112f468e2f" dependencies = [ "deadpool-runtime", "num_cpus", @@ -1782,9 +1782,8 @@ dependencies = [ [[package]] name = "miden-lib" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee8babd17ea380c6c5b948761ca63208b633b7130379ee2a57c6d3732d2f8bc" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "miden-assembly", "miden-objects", @@ -1874,6 +1873,7 @@ dependencies = [ "miden-processor", "miden-stdlib", "miden-tx", + "miden-tx-batch-prover", "pretty_assertions", "rand", "rand_chacha", @@ -1976,9 +1976,8 @@ dependencies = [ [[package]] name = "miden-objects" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe3f10d0e3787176f0803be2ecb4646f3a17fe10af45a50736c8d079a3c94d8" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2036,9 +2035,8 @@ dependencies = [ [[package]] name = "miden-tx" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4371509f1e4c25dfe26b7ffcffbb34aaa152c6eaad400f2624240a941baed2d0" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" dependencies = [ "async-trait", "miden-lib", @@ -2052,6 +2050,19 @@ dependencies = [ 
"winter-maybe-async", ] +[[package]] +name = "miden-tx-batch-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +dependencies = [ + "miden-core", + "miden-crypto", + "miden-objects", + "miden-processor", + "miden-tx", + "thiserror 2.0.11", +] + [[package]] name = "miden-verifier" version = "0.12.0" @@ -2067,9 +2078,9 @@ dependencies = [ [[package]] name = "miette" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317f146e2eb7021892722af37cf1b971f0a70c8406f487e24952667616192c64" +checksum = "1a955165f87b37fd1862df2a59547ac542c77ef6d17c666f619d1ad22dd89484" dependencies = [ "cfg-if", "miette-derive", @@ -2079,9 +2090,9 @@ dependencies = [ [[package]] name = "miette-derive" -version = "7.4.0" +version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23c9b935fbe1d6cbd1dac857b54a688145e2d93f48db36010514d0f612d0ad67" +checksum = "bf45bf44ab49be92fd1227a3be6fc6f617f1a337c06af54981048574d8783147" dependencies = [ "proc-macro2", "quote", @@ -2450,27 +2461,27 @@ dependencies = [ [[package]] name = "phf_shared" -version = "0.10.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e2ec53ad785f4d35dac0adea7f7dc6f1bb277ad84a680c7afefeae05d1f5916" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.8" +version = "1.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56a66c0c55993aa927429d0f8a0abfd74f084e4d9c192cffed01e418d83eefb" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", @@ -3143,9 +3154,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" @@ -3203,12 +3214,11 @@ dependencies = [ [[package]] name = "string_cache" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +checksum = "938d512196766101d333398efde81bc1f37b00cb42c2f8350e5df639f040bbbe" dependencies = [ "new_debug_unreachable", - "once_cell", "parking_lot", "phf_shared", "precomputed-hash", @@ -3258,9 +3268,9 @@ checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" [[package]] name = "syn" -version = "2.0.96" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -3531,9 +3541,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap 2.7.1", "serde", @@ -3979,9 +3989,9 @@ dependencies = [ [[package]] name = "wait-timeout" 
-version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" dependencies = [ "libc", ] @@ -4342,9 +4352,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.25" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad699df48212c6cc6eb4435f35500ac6fd3b9913324f938aea302022ce19d310" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index bdfa58e34..bbb10141a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,17 +28,18 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { version = "0.7" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { version = "0.7" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { version = "0.7" } +miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev 
= "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/bin/faucet/src/client.rs b/bin/faucet/src/client.rs index faf9b17b5..cc3fa2618 100644 --- a/bin/faucet/src/client.rs +++ b/bin/faucet/src/client.rs @@ -9,7 +9,7 @@ use miden_node_proto::generated::{ rpc::api_client::ApiClient, }; use miden_objects::{ - account::{Account, AccountData, AccountId, AuthSecretKey}, + account::{Account, AccountFile, AccountId, AuthSecretKey}, asset::FungibleAsset, block::{BlockHeader, BlockNumber}, crypto::{ @@ -61,7 +61,7 @@ impl FaucetClient { let (mut rpc_api, root_block_header, root_chain_mmr) = initialize_faucet_client(config).await?; - let faucet_account_data = AccountData::read(&config.faucet_account_path) + let faucet_account_data = AccountFile::read(&config.faucet_account_path) .context("Failed to load faucet account from file")?; let id = faucet_account_data.account.id(); diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index f7ea8c91f..dc2d53384 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -19,7 +19,7 @@ use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; use miden_objects::{ - account::{AccountData, AccountStorageMode, AuthSecretKey}, + account::{AccountFile, AccountStorageMode, AuthSecretKey}, asset::TokenSymbol, crypto::dsa::rpo_falcon512::SecretKey, Felt, @@ -169,7 +169,7 @@ async fn main() -> anyhow::Result<()> { .context("Failed to create basic fungible faucet account")?; let account_data = - AccountData::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); + AccountFile::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); let output_path = current_dir.join(output_path); account_data diff --git a/bin/node/src/commands/genesis/mod.rs 
b/bin/node/src/commands/genesis/mod.rs index 08a65898b..a8e95087d 100644 --- a/bin/node/src/commands/genesis/mod.rs +++ b/bin/node/src/commands/genesis/mod.rs @@ -9,7 +9,7 @@ use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_store::genesis::GenesisState; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin}; use miden_objects::{ - account::{Account, AccountData, AccountIdAnchor, AuthSecretKey}, + account::{Account, AccountFile, AccountIdAnchor, AuthSecretKey}, asset::TokenSymbol, crypto::{dsa::rpo_falcon512::SecretKey, utils::Serializable}, Felt, ONE, @@ -134,7 +134,7 @@ fn create_accounts( ); faucet_count += 1; - (AccountData::new(account, Some(account_seed), auth_secret_key), name) + (AccountFile::new(account, Some(account_seed), auth_secret_key), name) }, }; @@ -182,7 +182,7 @@ mod tests { use figment::Jail; use miden_node_store::genesis::GenesisState; - use miden_objects::{account::AccountData, utils::serde::Deserializable}; + use miden_objects::{account::AccountFile, utils::serde::Deserializable}; use crate::DEFAULT_GENESIS_FILE_PATH; @@ -220,7 +220,7 @@ mod tests { assert!(a0_file_path.exists()); // deserialize account and genesis_state - let a0 = AccountData::read(a0_file_path).unwrap(); + let a0 = AccountFile::read(a0_file_path).unwrap(); // assert that the account has the corresponding storage mode assert!(a0.account.is_public()); diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index d01bfebeb..ccecfab44 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -18,23 +18,24 @@ workspace = true tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] -async-trait = { version = "0.1" } -itertools = { workspace = true } -miden-lib = { workspace = true } -miden-node-proto = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-processor = { workspace = true } -miden-stdlib = 
{ workspace = true } -miden-tx = { workspace = true } -rand = { version = "0.8" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } -tracing = { workspace = true } -url = { workspace = true } +async-trait = { version = "0.1" } +itertools = { workspace = true } +miden-lib = { workspace = true } +miden-node-proto = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +miden-processor = { workspace = true } +miden-stdlib = { workspace = true } +miden-tx = { workspace = true } +miden-tx-batch-prover = { workspace = true } +rand = { version = "0.8" } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true } +tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/block-producer/src/batch_builder/batch.rs b/crates/block-producer/src/batch_builder/batch.rs deleted file mode 100644 index 33b27b058..000000000 --- a/crates/block-producer/src/batch_builder/batch.rs +++ /dev/null @@ -1,453 +0,0 @@ -use std::{ - borrow::Borrow, - collections::{btree_map::Entry, BTreeMap, BTreeSet}, - mem, -}; - -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_node_utils::formatting::format_blake3_digest; -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::BatchNoteTree, - crypto::hash::blake::{Blake3Digest, Blake3_256}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote, ProvenTransaction, TransactionId}, - AccountDeltaError, Digest, -}; -use 
tracing::instrument; - -use crate::{errors::BuildBatchError, COMPONENT}; - -// BATCH ID -// ================================================================================================ - -/// Uniquely identifies a [`TransactionBatch`]. -#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd)] -pub struct BatchId(Blake3Digest<32>); - -impl BatchId { - /// Calculates a batch ID from the given set of transactions. - pub fn compute(txs: impl Iterator) -> Self - where - T: Borrow, - { - let mut buf = Vec::with_capacity(32 * txs.size_hint().0); - for tx in txs { - buf.extend_from_slice(&tx.borrow().as_bytes()); - } - Self(Blake3_256::hash(&buf)) - } -} - -impl std::fmt::Display for BatchId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(&format_blake3_digest(self.0)) - } -} - -// ACCOUNT UPDATE -// ================================================================================================ - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AccountUpdate { - pub init_state: Digest, - pub final_state: Digest, - pub transactions: Vec, - pub details: AccountUpdateDetails, -} - -impl AccountUpdate { - fn new(tx: &ProvenTransaction) -> Self { - Self { - init_state: tx.account_update().init_state_hash(), - final_state: tx.account_update().final_state_hash(), - transactions: vec![tx.id()], - details: tx.account_update().details().clone(), - } - } - - /// Merges the transaction's update into this account update. 
- fn merge_tx(&mut self, tx: &ProvenTransaction) -> Result<(), AccountDeltaError> { - assert!( - self.final_state == tx.account_update().init_state_hash(), - "Transacion's initial state does not match current account state" - ); - - self.final_state = tx.account_update().final_state_hash(); - self.transactions.push(tx.id()); - self.details = self.details.clone().merge(tx.account_update().details().clone())?; - - Ok(()) - } -} - -// TRANSACTION BATCH -// ================================================================================================ - -/// A batch of transactions that share a common proof. -/// -/// Note: Until recursive proofs are available in the Miden VM, we don't include the common proof. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TransactionBatch { - id: BatchId, - updated_accounts: BTreeMap, - input_notes: Vec, - output_notes_smt: BatchNoteTree, - output_notes: Vec, -} - -impl TransactionBatch { - // CONSTRUCTORS - // -------------------------------------------------------------------------------------------- - - /// Returns a new [TransactionBatch] built from the provided transactions. If a map of - /// unauthenticated notes found in the store is provided, it is used for transforming - /// unauthenticated notes into authenticated notes. - /// - /// The tx input takes an `IntoIterator` of a reference, which effectively allows for cheap - /// cloning of the iterator. Or put differently, we want something similar to `impl - /// Iterator + Clone` which this provides. - /// - /// # Errors - /// - /// Returns an error if: - /// - There are duplicated output notes or unauthenticated notes found across all transactions - /// in the batch. - /// - Hashes for corresponding input notes and output notes don't match. 
- #[instrument(target = COMPONENT, name = "new_batch", skip_all, err)] - pub fn new<'a, I>( - txs: impl IntoIterator, - found_unauthenticated_notes: NoteAuthenticationInfo, - ) -> Result - where - I: Iterator + Clone, - { - let tx_iter = txs.into_iter(); - let id = BatchId::compute(tx_iter.clone().map(ProvenTransaction::id)); - - // Populate batch output notes and updated accounts. - let mut output_notes = OutputNoteTracker::new(tx_iter.clone())?; - let mut updated_accounts = BTreeMap::::new(); - let mut unauthenticated_input_notes = BTreeSet::new(); - for tx in tx_iter.clone() { - // Merge account updates so that state transitions A->B->C become A->C. - match updated_accounts.entry(tx.account_id()) { - Entry::Vacant(vacant) => { - vacant.insert(AccountUpdate::new(tx)); - }, - Entry::Occupied(occupied) => { - occupied.into_mut().merge_tx(tx).map_err(|source| { - BuildBatchError::AccountUpdateError { account_id: tx.account_id(), source } - })?; - }, - }; - - // Check unauthenticated input notes for duplicates: - for note in tx.get_unauthenticated_notes() { - let id = note.id(); - if !unauthenticated_input_notes.insert(id) { - return Err(BuildBatchError::DuplicateUnauthenticatedNote(id)); - } - } - } - - // Populate batch produced nullifiers and match output notes with corresponding - // unauthenticated input notes in the same batch, which are removed from the unauthenticated - // input notes set. - // - // One thing to note: - // This still allows transaction `A` to consume an unauthenticated note `x` and output note - // `y` and for transaction `B` to consume an unauthenticated note `y` and output - // note `x` (i.e., have a circular dependency between transactions), but this is not - // a problem. - let mut input_notes = vec![]; - for tx in tx_iter { - for input_note in tx.input_notes().iter() { - // Header is presented only for unauthenticated input notes. 
- let input_note = match input_note.header() { - Some(input_note_header) => { - if output_notes.remove_note(input_note_header)? { - continue; - } - - // If an unauthenticated note was found in the store, transform it to an - // authenticated one (i.e. erase additional note details - // except the nullifier) - if found_unauthenticated_notes.contains_note(&input_note_header.id()) { - InputNoteCommitment::from(input_note.nullifier()) - } else { - input_note.clone() - } - }, - None => input_note.clone(), - }; - input_notes.push(input_note); - } - } - - let output_notes = output_notes.into_notes(); - - // Build the output notes SMT. - let output_notes_smt = BatchNoteTree::with_contiguous_leaves( - output_notes.iter().map(|note| (note.id(), note.metadata())), - ) - .expect("Unreachable: fails only if the output note list contains duplicates"); - - Ok(Self { - id, - updated_accounts, - input_notes, - output_notes_smt, - output_notes, - }) - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns the batch ID. - pub fn id(&self) -> BatchId { - self.id - } - - /// Returns an iterator over (`account_id`, `init_state_hash`) tuples for accounts that were - /// modified in this transaction batch. - #[cfg(test)] - pub fn account_initial_states(&self) -> impl Iterator + '_ { - self.updated_accounts - .iter() - .map(|(&account_id, update)| (account_id, update.init_state)) - } - - /// Returns an iterator over (`account_id`, details, `new_state_hash`) tuples for accounts that - /// were modified in this transaction batch. - pub fn updated_accounts(&self) -> impl Iterator + '_ { - self.updated_accounts.iter() - } - - /// Returns input notes list consumed by the transactions in this batch. Any unauthenticated - /// input notes which have matching output notes within this batch are not included in this - /// list. 
- pub fn input_notes(&self) -> &[InputNoteCommitment] { - &self.input_notes - } - - /// Returns an iterator over produced nullifiers for all consumed notes. - pub fn produced_nullifiers(&self) -> impl Iterator + '_ { - self.input_notes.iter().map(InputNoteCommitment::nullifier) - } - - /// Returns the root hash of the output notes SMT. - pub fn output_notes_root(&self) -> Digest { - self.output_notes_smt.root() - } - - /// Returns output notes list. - pub fn output_notes(&self) -> &Vec { - &self.output_notes - } -} - -#[derive(Debug)] -struct OutputNoteTracker { - output_notes: Vec>, - output_note_index: BTreeMap, -} - -impl OutputNoteTracker { - fn new<'a>(txs: impl Iterator) -> Result { - let mut output_notes = vec![]; - let mut output_note_index = BTreeMap::new(); - for tx in txs { - for note in tx.output_notes().iter() { - if output_note_index.insert(note.id(), output_notes.len()).is_some() { - return Err(BuildBatchError::DuplicateOutputNote(note.id())); - } - output_notes.push(Some(note.clone())); - } - } - - Ok(Self { output_notes, output_note_index }) - } - - pub fn remove_note(&mut self, input_note_header: &NoteHeader) -> Result { - let id = input_note_header.id(); - if let Some(note_index) = self.output_note_index.remove(&id) { - if let Some(output_note) = mem::take(&mut self.output_notes[note_index]) { - let input_hash = input_note_header.hash(); - let output_hash = output_note.hash(); - if output_hash != input_hash { - return Err(BuildBatchError::NoteHashesMismatch { - id, - input_hash, - output_hash, - }); - } - - return Ok(true); - } - } - - Ok(false) - } - - pub fn into_notes(self) -> Vec { - self.output_notes.into_iter().flatten().collect() - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use miden_objects::note::NoteInclusionProof; - use miden_processor::crypto::MerklePath; - - use super::*; - use crate::test_utils::{ - mock_proven_tx, - 
note::{mock_note, mock_output_note, mock_unauthenticated_note_commitment}, - }; - - #[test] - fn output_note_tracker_duplicate_output_notes() { - let mut txs = mock_proven_txs(); - - let result = OutputNoteTracker::new(txs.iter()); - assert!( - result.is_ok(), - "Creation of output note tracker was not expected to fail: {result:?}" - ); - - let duplicate_output_note = txs[1].output_notes().get_note(1).clone(); - - txs.push(mock_proven_tx( - 3, - vec![], - vec![duplicate_output_note.clone(), mock_output_note(8), mock_output_note(4)], - )); - - match OutputNoteTracker::new(txs.iter()) { - Err(BuildBatchError::DuplicateOutputNote(note_id)) => { - assert_eq!(note_id, duplicate_output_note.id()); - }, - res => panic!("Unexpected result: {res:?}"), - } - } - - #[test] - fn output_note_tracker_remove_in_place_consumed_note() { - let txs = mock_proven_txs(); - let mut tracker = OutputNoteTracker::new(txs.iter()).unwrap(); - - let note_to_remove = mock_note(4); - - assert!(tracker.remove_note(note_to_remove.header()).unwrap()); - assert!(!tracker.remove_note(note_to_remove.header()).unwrap()); - - // Check that output notes are in the expected order and consumed note was removed - assert_eq!( - tracker.into_notes(), - vec![ - mock_output_note(2), - mock_output_note(3), - mock_output_note(6), - mock_output_note(7), - mock_output_note(8), - ] - ); - } - - #[test] - fn duplicate_unauthenticated_notes() { - let mut txs = mock_proven_txs(); - let duplicate_note = mock_note(5); - txs.push(mock_proven_tx(4, vec![duplicate_note.clone()], vec![mock_output_note(9)])); - match TransactionBatch::new(&txs, NoteAuthenticationInfo::default()) { - Err(BuildBatchError::DuplicateUnauthenticatedNote(note_id)) => { - assert_eq!(note_id, duplicate_note.id()); - }, - res => panic!("Unexpected result: {res:?}"), - } - } - - #[test] - fn consume_notes_in_place() { - let mut txs = mock_proven_txs(); - let note_to_consume = mock_note(3); - txs.push(mock_proven_tx( - 3, - vec![mock_note(11), 
note_to_consume, mock_note(13)], - vec![mock_output_note(9), mock_output_note(10)], - )); - - let batch = TransactionBatch::new(&txs, NoteAuthenticationInfo::default()).unwrap(); - - // One of the unauthenticated notes must be removed from the batch due to the consumption - // of the corresponding output note - let expected_input_notes = vec![ - mock_unauthenticated_note_commitment(1), - mock_unauthenticated_note_commitment(5), - mock_unauthenticated_note_commitment(11), - mock_unauthenticated_note_commitment(13), - ]; - assert_eq!(batch.input_notes, expected_input_notes); - - // One of the output notes must be removed from the batch due to the consumption - // by the corresponding unauthenticated note - let expected_output_notes = vec![ - mock_output_note(2), - mock_output_note(4), - mock_output_note(6), - mock_output_note(7), - mock_output_note(8), - mock_output_note(9), - mock_output_note(10), - ]; - assert_eq!(batch.output_notes.len(), expected_output_notes.len()); - assert_eq!(batch.output_notes, expected_output_notes); - - // Ensure all nullifiers match the corresponding input notes' nullifiers - let expected_nullifiers: Vec<_> = - batch.input_notes().iter().map(InputNoteCommitment::nullifier).collect(); - let actual_nullifiers: Vec<_> = batch.produced_nullifiers().collect(); - assert_eq!(actual_nullifiers, expected_nullifiers); - } - - #[test] - fn convert_unauthenticated_note_to_authenticated() { - let txs = mock_proven_txs(); - let found_unauthenticated_notes = BTreeMap::from_iter([( - mock_note(5).id(), - NoteInclusionProof::new(0.into(), 0, MerklePath::default()).unwrap(), - )]); - let found_unauthenticated_notes = NoteAuthenticationInfo { - note_proofs: found_unauthenticated_notes, - block_proofs: Vec::default(), - }; - let batch = TransactionBatch::new(&txs, found_unauthenticated_notes).unwrap(); - - let expected_input_notes = - vec![mock_unauthenticated_note_commitment(1), mock_note(5).nullifier().into()]; - assert_eq!(batch.input_notes, 
expected_input_notes); - } - - // UTILITIES - // ============================================================================================= - - fn mock_proven_txs() -> Vec { - vec![ - mock_proven_tx( - 1, - vec![mock_note(1)], - vec![mock_output_note(2), mock_output_note(3), mock_output_note(4)], - ), - mock_proven_tx( - 2, - vec![mock_note(5)], - vec![mock_output_note(6), mock_output_note(7), mock_output_note(8)], - ), - ] - } -} diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index 68fae021d..550dcb736 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -1,22 +1,21 @@ use std::{num::NonZeroUsize, ops::Range, time::Duration}; -use batch::BatchId; -use miden_node_proto::domain::note::NoteAuthenticationInfo; +use miden_node_proto::domain::batch::BatchInputs; +use miden_node_utils::formatting::format_array; +use miden_objects::{ + batch::{BatchId, ProposedBatch, ProvenBatch}, + MIN_PROOF_SECURITY_LEVEL, +}; +use miden_tx_batch_prover::LocalBatchProver; use rand::Rng; use tokio::{task::JoinSet, time}; use tracing::{debug, info, instrument, Span}; use crate::{ - domain::transaction::AuthenticatedTransaction, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, + domain::transaction::AuthenticatedTransaction, errors::BuildBatchError, mempool::SharedMempool, + store::StoreClient, COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, }; -pub mod batch; -pub use batch::TransactionBatch; -use miden_node_utils::formatting::format_array; - -use crate::errors::BuildBatchError; - // BATCH BUILDER // ================================================================================================ @@ -105,7 +104,7 @@ impl BatchBuilder { // BATCH WORKER // ================================================================================================ -type BatchResult = Result; +type BatchResult = Result; /// Represents a pool of 
batch provers. /// @@ -219,15 +218,19 @@ impl WorkerPool { async move { tracing::debug!("Begin proving batch."); - let inputs = store - .get_batch_inputs( - transactions - .iter() - .flat_map(AuthenticatedTransaction::unauthenticated_notes), - ) + let block_references = + transactions.iter().map(AuthenticatedTransaction::reference_block); + let unauthenticated_notes = transactions + .iter() + .flat_map(AuthenticatedTransaction::unauthenticated_notes); + + let batch_inputs = store + .get_batch_inputs(block_references, unauthenticated_notes) .await .map_err(|err| (id, BuildBatchError::FetchBatchInputsFailed(err)))?; - let batch = Self::build_batch(transactions, inputs).map_err(|err| (id, err))?; + + let batch = + Self::build_batch(transactions, batch_inputs).map_err(|err| (id, err))?; tokio::time::sleep(simulated_proof_time).await; if failed { @@ -250,19 +253,35 @@ impl WorkerPool { #[instrument(target = COMPONENT, skip_all, err, fields(batch_id))] fn build_batch( txs: Vec, - inputs: NoteAuthenticationInfo, - ) -> Result { + batch_inputs: BatchInputs, + ) -> Result { let num_txs = txs.len(); info!(target: COMPONENT, num_txs, "Building a transaction batch"); debug!(target: COMPONENT, txs = %format_array(txs.iter().map(|tx| tx.id().to_hex()))); - let txs = txs.iter().map(AuthenticatedTransaction::raw_proven_transaction); - let batch = TransactionBatch::new(txs, inputs)?; + let BatchInputs { + batch_reference_block_header, + note_proofs, + chain_mmr, + } = batch_inputs; + + let transactions = txs.iter().map(AuthenticatedTransaction::proven_transaction).collect(); + + let proposed_batch = + ProposedBatch::new(transactions, batch_reference_block_header, chain_mmr, note_proofs) + .map_err(BuildBatchError::ProposeBatchError)?; + + Span::current().record("batch_id", proposed_batch.id().to_string()); + info!(target: COMPONENT, "Proposed Batch built"); + + let proven_batch = LocalBatchProver::new(MIN_PROOF_SECURITY_LEVEL) + .prove(proposed_batch) + 
.map_err(BuildBatchError::ProveBatchError)?; - Span::current().record("batch_id", batch.id().to_string()); - info!(target: COMPONENT, "Transaction batch built"); + Span::current().record("batch_id", proven_batch.id().to_string()); + info!(target: COMPONENT, "Proven Batch built"); - Ok(batch) + Ok(proven_batch) } } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 32023a58f..21790dffc 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -3,6 +3,7 @@ use std::{collections::BTreeSet, ops::Range}; use miden_node_utils::formatting::format_array; use miden_objects::{ account::AccountId, + batch::ProvenBatch, block::Block, note::{NoteHeader, Nullifier}, transaction::{InputNoteCommitment, OutputNote}, @@ -12,8 +13,8 @@ use tokio::time::Duration; use tracing::{debug, info, instrument}; use crate::{ - batch_builder::batch::TransactionBatch, errors::BuildBlockError, mempool::SharedMempool, - store::StoreClient, COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; pub(crate) mod prover; @@ -94,34 +95,36 @@ impl BlockBuilder { } #[instrument(target = COMPONENT, skip_all, err)] - async fn build_block(&self, batches: &[TransactionBatch]) -> Result<(), BuildBlockError> { + async fn build_block(&self, batches: &[ProvenBatch]) -> Result<(), BuildBlockError> { info!( target: COMPONENT, num_batches = batches.len(), - batches = %format_array(batches.iter().map(TransactionBatch::id)), + batches = %format_array(batches.iter().map(ProvenBatch::id)), ); let updated_account_set: BTreeSet = batches .iter() - .flat_map(TransactionBatch::updated_accounts) + .flat_map(ProvenBatch::account_updates) .map(|(account_id, _)| *account_id) .collect(); let output_notes: Vec<_> = - batches.iter().map(TransactionBatch::output_notes).cloned().collect(); + batches.iter().map(|batch| 
batch.output_notes().to_vec()).collect(); let produced_nullifiers: Vec = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); // Populate set of output notes from all batches - let output_notes_set: BTreeSet<_> = - output_notes.iter().flat_map(|batch| batch.iter().map(OutputNote::id)).collect(); + let output_notes_set: BTreeSet<_> = output_notes + .iter() + .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) + .collect(); // Build a set of unauthenticated input notes for this block which do not have a matching // output note produced in this block let dangling_notes: BTreeSet<_> = batches .iter() - .flat_map(TransactionBatch::input_notes) + .flat_map(ProvenBatch::input_notes) .filter_map(InputNoteCommitment::header) .map(NoteHeader::id) .filter(|note_id| !output_notes_set.contains(note_id)) diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs index 9f453b598..a0972e541 100644 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ b/crates/block-producer/src/block_builder/prover/block_witness.rs @@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet}; use miden_objects::{ account::{delta::AccountUpdateDetails, AccountId}, + batch::{BatchAccountUpdate, ProvenBatch}, block::{BlockAccountUpdate, BlockHeader}, crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, note::Nullifier, @@ -11,7 +12,6 @@ use miden_objects::{ }; use crate::{ - batch_builder::batch::{AccountUpdate, TransactionBatch}, block::BlockInputs, errors::{BlockProverError, BuildBlockError}, }; @@ -33,7 +33,7 @@ pub struct BlockWitness { impl BlockWitness { pub fn new( mut block_inputs: BlockInputs, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> Result<(Self, Vec), BuildBlockError> { // This limit should be enforced by the mempool. 
assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); @@ -44,18 +44,19 @@ impl BlockWitness { .iter() .enumerate() .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| (batch_index, batch.output_notes_root())) + .map(|(batch_index, batch)| (batch_index, batch.output_notes_tree().root())) .collect(); // Order account updates by account ID and each update's initial state hash. // // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(TransactionBatch::updated_accounts) { + let mut updated_accounts = + BTreeMap::>::new(); + for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { updated_accounts .entry(*account_id) .or_default() - .insert(update.init_state, update.clone()); + .insert(update.initial_state_commitment(), update.clone()); } // Build account witnesses. @@ -84,12 +85,13 @@ impl BlockWitness { ) })?; - transactions.extend(update.transactions); - current_hash = update.final_state; + current_hash = update.final_state_commitment(); + let (update_transactions, update_details) = update.into_parts(); + transactions.extend(update_transactions); details = Some(match details { - None => update.details, - Some(details) => details.merge(update.details).map_err(|source| { + None => update_details, + Some(details) => details.merge(update_details).map_err(|source| { BuildBlockError::AccountUpdateError { account_id, source } })?, }); @@ -156,13 +158,13 @@ impl BlockWitness { /// done in MASM. 
fn validate_nullifiers( block_inputs: &BlockInputs, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> Result<(), BuildBlockError> { let produced_nullifiers_from_store: BTreeSet = block_inputs.nullifiers.keys().copied().collect(); let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); if produced_nullifiers_from_store == produced_nullifiers_from_batches { Ok(()) diff --git a/crates/block-producer/src/block_builder/prover/tests.rs b/crates/block-producer/src/block_builder/prover/tests.rs index d688cdcfd..f020faf62 100644 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ b/crates/block-producer/src/block_builder/prover/tests.rs @@ -6,6 +6,7 @@ use miden_objects::{ account::{ delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, }, + batch::ProvenBatch, block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, crypto::merkle::{ EmptySubtreeRoots, LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, @@ -21,9 +22,9 @@ use miden_objects::{ use self::block_witness::AccountUpdateWitness; use super::*; use crate::{ - batch_builder::batch::TransactionBatch, block::{AccountWitness, BlockInputs}, test_utils::{ + batch::TransactionBatchConstructor, block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, MockProvenTxBuilder, MockStoreSuccessBuilder, }, @@ -75,7 +76,7 @@ fn block_witness_validation_inconsistent_account_ids() { } }; - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account( account_id_2, @@ -84,7 +85,7 @@ fn block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { @@ -95,7 +96,7 @@ fn 
block_witness_validation_inconsistent_account_ids() { ) .build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -146,26 +147,19 @@ fn block_witness_validation_inconsistent_account_hashes() { }; let batches = { - let batch_1 = TransactionBatch::new( - [&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()], - NoteAuthenticationInfo::default(), + let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( + account_id_1, + account_1_hash_batches, + Digest::default(), ) - .unwrap(); - let batch_2 = TransactionBatch::new( - [&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()], - NoteAuthenticationInfo::default(), + .build()]); + + let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( + account_id_2, + Digest::default(), + Digest::default(), ) - .unwrap(); + .build()]); vec![batch_1, batch_2] }; @@ -248,12 +242,8 @@ fn block_witness_multiple_batches_per_account() { }; let batches = { - let batch_1 = - TransactionBatch::new([&x_txs[0], &y_txs[1]], NoteAuthenticationInfo::default()) - .unwrap(); - let batch_2 = - TransactionBatch::new([&y_txs[0], &x_txs[1]], NoteAuthenticationInfo::default()) - .unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); + let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); vec![batch_1, batch_2] }; @@ -360,7 +350,7 @@ async fn compute_account_root_success() { .await .unwrap(); - let batches: Vec = { + let batches: Vec = { let txs: Vec<_> = account_ids .iter() .enumerate() @@ -374,8 +364,8 @@ async fn compute_account_root_success() { }) .collect(); - let batch_1 = TransactionBatch::new(&txs[..2], NoteAuthenticationInfo::default()).unwrap(); - let batch_2 = TransactionBatch::new(&txs[2..], 
NoteAuthenticationInfo::default()).unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); + let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); vec![batch_1, batch_2] }; @@ -510,7 +500,7 @@ async fn compute_note_root_empty_batches_success() { .await .unwrap(); - let batches: Vec = Vec::new(); + let batches: Vec = Vec::new(); let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); @@ -542,8 +532,8 @@ async fn compute_note_root_empty_notes_success() { .await .unwrap(); - let batches: Vec = { - let batch = TransactionBatch::new(vec![], NoteAuthenticationInfo::default()).unwrap(); + let batches: Vec = { + let batch = ProvenBatch::mocked_from_transactions(vec![]); vec![batch] }; @@ -620,7 +610,7 @@ async fn compute_note_root_success() { .await .unwrap(); - let batches: Vec = { + let batches: Vec = { let txs: Vec<_> = notes_created .iter() .zip(account_ids.iter()) @@ -632,8 +622,8 @@ async fn compute_note_root_success() { }) .collect(); - let batch_1 = TransactionBatch::new(&txs[..2], NoteAuthenticationInfo::default()).unwrap(); - let batch_2 = TransactionBatch::new(&txs[2..], NoteAuthenticationInfo::default()).unwrap(); + let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); + let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); vec![batch_1, batch_2] }; @@ -686,17 +676,17 @@ async fn compute_note_root_success() { /// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. 
#[test] fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -713,7 +703,12 @@ fn block_witness_validation_inconsistent_nullifiers() { let accounts = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, hash)| { (account_id, AccountWitness { hash, proof: MerklePath::default() }) }) @@ -765,17 +760,17 @@ fn block_witness_validation_inconsistent_nullifiers() { /// in the transaction #[tokio::test] async fn compute_nullifier_root_empty_success() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -783,7 +778,12 @@ async fn compute_nullifier_root_empty_success() { let account_ids: Vec = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, _)| account_id) .collect(); @@ -819,17 
+819,17 @@ async fn compute_nullifier_root_empty_success() { /// present in the transaction #[tokio::test] async fn compute_nullifier_root_success() { - let batches: Vec = { + let batches: Vec = { let batch_1 = { let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; let batch_2 = { let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - TransactionBatch::new([&tx], NoteAuthenticationInfo::default()).unwrap() + ProvenBatch::mocked_from_transactions([&tx]) }; vec![batch_1, batch_2] @@ -837,7 +837,12 @@ async fn compute_nullifier_root_success() { let account_ids: Vec = batches .iter() - .flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) + }) .map(|(account_id, _)| account_id) .collect(); diff --git a/crates/block-producer/src/domain/transaction.rs b/crates/block-producer/src/domain/transaction.rs index 10c2f236d..c249709ed 100644 --- a/crates/block-producer/src/domain/transaction.rs +++ b/crates/block-producer/src/domain/transaction.rs @@ -101,6 +101,10 @@ impl AuthenticatedTransaction { self.inner.input_notes().num_notes() } + pub fn reference_block(&self) -> (BlockNumber, Digest) { + (self.inner.block_num(), self.inner.block_ref()) + } + /// Notes which were unauthenticate in the transaction __and__ which were /// not authenticated by the store inputs. 
pub fn unauthenticated_notes(&self) -> impl Iterator + '_ { @@ -111,6 +115,11 @@ impl AuthenticatedTransaction { .filter(|note_id| !self.notes_authenticated_by_store.contains(note_id)) } + pub fn proven_transaction(&self) -> Arc { + Arc::clone(&self.inner) + } + + #[cfg(test)] pub fn raw_proven_transaction(&self) -> &ProvenTransaction { &self.inner } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 785de16a2..f61cf6dc2 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -6,9 +6,10 @@ use miden_objects::{ crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, + AccountDeltaError, Digest, ProposedBatchError, }; use miden_processor::ExecutionError; +use miden_tx_batch_prover::errors::BatchProveError; use thiserror::Error; use tokio::task::JoinError; @@ -127,25 +128,6 @@ impl From for tonic::Status { /// Error encountered while building a batch. #[derive(Debug, Error)] pub enum BuildBatchError { - #[error("duplicated unauthenticated transaction input note ID in the batch: {0}")] - DuplicateUnauthenticatedNote(NoteId), - - #[error("duplicated transaction output note ID in the batch: {0}")] - DuplicateOutputNote(NoteId), - - #[error("note hashes mismatch for note {id}: (input: {input_hash}, output: {output_hash})")] - NoteHashesMismatch { - id: NoteId, - input_hash: Digest, - output_hash: Digest, - }, - - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] @@ -156,6 +138,12 @@ pub enum BuildBatchError { #[error("failed to fetch batch inputs from store")] FetchBatchInputsFailed(#[source] StoreError), + + #[error("failed to build proposed transaction batch")] + ProposeBatchError(#[source] ProposedBatchError), + + #[error("failed to prove proposed transaction batch")] + ProveBatchError(#[source] BatchProveError), } // Block prover errors diff --git a/crates/block-producer/src/mempool/batch_graph.rs b/crates/block-producer/src/mempool/batch_graph.rs index 73948f46e..256408c1f 100644 --- a/crates/block-producer/src/mempool/batch_graph.rs +++ b/crates/block-producer/src/mempool/batch_graph.rs @@ -1,12 +1,15 @@ use std::collections::{BTreeMap, BTreeSet}; -use miden_objects::transaction::TransactionId; +use miden_objects::{ + account::AccountId, + batch::{BatchId, ProvenBatch}, + transaction::TransactionId, +}; use super::{ graph::{DependencyGraph, GraphError}, BlockBudget, BudgetStatus, }; -use crate::batch_builder::batch::{BatchId, TransactionBatch}; // BATCH GRAPH // ================================================================================================ @@ -53,7 +56,7 @@ use crate::batch_builder::batch::{BatchId, TransactionBatch}; #[derive(Default, Debug, Clone, PartialEq)] pub struct BatchGraph { /// Tracks the interdependencies between batches. - inner: DependencyGraph, + inner: DependencyGraph, /// Maps each transaction to its batch, allowing for reverse lookups. 
/// @@ -97,12 +100,12 @@ impl BatchGraph { /// - any parent transactions are _not_ in the graph pub fn insert( &mut self, - transactions: Vec, + transactions: Vec<(TransactionId, AccountId)>, mut parents: BTreeSet, ) -> Result { let duplicates = transactions .iter() - .filter(|tx| self.transactions.contains_key(tx)) + .filter_map(|(tx, _)| self.transactions.contains_key(tx).then_some(tx)) .copied() .collect::>(); if !duplicates.is_empty() { @@ -111,7 +114,7 @@ impl BatchGraph { // Reverse lookup parent batch IDs. Take care to allow for parent transactions within this // batch i.e. internal dependencies. - for tx in &transactions { + for (tx, _) in &transactions { parents.remove(tx); } let parent_batches = parents @@ -124,13 +127,14 @@ impl BatchGraph { }) .collect::>()?; - let id = BatchId::compute(transactions.iter()); + let id = BatchId::from_ids(transactions.iter().copied()); self.inner.insert_pending(id, parent_batches)?; - for tx in transactions.iter().copied() { + for (tx, _) in transactions.iter().copied() { self.transactions.insert(tx, id); } - self.batches.insert(id, transactions); + + self.batches.insert(id, transactions.into_iter().map(|(tx, _)| tx).collect()); Ok(id) } @@ -231,7 +235,7 @@ impl BatchGraph { /// # Errors /// /// Returns an error if the batch is not in the graph or if it was already previously proven. - pub fn submit_proof(&mut self, batch: TransactionBatch) -> Result<(), GraphError> { + pub fn submit_proof(&mut self, batch: ProvenBatch) -> Result<(), GraphError> { self.inner.promote_pending(batch.id(), batch) } @@ -240,7 +244,7 @@ impl BatchGraph { /// /// Note that batch order should be maintained to allow for inter-batch dependencies to be /// correctly resolved. 
- pub fn select_block(&mut self, mut budget: BlockBudget) -> Vec { + pub fn select_block(&mut self, mut budget: BlockBudget) -> Vec { let mut batches = Vec::with_capacity(budget.batches); while let Some(batch_id) = self.inner.roots().first().copied() { @@ -289,14 +293,14 @@ mod tests { #[test] fn insert_rejects_duplicate_transactions() { let mut rng = Random::with_random_seed(); - let tx_dup = rng.draw_tx_id(); - let tx_non_dup = rng.draw_tx_id(); + let tx_dup = (rng.draw_tx_id(), rng.draw_account_id()); + let tx_non_dup = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); uut.insert(vec![tx_dup], BTreeSet::default()).unwrap(); let err = uut.insert(vec![tx_dup, tx_non_dup], BTreeSet::default()).unwrap_err(); - let expected = BatchInsertError::DuplicateTransactions([tx_dup].into()); + let expected = BatchInsertError::DuplicateTransactions([tx_dup.0].into()); assert_eq!(err, expected); } @@ -304,13 +308,13 @@ mod tests { #[test] fn insert_rejects_missing_parents() { let mut rng = Random::with_random_seed(); - let tx = rng.draw_tx_id(); - let missing = rng.draw_tx_id(); + let tx = (rng.draw_tx_id(), rng.draw_account_id()); + let missing = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); - let err = uut.insert(vec![tx], [missing].into()).unwrap_err(); - let expected = BatchInsertError::UnknownParentTransaction(missing); + let err = uut.insert(vec![tx], [missing.0].into()).unwrap_err(); + let expected = BatchInsertError::UnknownParentTransaction(missing.0); assert_eq!(err, expected); } @@ -319,11 +323,11 @@ mod tests { fn insert_with_internal_parent_succeeds() { // Ensure that a batch with internal dependencies can be inserted. 
let mut rng = Random::with_random_seed(); - let parent = rng.draw_tx_id(); - let child = rng.draw_tx_id(); + let parent = (rng.draw_tx_id(), rng.draw_account_id()); + let child = (rng.draw_tx_id(), rng.draw_account_id()); let mut uut = BatchGraph::default(); - uut.insert(vec![parent, child], [parent].into()).unwrap(); + uut.insert(vec![parent, child], [parent.0].into()).unwrap(); } // PURGE_SUBGRAPHS TESTS @@ -334,19 +338,25 @@ mod tests { // Ensure that purge_subgraphs returns both parent and child batches when the parent is // pruned. Further ensure that a disjoint batch is not pruned. let mut rng = Random::with_random_seed(); - let parent_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect::>(); - let child_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect::>(); - let disjoint_batch_txs = (0..5).map(|_| rng.draw_tx_id()).collect(); + let parent_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect::>(); + let child_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect::>(); + let disjoint_batch_txs = + (0..5).map(|_| (rng.draw_tx_id(), rng.draw_account_id())).collect(); let mut uut = BatchGraph::default(); let parent_batch_id = uut.insert(parent_batch_txs.clone(), BTreeSet::default()).unwrap(); let child_batch_id = - uut.insert(child_batch_txs.clone(), [parent_batch_txs[0]].into()).unwrap(); + uut.insert(child_batch_txs.clone(), [parent_batch_txs[0].0].into()).unwrap(); uut.insert(disjoint_batch_txs, BTreeSet::default()).unwrap(); let result = uut.remove_batches([parent_batch_id].into()).unwrap(); - let expected = - [(parent_batch_id, parent_batch_txs), (child_batch_id, child_batch_txs)].into(); + let expected = [ + (parent_batch_id, parent_batch_txs.into_iter().map(|(tx, _)| tx).collect()), + (child_batch_id, child_batch_txs.into_iter().map(|(tx, _)| tx).collect()), + ] + .into(); assert_eq!(result, expected); } diff --git a/crates/block-producer/src/mempool/mod.rs 
b/crates/block-producer/src/mempool/mod.rs index 08e332324..3eaa40526 100644 --- a/crates/block-producer/src/mempool/mod.rs +++ b/crates/block-producer/src/mempool/mod.rs @@ -4,8 +4,10 @@ use batch_graph::BatchGraph; use graph::GraphError; use inflight_state::InflightState; use miden_objects::{ - block::BlockNumber, transaction::TransactionId, MAX_ACCOUNTS_PER_BATCH, - MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, + batch::{BatchId, ProvenBatch}, + block::BlockNumber, + transaction::TransactionId, + MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, }; use tokio::sync::Mutex; use tracing::instrument; @@ -13,10 +15,8 @@ use transaction_expiration::TransactionExpirations; use transaction_graph::TransactionGraph; use crate::{ - batch_builder::batch::{BatchId, TransactionBatch}, - domain::transaction::AuthenticatedTransaction, - errors::AddTransactionError, - COMPONENT, SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, + domain::transaction::AuthenticatedTransaction, errors::AddTransactionError, COMPONENT, + SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, }; mod batch_graph; @@ -114,7 +114,7 @@ impl BlockBudget { /// Returns [`BudgetStatus::Exceeded`] if the batch would exceed the remaining budget, /// otherwise returns [`BudgetStatus::Ok`]. #[must_use] - fn check_then_subtract(&mut self, _batch: &TransactionBatch) -> BudgetStatus { + fn check_then_subtract(&mut self, _batch: &ProvenBatch) -> BudgetStatus { if self.batches == 0 { BudgetStatus::Exceeded } else { @@ -233,7 +233,7 @@ impl Mempool { if batch.is_empty() { return None; } - let tx_ids = batch.iter().map(AuthenticatedTransaction::id).collect::>(); + let tx_ids = batch.iter().map(|tx| (tx.id(), tx.account_id())).collect::>(); let batch_id = self.batches.insert(tx_ids, parents).expect("Selected batch should insert"); @@ -268,7 +268,7 @@ impl Mempool { /// Marks a batch as proven if it exists. 
#[instrument(target = COMPONENT, skip_all, fields(batch=%batch.id()))] - pub fn batch_proved(&mut self, batch: TransactionBatch) { + pub fn batch_proved(&mut self, batch: ProvenBatch) { // Batch may have been removed as part of a parent batches failure. if !self.batches.contains(&batch.id()) { return; @@ -287,11 +287,11 @@ impl Mempool { /// /// Panics if there is already a block in flight. #[instrument(target = COMPONENT, skip_all)] - pub fn select_block(&mut self) -> (BlockNumber, Vec) { + pub fn select_block(&mut self) -> (BlockNumber, Vec) { assert!(self.block_in_progress.is_none(), "Cannot have two blocks inflight."); let batches = self.batches.select_block(self.block_budget); - self.block_in_progress = Some(batches.iter().map(TransactionBatch::id).collect()); + self.block_in_progress = Some(batches.iter().map(ProvenBatch::id).collect()); (self.chain_tip.child(), batches) } diff --git a/crates/block-producer/src/mempool/tests.rs b/crates/block-producer/src/mempool/tests.rs index 8e93892fb..e7680736f 100644 --- a/crates/block-producer/src/mempool/tests.rs +++ b/crates/block-producer/src/mempool/tests.rs @@ -1,9 +1,8 @@ -use miden_node_proto::domain::note::NoteAuthenticationInfo; use miden_objects::block::BlockNumber; use pretty_assertions::assert_eq; use super::*; -use crate::test_utils::MockProvenTxBuilder; +use crate::test_utils::{batch::TransactionBatchConstructor, MockProvenTxBuilder}; impl Mempool { fn for_tests() -> Self { @@ -48,10 +47,8 @@ fn children_of_failed_batches_are_ignored() { uut.batch_failed(child_batch_a); assert_eq!(uut, reference); - let proof = - TransactionBatch::new([txs[2].raw_proven_transaction()], NoteAuthenticationInfo::default()) - .unwrap(); - uut.batch_proved(proof); + let proven_batch = ProvenBatch::mocked_from_transactions([txs[2].raw_proven_transaction()]); + uut.batch_proved(proven_batch); assert_eq!(uut, reference); } @@ -95,13 +92,9 @@ fn block_commit_reverts_expired_txns() { // Force the tx into a pending block. 
uut.add_transaction(tx_to_commit.clone()).unwrap(); uut.select_batch().unwrap(); - uut.batch_proved( - TransactionBatch::new( - [tx_to_commit.raw_proven_transaction()], - NoteAuthenticationInfo::default(), - ) - .unwrap(), - ); + uut.batch_proved(ProvenBatch::mocked_from_transactions( + [tx_to_commit.raw_proven_transaction()], + )); let (block, _) = uut.select_block(); // A reverted transaction behaves as if it never existed, the current state is the expected // outcome, plus an extra committed block at the end. @@ -168,13 +161,9 @@ fn block_failure_reverts_its_transactions() { uut.add_transaction(reverted_txs[0].clone()).unwrap(); uut.select_batch().unwrap(); - uut.batch_proved( - TransactionBatch::new( - [reverted_txs[0].raw_proven_transaction()], - NoteAuthenticationInfo::default(), - ) - .unwrap(), - ); + uut.batch_proved(ProvenBatch::mocked_from_transactions([ + reverted_txs[0].raw_proven_transaction() + ])); // Block 1 will contain just the first batch. let (block_number, _) = uut.select_block(); diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 2608975b5..1aa2638fd 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -6,13 +6,13 @@ use std::{ use itertools::Itertools; use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, + domain::batch::BatchInputs, errors::{ConversionError, MissingFieldHelper}, generated::{ digest, requests::{ - ApplyBlockRequest, GetBlockHeaderByNumberRequest, GetBlockInputsRequest, - GetNoteAuthenticationInfoRequest, GetTransactionInputsRequest, + ApplyBlockRequest, GetBatchInputsRequest, GetBlockHeaderByNumberRequest, + GetBlockInputsRequest, GetTransactionInputsRequest, }, responses::{GetTransactionInputsResponse, NullifierTransactionInputRecord}, store::api_client as store_client, @@ -212,21 +212,17 @@ impl StoreClient { #[instrument(target = COMPONENT, skip_all, err)] pub async fn get_batch_inputs( &self, + block_references: impl 
Iterator + Send, notes: impl Iterator + Send, - ) -> Result { - let request = tonic::Request::new(GetNoteAuthenticationInfoRequest { + ) -> Result { + let request = tonic::Request::new(GetBatchInputsRequest { + reference_blocks: block_references.map(|(block_num, _)| block_num.as_u32()).collect(), note_ids: notes.map(digest::Digest::from).collect(), }); - let store_response = - self.inner.clone().get_note_authentication_info(request).await?.into_inner(); + let store_response = self.inner.clone().get_batch_inputs(request).await?.into_inner(); - let note_authentication_info = store_response - .proofs - .ok_or(GetTransactionInputsResponse::missing_field("proofs"))? - .try_into()?; - - Ok(note_authentication_info) + store_response.try_into().map_err(Into::into) } #[instrument(target = COMPONENT, skip_all, err)] diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 45346941a..b4caffd28 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,8 +1,25 @@ -use miden_node_proto::domain::note::NoteAuthenticationInfo; +use std::collections::BTreeMap; -use crate::{batch_builder::TransactionBatch, test_utils::MockProvenTxBuilder}; +use miden_objects::{ + batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, + block::BlockNumber, + transaction::{InputNotes, ProvenTransaction}, +}; + +use crate::test_utils::MockProvenTxBuilder; pub trait TransactionBatchConstructor { + /// Builds a **mocked** [`ProvenBatch`] from the given transactions, which most likely violates + /// some of the rules of actual transaction batches. + /// + /// This builds a mocked version of a proven batch for testing purposes which can be useful if + /// the batch's details don't need to be correct (e.g. if something else is under test but + /// requires a transaction batch). 
If you need an actual valid [`ProvenBatch`], build a + /// [`ProposedBatch`](miden_objects::batch::ProposedBatch) first and convert (without proving) + /// or prove it into a [`ProvenBatch`]. + fn mocked_from_transactions<'tx>(txs: impl IntoIterator) + -> Self; + /// Returns a `TransactionBatch` with `notes_per_tx.len()` transactions, where the i'th /// transaction has `notes_per_tx[i]` notes created fn from_notes_created(starting_account_index: u32, notes_per_tx: &[u64]) -> Self; @@ -11,7 +28,46 @@ pub trait TransactionBatchConstructor { fn from_txs(starting_account_index: u32, num_txs_in_batch: u64) -> Self; } -impl TransactionBatchConstructor for TransactionBatch { +impl TransactionBatchConstructor for ProvenBatch { + fn mocked_from_transactions<'tx>( + txs: impl IntoIterator, + ) -> Self { + let mut account_updates = BTreeMap::new(); + + let txs: Vec<_> = txs.into_iter().collect(); + let mut input_notes = Vec::new(); + let mut output_notes = Vec::new(); + + for tx in &txs { + // Aggregate account updates. + account_updates + .entry(tx.account_id()) + .and_modify(|update: &mut BatchAccountUpdate| { + update.merge_proven_tx(tx).unwrap(); + }) + .or_insert_with(|| BatchAccountUpdate::from_transaction(tx)); + + // Consider all input notes of all transactions as inputs of the batch, which may not + // always be correct. + input_notes.extend(tx.input_notes().iter().cloned()); + // Consider all outputs notes of all transactions as outputs of the batch, which may not + // always be correct. 
+ output_notes.extend(tx.output_notes().iter().cloned()); + } + + ProvenBatch::new( + BatchId::from_transactions(txs.into_iter()), + account_updates, + InputNotes::new_unchecked(input_notes), + BatchNoteTree::with_contiguous_leaves( + output_notes.iter().map(|x| (x.id(), x.metadata())), + ) + .unwrap(), + output_notes, + BlockNumber::from(u32::MAX), + ) + } + fn from_notes_created(starting_account_index: u32, notes_per_tx: &[u64]) -> Self { let txs: Vec<_> = notes_per_tx .iter() @@ -26,7 +82,7 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(&txs, NoteAuthenticationInfo::default()).unwrap() + Self::mocked_from_transactions(&txs) } fn from_txs(starting_account_index: u32, num_txs_in_batch: u64) -> Self { @@ -38,6 +94,6 @@ impl TransactionBatchConstructor for TransactionBatch { }) .collect(); - Self::new(&txs, NoteAuthenticationInfo::default()).unwrap() + Self::mocked_from_transactions(&txs) } } diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 2a940b1ae..532e314dd 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,6 +1,7 @@ use std::iter; use miden_objects::{ + batch::ProvenBatch, block::{Block, BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, NoteBatch}, crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, @@ -10,7 +11,6 @@ use miden_objects::{ use super::MockStoreSuccess; use crate::{ - batch_builder::TransactionBatch, block::BlockInputs, block_builder::prover::{block_witness::BlockWitness, BlockProver}, }; @@ -19,7 +19,7 @@ use crate::{ /// batches to be applied pub async fn build_expected_block_header( store: &MockStoreSuccess, - batches: &[TransactionBatch], + batches: &[ProvenBatch], ) -> BlockHeader { let last_block_header = *store .block_headers @@ -32,11 +32,11 @@ pub async fn build_expected_block_header( // Compute new account root let updated_accounts: Vec<_> = - 
batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); + batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); let new_account_root = { let mut store_accounts = store.accounts.read().await.clone(); for (&account_id, update) in updated_accounts { - store_accounts.insert(account_id.into(), update.final_state.into()); + store_accounts.insert(account_id.into(), update.final_state_commitment().into()); } store_accounts.root() @@ -51,7 +51,8 @@ pub async fn build_expected_block_header( store_chain_mmr.peaks().hash_peaks() }; - let note_created_smt = note_created_smt_from_note_batches(block_output_notes(batches.iter())); + let note_created_smt = + note_created_smt_from_note_batches(block_output_notes(batches.iter()).iter()); // Build header BlockHeader::new( @@ -74,12 +75,12 @@ pub async fn build_expected_block_header( /// node pub async fn build_actual_block_header( store: &MockStoreSuccess, - batches: Vec, + batches: Vec, ) -> BlockHeader { let updated_accounts: Vec<_> = - batches.iter().flat_map(TransactionBatch::updated_accounts).collect(); + batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); let produced_nullifiers: Vec = - batches.iter().flat_map(TransactionBatch::produced_nullifiers).collect(); + batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); let block_inputs_from_store: BlockInputs = store .get_block_inputs( @@ -199,7 +200,7 @@ pub(crate) fn note_created_smt_from_note_batches<'a>( } pub(crate) fn block_output_notes<'a>( - batches: impl Iterator + Clone, -) -> impl Iterator + Clone { - batches.map(TransactionBatch::output_notes) + batches: impl Iterator + Clone, +) -> Vec> { + batches.map(|batch| batch.output_notes().to_vec()).collect() } diff --git a/crates/block-producer/src/test_utils/mod.rs b/crates/block-producer/src/test_utils/mod.rs index 28ba4349f..97dfcc5fb 100644 --- a/crates/block-producer/src/test_utils/mod.rs +++ b/crates/block-producer/src/test_utils/mod.rs @@ -3,6 +3,7 
@@ use std::sync::Arc; use miden_objects::{ account::AccountId, crypto::rand::{FeltRng, RpoRandomCoin}, + testing::account_id::AccountIdBuilder, transaction::TransactionId, Digest, }; @@ -48,6 +49,10 @@ impl Random { self.0.draw_word().into() } + pub fn draw_account_id(&mut self) -> AccountId { + AccountIdBuilder::new().build_with_rng(&mut self.0) + } + pub fn draw_digest(&mut self) -> Digest { self.0.draw_word().into() } diff --git a/crates/block-producer/src/test_utils/proven_tx.rs b/crates/block-producer/src/test_utils/proven_tx.rs index 3de0fd2c1..4af9a0e5b 100644 --- a/crates/block-producer/src/test_utils/proven_tx.rs +++ b/crates/block-producer/src/test_utils/proven_tx.rs @@ -135,6 +135,7 @@ impl MockProvenTxBuilder { self.account_id, self.initial_account_hash, self.final_account_hash, + BlockNumber::from(0), Digest::default(), self.expiration_block_num, ExecutionProof::new(Proof::new_dummy(), HashFunction::Blake3_192), diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index ac8e694af..ecbe1dc67 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -5,6 +5,7 @@ use std::{ use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ + batch::ProvenBatch, block::{Block, BlockHeader, BlockNumber, NoteBatch}, crypto::merkle::{Mmr, SimpleSmt, Smt, ValuePath}, note::{NoteId, NoteInclusionProof, Nullifier}, @@ -15,7 +16,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - batch_builder::TransactionBatch, block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, @@ -35,20 +35,23 @@ pub struct MockStoreSuccessBuilder { } impl MockStoreSuccessBuilder { - pub fn from_batches<'a>( - batches_iter: impl Iterator + Clone, - ) -> Self { + pub fn from_batches<'a>(batches_iter: impl Iterator + Clone) -> Self { let accounts_smt = { let accounts = batches_iter .clone() - 
.flat_map(TransactionBatch::account_initial_states) + .flat_map(|batch| { + batch + .account_updates() + .iter() + .map(|(account_id, update)| (account_id, update.initial_state_commitment())) + }) .map(|(account_id, hash)| (account_id.prefix().into(), hash.into())); SimpleSmt::::with_leaves(accounts).unwrap() }; Self { accounts: Some(accounts_smt), - notes: Some(block_output_notes(batches_iter).cloned().collect()), + notes: Some(block_output_notes(batches_iter)), produced_nullifiers: None, chain_mmr: None, block_num: None, diff --git a/crates/proto/src/domain/batch.rs b/crates/proto/src/domain/batch.rs new file mode 100644 index 000000000..2a29247ea --- /dev/null +++ b/crates/proto/src/domain/batch.rs @@ -0,0 +1,53 @@ +use std::collections::BTreeMap; + +use miden_objects::{ + block::BlockHeader, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, +}; + +use crate::{ + errors::{ConversionError, MissingFieldHelper}, + generated::responses as proto, +}; + +/// Data required for a transaction batch. +#[derive(Clone, Debug)] +pub struct BatchInputs { + pub batch_reference_block_header: BlockHeader, + pub note_proofs: BTreeMap, + pub chain_mmr: ChainMmr, +} + +impl From for proto::GetBatchInputsResponse { + fn from(inputs: BatchInputs) -> Self { + Self { + batch_reference_block_header: Some(inputs.batch_reference_block_header.into()), + note_proofs: inputs.note_proofs.iter().map(Into::into).collect(), + chain_mmr: inputs.chain_mmr.to_bytes(), + } + } +} + +impl TryFrom for BatchInputs { + type Error = ConversionError; + + fn try_from(response: proto::GetBatchInputsResponse) -> Result { + let result = Self { + batch_reference_block_header: response + .batch_reference_block_header + .ok_or(proto::GetBatchInputsResponse::missing_field("block_header"))? 
+ .try_into()?, + note_proofs: response + .note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?, + chain_mmr: ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?, + }; + + Ok(result) + } +} diff --git a/crates/proto/src/domain/mod.rs b/crates/proto/src/domain/mod.rs index 83959535e..2f7ee28da 100644 --- a/crates/proto/src/domain/mod.rs +++ b/crates/proto/src/domain/mod.rs @@ -1,4 +1,5 @@ pub mod account; +pub mod batch; pub mod block; pub mod digest; pub mod merkle; diff --git a/crates/proto/src/errors.rs b/crates/proto/src/errors.rs index 8af3c59ea..39f72f3e2 100644 --- a/crates/proto/src/errors.rs +++ b/crates/proto/src/errors.rs @@ -1,6 +1,9 @@ use std::{any::type_name, num::TryFromIntError}; -use miden_objects::crypto::merkle::{SmtLeafError, SmtProofError}; +use miden_objects::{ + crypto::merkle::{SmtLeafError, SmtProofError}, + utils::DeserializationError, +}; use thiserror::Error; #[derive(Debug, Error)] @@ -28,6 +31,17 @@ pub enum ConversionError { }, #[error("MMR error")] MmrError(#[from] miden_objects::crypto::merkle::MmrError), + #[error("failed to deserialize {entity}")] + DeserializationError { + entity: &'static str, + source: DeserializationError, + }, +} + +impl ConversionError { + pub fn deserialization_error(entity: &'static str, source: DeserializationError) -> Self { + Self::DeserializationError { entity, source } + } } pub trait MissingFieldHelper { diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index a38b5f656..c8e19bb29 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -92,6 +92,16 @@ pub struct GetBlockInputsRequest { #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, } +/// Returns the inputs for a transaction batch. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetBatchInputsRequest { + /// List of unauthenticated notes to be queried from the database. + #[prost(message, repeated, tag = "1")] + pub note_ids: ::prost::alloc::vec::Vec, + /// Set of block numbers referenced by transactions. + #[prost(fixed32, repeated, tag = "2")] + pub reference_blocks: ::prost::alloc::vec::Vec, +} /// Returns data required to validate a new transaction. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTransactionInputsRequest { @@ -123,13 +133,6 @@ pub struct GetNotesByIdRequest { #[prost(message, repeated, tag = "1")] pub note_ids: ::prost::alloc::vec::Vec, } -/// Returns a list of Note inclusion proofs for the specified Note IDs. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetNoteAuthenticationInfoRequest { - /// List of notes to be queried from the database. - #[prost(message, repeated, tag = "1")] - pub note_ids: ::prost::alloc::vec::Vec, -} /// Returns the latest state of an account with the specified ID. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetAccountDetailsRequest { diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index 5b526ae67..c3a8f5f20 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -128,6 +128,21 @@ pub struct GetBlockInputsResponse { super::note::NoteAuthenticationInfo, >, } +/// Represents the result of getting batch inputs. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetBatchInputsResponse { + /// The block header that the transaction batch should reference. + #[prost(message, optional, tag = "1")] + pub batch_reference_block_header: ::core::option::Option, + /// Proof of each _found_ unauthenticated note's inclusion in a block. 
+ #[prost(message, repeated, tag = "2")] + pub note_proofs: ::prost::alloc::vec::Vec, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the blocks referenced + /// by the transactions in the batch. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, +} /// An account returned as a response to the `GetTransactionInputs`. #[derive(Clone, PartialEq, ::prost::Message)] pub struct AccountTransactionInputRecord { @@ -178,13 +193,6 @@ pub struct GetNotesByIdResponse { #[prost(message, repeated, tag = "1")] pub notes: ::prost::alloc::vec::Vec, } -/// Represents the result of getting note authentication info. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetNoteAuthenticationInfoResponse { - /// Proofs of note inclusions in blocks and block inclusions in chain. - #[prost(message, optional, tag = "1")] - pub proofs: ::core::option::Option, -} /// Represents the result of getting account details. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetAccountDetailsResponse { diff --git a/crates/proto/src/generated/store.rs b/crates/proto/src/generated/store.rs index 815a65235..0d9b52f78 100644 --- a/crates/proto/src/generated/store.rs +++ b/crates/proto/src/generated/store.rs @@ -328,14 +328,14 @@ pub mod api_client { req.extensions_mut().insert(GrpcMethod::new("store.Api", "GetBlockInputs")); self.inner.unary(req, path, codec).await } - /// Returns a list of Note inclusion proofs for the specified Note IDs. - pub async fn get_note_authentication_info( + /// Returns the inputs for a transaction batch. 
+ pub async fn get_batch_inputs( &mut self, request: impl tonic::IntoRequest< - super::super::requests::GetNoteAuthenticationInfoRequest, + super::super::requests::GetBatchInputsRequest, >, ) -> std::result::Result< - tonic::Response, + tonic::Response, tonic::Status, > { self.inner @@ -347,12 +347,9 @@ pub mod api_client { ) })?; let codec = tonic::codec::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Api/GetNoteAuthenticationInfo", - ); + let path = http::uri::PathAndQuery::from_static("/store.Api/GetBatchInputs"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Api", "GetNoteAuthenticationInfo")); + req.extensions_mut().insert(GrpcMethod::new("store.Api", "GetBatchInputs")); self.inner.unary(req, path, codec).await } /// Returns a list of notes matching the provided note IDs. @@ -565,14 +562,12 @@ pub mod api_server { tonic::Response, tonic::Status, >; - /// Returns a list of Note inclusion proofs for the specified Note IDs. - async fn get_note_authentication_info( + /// Returns the inputs for a transaction batch. + async fn get_batch_inputs( &self, - request: tonic::Request< - super::super::requests::GetNoteAuthenticationInfoRequest, - >, + request: tonic::Request, ) -> std::result::Result< - tonic::Response, + tonic::Response, tonic::Status, >; /// Returns a list of notes matching the provided note IDs. 
@@ -1140,15 +1135,15 @@ pub mod api_server { }; Box::pin(fut) } - "/store.Api/GetNoteAuthenticationInfo" => { + "/store.Api/GetBatchInputs" => { #[allow(non_camel_case_types)] - struct GetNoteAuthenticationInfoSvc(pub Arc); + struct GetBatchInputsSvc(pub Arc); impl< T: Api, > tonic::server::UnaryService< - super::super::requests::GetNoteAuthenticationInfoRequest, - > for GetNoteAuthenticationInfoSvc { - type Response = super::super::responses::GetNoteAuthenticationInfoResponse; + super::super::requests::GetBatchInputsRequest, + > for GetBatchInputsSvc { + type Response = super::super::responses::GetBatchInputsResponse; type Future = BoxFuture< tonic::Response, tonic::Status, @@ -1156,13 +1151,12 @@ pub mod api_server { fn call( &mut self, request: tonic::Request< - super::super::requests::GetNoteAuthenticationInfoRequest, + super::super::requests::GetBatchInputsRequest, >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_note_authentication_info(&inner, request) - .await + ::get_batch_inputs(&inner, request).await }; Box::pin(fut) } @@ -1173,7 +1167,7 @@ pub mod api_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { - let method = GetNoteAuthenticationInfoSvc(inner); + let method = GetBatchInputsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) .apply_compression_config( diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index 1230eac78..f2323c56c 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -86,6 +86,14 @@ message GetBlockInputsRequest { repeated digest.Digest unauthenticated_notes = 3; } +// Returns the inputs for a transaction batch. +message GetBatchInputsRequest { + // List of unauthenticated notes to be queried from the database. 
+ repeated digest.Digest note_ids = 1; + // Set of block numbers referenced by transactions. + repeated fixed32 reference_blocks = 2; +} + // Returns data required to validate a new transaction. message GetTransactionInputsRequest { // ID of the account against which a transaction is executed. @@ -112,12 +120,6 @@ message GetNotesByIdRequest { repeated digest.Digest note_ids = 1; } -// Returns a list of Note inclusion proofs for the specified Note IDs. -message GetNoteAuthenticationInfoRequest { - // List of notes to be queried from the database. - repeated digest.Digest note_ids = 1; -} - // Returns the latest state of an account with the specified ID. message GetAccountDetailsRequest { // Account ID to get details. diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index 36e175d1e..f1dfe5f90 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -128,6 +128,20 @@ message GetBlockInputsResponse { note.NoteAuthenticationInfo found_unauthenticated_notes = 5; } +// Represents the result of getting batch inputs. +message GetBatchInputsResponse { + // The block header that the transaction batch should reference. + block.BlockHeader batch_reference_block_header = 1; + + // Proof of each _found_ unauthenticated note's inclusion in a block. + repeated note.NoteInclusionInBlockProof note_proofs = 2; + + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the blocks referenced + // by the transactions in the batch. + bytes chain_mmr = 3; +} + // An account returned as a response to the `GetTransactionInputs`. message AccountTransactionInputRecord { // The account ID. @@ -173,12 +187,6 @@ message GetNotesByIdResponse { repeated note.Note notes = 1; } -// Represents the result of getting note authentication info. 
-message GetNoteAuthenticationInfoResponse { - // Proofs of note inclusions in blocks and block inclusions in chain. - note.NoteAuthenticationInfo proofs = 1; -} - // Represents the result of getting account details. message GetAccountDetailsResponse { // Account info (with details for public accounts). diff --git a/crates/rpc-proto/proto/store.proto b/crates/rpc-proto/proto/store.proto index 0562b8c54..7137121dc 100644 --- a/crates/rpc-proto/proto/store.proto +++ b/crates/rpc-proto/proto/store.proto @@ -39,8 +39,8 @@ service Api { // Returns data required to prove the next block. rpc GetBlockInputs(requests.GetBlockInputsRequest) returns (responses.GetBlockInputsResponse) {} - // Returns a list of Note inclusion proofs for the specified Note IDs. - rpc GetNoteAuthenticationInfo(requests.GetNoteAuthenticationInfoRequest) returns (responses.GetNoteAuthenticationInfoResponse) {} + // Returns the inputs for a transaction batch. + rpc GetBatchInputs(requests.GetBatchInputsRequest) returns (responses.GetBatchInputsResponse) {} // Returns a list of notes matching the provided note IDs. 
rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {} diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index 976ed4fc2..e9fe29a2a 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -182,7 +182,7 @@ impl api_server::Api for RpcApi { let tx_verifier = TransactionVerifier::new(MIN_PROOF_SECURITY_LEVEL); - tx_verifier.verify(tx.clone()).map_err(|err| { + tx_verifier.verify(&tx).map_err(|err| { Status::invalid_argument(format!("Invalid proof for transaction {}: {err}", tx.id())) })?; diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 462341bf5..2c6fa9e36 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -244,11 +244,11 @@ pub enum GetBlockInputsError { NoteInclusionMmr(#[from] MmrError), } -impl From for GetBlockInputsError { - fn from(value: GetNoteInclusionProofError) -> Self { +impl From for GetBlockInputsError { + fn from(value: GetNoteAuthenticationInfoError) -> Self { match value { - GetNoteInclusionProofError::DatabaseError(db_err) => db_err.into(), - GetNoteInclusionProofError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), + GetNoteAuthenticationInfoError::DatabaseError(db_err) => db_err.into(), + GetNoteAuthenticationInfoError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), } } } @@ -274,9 +274,24 @@ pub enum NoteSyncError { } #[derive(Error, Debug)] -pub enum GetNoteInclusionProofError { +pub enum GetNoteAuthenticationInfoError { #[error("database error")] DatabaseError(#[from] DatabaseError), #[error("Mmr error")] MmrError(#[from] MmrError), } + +#[derive(Error, Debug)] +pub enum GetBatchInputsError { + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), + #[error("set of blocks refernced by transactions is empty")] + TransactionBlockReferencesEmpty, + 
#[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] + TransactionBlockReferenceNewerThanLatestBlock { + highest_block_num: BlockNumber, + latest_block_num: BlockNumber, + }, +} diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 9cb196861..84a0ff33e 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -1,28 +1,24 @@ -use std::{collections::BTreeSet, sync::Arc}; +use std::{collections::BTreeSet, convert::Infallible, sync::Arc}; use miden_node_proto::{ convert, - domain::{ - account::{AccountInfo, AccountProofRequest}, - note::NoteAuthenticationInfo, - }, + domain::account::{AccountInfo, AccountProofRequest}, errors::ConversionError, generated::{ self, account::AccountSummary, - note::NoteAuthenticationInfo as NoteAuthenticationInfoProto, requests::{ ApplyBlockRequest, CheckNullifiersByPrefixRequest, CheckNullifiersRequest, GetAccountDetailsRequest, GetAccountProofsRequest, GetAccountStateDeltaRequest, - GetBlockByNumberRequest, GetBlockHeaderByNumberRequest, GetBlockInputsRequest, - GetNoteAuthenticationInfoRequest, GetNotesByIdRequest, GetTransactionInputsRequest, + GetBatchInputsRequest, GetBlockByNumberRequest, GetBlockHeaderByNumberRequest, + GetBlockInputsRequest, GetNotesByIdRequest, GetTransactionInputsRequest, SyncNoteRequest, SyncStateRequest, }, responses::{ AccountTransactionInputRecord, ApplyBlockResponse, CheckNullifiersByPrefixResponse, CheckNullifiersResponse, GetAccountDetailsResponse, GetAccountProofsResponse, - GetAccountStateDeltaResponse, GetBlockByNumberResponse, GetBlockHeaderByNumberResponse, - GetBlockInputsResponse, GetNoteAuthenticationInfoResponse, GetNotesByIdResponse, + GetAccountStateDeltaResponse, GetBatchInputsResponse, GetBlockByNumberResponse, + GetBlockHeaderByNumberResponse, GetBlockInputsResponse, GetNotesByIdResponse, GetTransactionInputsResponse, NullifierTransactionInputRecord, 
NullifierUpdate, SyncNoteResponse, SyncStateResponse, }, @@ -279,42 +275,6 @@ impl api_server::Api for StoreApi { Ok(Response::new(GetNotesByIdResponse { notes })) } - /// Returns the inclusion proofs of the specified notes. - #[instrument( - target = COMPONENT, - name = "store:get_note_inclusion_proofs", - skip_all, - ret(level = "debug"), - err - )] - async fn get_note_authentication_info( - &self, - request: Request, - ) -> Result, Status> { - info!(target: COMPONENT, ?request); - - let note_ids = request.into_inner().note_ids; - - let note_ids: Vec = try_convert(note_ids) - .map_err(|err| Status::invalid_argument(format!("Invalid NoteId: {err}")))?; - - let note_ids = note_ids.into_iter().map(From::from).collect(); - - let NoteAuthenticationInfo { block_proofs, note_proofs } = self - .state - .get_note_authentication_info(note_ids) - .await - .map_err(internal_error)?; - - // Massage into shape required by protobuf - let note_proofs = note_proofs.iter().map(Into::into).collect(); - let block_proofs = block_proofs.into_iter().map(Into::into).collect(); - - Ok(Response::new(GetNoteAuthenticationInfoResponse { - proofs: Some(NoteAuthenticationInfoProto { note_proofs, block_proofs }), - })) - } - /// Returns details for public (public) account by id. #[instrument( target = COMPONENT, @@ -402,6 +362,39 @@ impl api_server::Api for StoreApi { .map_err(internal_error) } + /// Fetches the inputs for a transaction batch from the database. + /// + /// See [`State::get_batch_inputs`] for details. 
+ #[instrument( + target = COMPONENT, + name = "store:get_batch_inputs", + skip_all, + ret(level = "debug"), + err + )] + async fn get_batch_inputs( + &self, + request: Request, + ) -> Result, Status> { + let request = request.into_inner(); + + let note_ids: Vec = try_convert(request.note_ids) + .map_err(|err| Status::invalid_argument(format!("Invalid NoteId: {err}")))?; + let note_ids = note_ids.into_iter().map(NoteId::from).collect(); + + let reference_blocks: Vec = + try_convert::<_, Infallible, _, _, _>(request.reference_blocks) + .expect("operation should be infallible"); + let reference_blocks = reference_blocks.into_iter().map(BlockNumber::from).collect(); + + self.state + .get_batch_inputs(reference_blocks, note_ids) + .await + .map(Into::into) + .map(Response::new) + .map_err(internal_error) + } + #[instrument( target = COMPONENT, name = "store:get_transaction_inputs", diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index f3ae2dc99..a6d07fc55 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -13,6 +13,7 @@ use miden_node_proto::{ convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, + batch::BatchInputs, block::BlockInclusionProof, note::NoteAuthenticationInfo, }, @@ -28,11 +29,12 @@ use miden_objects::{ crypto::{ hash::rpo::RpoDigest, merkle::{ - LeafIndex, Mmr, MmrDelta, MmrError, MmrPeaks, MmrProof, SimpleSmt, SmtProof, ValuePath, + LeafIndex, Mmr, MmrDelta, MmrError, MmrPeaks, MmrProof, PartialMmr, SimpleSmt, + SmtProof, ValuePath, }, }, note::{NoteId, Nullifier}, - transaction::OutputNote, + transaction::{ChainMmr, OutputNote}, utils::Serializable, AccountError, ACCOUNT_TREE_DEPTH, }; @@ -46,9 +48,9 @@ use crate::{ blocks::BlockStore, db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ - ApplyBlockError, DatabaseError, GetBlockHeaderError, GetBlockInputsError, - GetNoteInclusionProofError, InvalidBlockError, NoteSyncError, StateInitializationError, - 
StateSyncError, + ApplyBlockError, DatabaseError, GetBatchInputsError, GetBlockHeaderError, + GetBlockInputsError, GetNoteAuthenticationInfoError, InvalidBlockError, NoteSyncError, + StateInitializationError, StateSyncError, }, nullifier_tree::NullifierTree, COMPONENT, @@ -438,8 +440,8 @@ impl State { pub async fn get_note_authentication_info( &self, note_ids: BTreeSet, - ) -> Result { - // First we grab block-inclusion proofs for the known notes. These proofs only + ) -> Result { + // First we grab note inclusion proofs for the known notes. These proofs only // prove that the note was included in a given block. We then also need to prove that // each of those blocks is included in the chain. let note_proofs = self.db.select_note_inclusion_proofs(note_ids).await?; @@ -494,6 +496,145 @@ impl State { Ok(NoteAuthenticationInfo { block_proofs, note_proofs }) } + /// Fetches the inputs for a transaction batch from the database. + /// + /// ## Inputs + /// + /// The function takes as input: + /// - The tx reference blocks are the set of blocks referenced by transactions in the batch. + /// - The unauthenticated note ids are the set of IDs of unauthenticated notes consumed by all + /// transactions in the batch. For these notes, we attempt to find note inclusion proofs. Not + /// all notes will exist in the DB necessarily, as some notes can be created and consumed + /// within the same batch. + /// + /// ## Outputs + /// + /// The function will return: + /// - A block inclusion proof for all tx reference blocks and for all blocks which are + /// referenced by a note inclusion proof. + /// - Note inclusion proofs for all notes that were found in the DB. + /// - The block header that the batch should reference, i.e. the latest known block. 
+ pub async fn get_batch_inputs( + &self, + tx_reference_blocks: BTreeSet, + unauthenticated_note_ids: BTreeSet, + ) -> Result { + if tx_reference_blocks.is_empty() { + return Err(GetBatchInputsError::TransactionBlockReferencesEmpty); + } + + // First we grab note inclusion proofs for the known notes. These proofs only + // prove that the note was included in a given block. We then also need to prove that + // each of those blocks is included in the chain. + let note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_note_ids) + .await + .map_err(GetBatchInputsError::SelectNoteInclusionProofError)?; + + // The set of blocks that the notes are included in. + let note_blocks = note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to query without duplicates, which is: + // - all blocks for which we need to prove note inclusion. + // - all blocks referenced by transactions in the batch. + let mut blocks = tx_reference_blocks; + blocks.extend(note_blocks); + + // Grab the block merkle paths from the inner state. + // + // NOTE: Scoped block to automatically drop the mutex guard asap. + // + // We also avoid accessing the db in the block as this would delay + // dropping the guard. + let (batch_reference_block, partial_mmr) = { + let state = self.inner.read().await; + let latest_block_num = state.latest_block_num(); + + let highest_block_num = + *blocks.last().expect("we should have checked for empty block references"); + if highest_block_num > latest_block_num { + return Err(GetBatchInputsError::TransactionBlockReferenceNewerThanLatestBlock { + highest_block_num, + latest_block_num, + }); + } + + // Remove the latest block from the to-be-tracked blocks as it will be the reference + // block for the batch itself and thus added to the MMR within the batch kernel, so + // there is no need to prove its inclusion. 
+ blocks.remove(&latest_block_num); + + // Using latest block as the target forest means we take the state of the MMR one before + // the latest block. This is because the latest block will be used as the reference + // block of the batch and will be added to the MMR by the batch kernel. + let target_forest = latest_block_num.as_usize(); + let peaks = state + .chain_mmr + .peaks_at(target_forest) + .expect("target_forest should be smaller than forest of the chain mmr"); + let mut partial_mmr = PartialMmr::from_peaks(peaks); + + for block_num in blocks.iter().map(BlockNumber::as_usize) { + // SAFETY: We have ensured block nums are less than chain length. + let leaf = state + .chain_mmr + .get(block_num) + .expect("block num less than chain length should exist in chain mmr"); + let path = state + .chain_mmr + .open_at(block_num, target_forest) + .expect("block num and target forest should be valid for this mmr") + .merkle_path; + // SAFETY: We should be able to fill the partial MMR with data from the chain MMR + // without errors, otherwise it indicates the chain mmr is invalid. + partial_mmr + .track(block_num, leaf, &path) + .expect("filling partial mmr with data from mmr should succeed"); + } + + (latest_block_num, partial_mmr) + }; + + // TODO: Unnecessary conversion. We should change the select_block_headers function to take + // an impl Iterator instead to avoid this allocation. + let mut blocks: Vec<_> = blocks.into_iter().collect(); + // Fetch the reference block of the batch as part of this query, so we can avoid looking it + // up in a separate DB access. + blocks.push(batch_reference_block); + let mut headers = self + .db + .select_block_headers(blocks) + .await + .map_err(GetBatchInputsError::SelectBlockHeaderError)?; + + // Find and remove the batch reference block as we don't want to add it to the chain MMR. 
+ let header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == batch_reference_block).then_some(index) + }) + .expect("DB should have returned the header of the batch reference block"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. + let batch_reference_block_header = headers.swap_remove(header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block headers block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BatchInputs { + batch_reference_block_header, + note_proofs, + chain_mmr, + }) + } + /// Loads data to synchronize a client. /// /// The client's request contains a list of tag prefixes, this method will return the first diff --git a/proto/requests.proto b/proto/requests.proto index 1230eac78..f2323c56c 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -86,6 +86,14 @@ message GetBlockInputsRequest { repeated digest.Digest unauthenticated_notes = 3; } +// Returns the inputs for a transaction batch. +message GetBatchInputsRequest { + // List of unauthenticated notes to be queried from the database. + repeated digest.Digest note_ids = 1; + // Set of block numbers referenced by transactions. + repeated fixed32 reference_blocks = 2; +} + // Returns data required to validate a new transaction. message GetTransactionInputsRequest { // ID of the account against which a transaction is executed. @@ -112,12 +120,6 @@ message GetNotesByIdRequest { repeated digest.Digest note_ids = 1; } -// Returns a list of Note inclusion proofs for the specified Note IDs. -message GetNoteAuthenticationInfoRequest { - // List of notes to be queried from the database. 
- repeated digest.Digest note_ids = 1; -} - // Returns the latest state of an account with the specified ID. message GetAccountDetailsRequest { // Account ID to get details. diff --git a/proto/responses.proto b/proto/responses.proto index 36e175d1e..f1dfe5f90 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -128,6 +128,20 @@ message GetBlockInputsResponse { note.NoteAuthenticationInfo found_unauthenticated_notes = 5; } +// Represents the result of getting batch inputs. +message GetBatchInputsResponse { + // The block header that the transaction batch should reference. + block.BlockHeader batch_reference_block_header = 1; + + // Proof of each _found_ unauthenticated note's inclusion in a block. + repeated note.NoteInclusionInBlockProof note_proofs = 2; + + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the blocks referenced + // by the transactions in the batch. + bytes chain_mmr = 3; +} + // An account returned as a response to the `GetTransactionInputs`. message AccountTransactionInputRecord { // The account ID. @@ -173,12 +187,6 @@ message GetNotesByIdResponse { repeated note.Note notes = 1; } -// Represents the result of getting note authentication info. -message GetNoteAuthenticationInfoResponse { - // Proofs of note inclusions in blocks and block inclusions in chain. - note.NoteAuthenticationInfo proofs = 1; -} - // Represents the result of getting account details. message GetAccountDetailsResponse { // Account info (with details for public accounts). diff --git a/proto/store.proto b/proto/store.proto index 0562b8c54..7137121dc 100644 --- a/proto/store.proto +++ b/proto/store.proto @@ -39,8 +39,8 @@ service Api { // Returns data required to prove the next block. rpc GetBlockInputs(requests.GetBlockInputsRequest) returns (responses.GetBlockInputsResponse) {} - // Returns a list of Note inclusion proofs for the specified Note IDs. 
- rpc GetNoteAuthenticationInfo(requests.GetNoteAuthenticationInfoRequest) returns (responses.GetNoteAuthenticationInfoResponse) {} + // Returns the inputs for a transaction batch. + rpc GetBatchInputs(requests.GetBatchInputsRequest) returns (responses.GetBatchInputsResponse) {} // Returns a list of notes matching the provided note IDs. rpc GetNotesById(requests.GetNotesByIdRequest) returns (responses.GetNotesByIdResponse) {} From 018dde3cc92958a3111989def5c2dc7735cf6876 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Thu, 6 Feb 2025 18:28:55 +0200 Subject: [PATCH 09/27] feat: remote tracing context (#669) --- CHANGELOG.md | 1 + Cargo.lock | 1 + crates/block-producer/src/server.rs | 15 ++-- crates/block-producer/src/store/mod.rs | 10 ++- crates/rpc/src/server/api.rs | 22 +++-- crates/store/src/server/mod.rs | 1 + crates/utils/Cargo.toml | 1 + crates/utils/src/errors.rs | 3 + crates/utils/src/lib.rs | 1 + crates/utils/src/tracing/grpc.rs | 116 +++++++++++++++++++++++++ crates/utils/src/tracing/mod.rs | 1 + 11 files changed, 158 insertions(+), 14 deletions(-) create mode 100644 crates/utils/src/tracing/grpc.rs create mode 100644 crates/utils/src/tracing/mod.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 1637ec794..65a7ef2c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Enhancements - Add an optional open-telemetry trace exporter (#659). +- Support tracing across gRPC boundaries using remote tracing context (#669). 
### Changes diff --git a/Cargo.lock b/Cargo.lock index 5e6116ea9..e9eac679d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1957,6 +1957,7 @@ version = "0.8.0" dependencies = [ "anyhow", "figment", + "http", "itertools 0.14.0", "miden-objects", "opentelemetry", diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 2eaf0e506..251d0b339 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -7,6 +7,7 @@ use miden_node_proto::generated::{ use miden_node_utils::{ errors::ApiError, formatting::{format_input_notes, format_output_notes}, + tracing::grpc::OtelInterceptor, }; use miden_objects::{ block::BlockNumber, transaction::ProvenTransaction, utils::serde::Deserializable, @@ -52,11 +53,14 @@ impl BlockProducer { pub async fn init(config: BlockProducerConfig) -> Result { info!(target: COMPONENT, %config, "Initializing server"); - let store = StoreClient::new( - store_client::ApiClient::connect(config.store_url.to_string()) - .await - .map_err(|err| ApiError::DatabaseConnectionFailed(err.to_string()))?, - ); + let channel = tonic::transport::Endpoint::try_from(config.store_url.to_string()) + .map_err(|err| ApiError::InvalidStoreUrl(err.to_string()))? 
+ .connect() + .await + .map_err(|err| ApiError::DatabaseConnectionFailed(err.to_string()))?; + + let store = store_client::ApiClient::with_interceptor(channel, OtelInterceptor); + let store = StoreClient::new(store); let latest_header = store .latest_header() @@ -208,6 +212,7 @@ impl BlockProducerRpcServer { async fn serve(self, listener: TcpListener) -> Result<(), tonic::transport::Error> { tonic::transport::Server::builder() + .trace_fn(miden_node_utils::tracing::grpc::block_producer_trace_fn) .add_service(api_server::ApiServer::new(self)) .serve_with_incoming(TcpListenerStream::new(listener)) .await diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 1aa2638fd..2c4a36fe9 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -19,7 +19,7 @@ use miden_node_proto::{ }, AccountState, }; -use miden_node_utils::formatting::format_opt; +use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, block::{Block, BlockHeader, BlockNumber}, @@ -29,7 +29,7 @@ use miden_objects::{ Digest, }; use miden_processor::crypto::RpoDigest; -use tonic::transport::Channel; +use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; @@ -121,17 +121,19 @@ impl TryFrom for TransactionInputs { // STORE CLIENT // ================================================================================================ +type InnerClient = store_client::ApiClient>; + /// Interface to the store's gRPC API. /// /// Essentially just a thin wrapper around the generated gRPC client which improves type safety. 
#[derive(Clone)] pub struct StoreClient { - inner: store_client::ApiClient, + inner: InnerClient, } impl StoreClient { /// TODO: this should probably take store connection string and create a connection internally - pub fn new(store: store_client::ApiClient) -> Self { + pub fn new(store: InnerClient) -> Self { Self { inner: store } } diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index e9fe29a2a..fbb233eb4 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -18,12 +18,14 @@ use miden_node_proto::{ }, try_convert, }; +use miden_node_utils::tracing::grpc::OtelInterceptor; use miden_objects::{ account::AccountId, crypto::hash::rpo::RpoDigest, transaction::ProvenTransaction, utils::serde::Deserializable, Digest, MAX_NUM_FOREIGN_ACCOUNTS, MIN_PROOF_SECURITY_LEVEL, }; use miden_tx::TransactionVerifier; use tonic::{ + service::interceptor::InterceptedService, transport::{Channel, Error}, Request, Response, Status, }; @@ -34,19 +36,29 @@ use crate::{config::RpcConfig, COMPONENT}; // RPC API // ================================================================================================ +type StoreClient = store_client::ApiClient>; +type BlockProducerClient = + block_producer_client::ApiClient>; + pub struct RpcApi { - store: store_client::ApiClient, - block_producer: block_producer_client::ApiClient, + store: StoreClient, + block_producer: BlockProducerClient, } impl RpcApi { pub(super) async fn from_config(config: &RpcConfig) -> Result { - let store = store_client::ApiClient::connect(config.store_url.to_string()).await?; + let channel = tonic::transport::Endpoint::try_from(config.store_url.to_string())? + .connect() + .await?; + let store = store_client::ApiClient::with_interceptor(channel, OtelInterceptor); info!(target: COMPONENT, store_endpoint = config.store_url.as_str(), "Store client initialized"); + let channel = tonic::transport::Endpoint::try_from(config.block_producer_url.to_string())? 
+ .connect() + .await?; let block_producer = - block_producer_client::ApiClient::connect(config.block_producer_url.to_string()) - .await?; + block_producer_client::ApiClient::with_interceptor(channel, OtelInterceptor); + info!( target: COMPONENT, block_producer_endpoint = config.block_producer_url.as_str(), diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index 6ef429d08..2b65a1dc2 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -62,6 +62,7 @@ impl Store { /// Note: this blocks until the server dies. pub async fn serve(self) -> Result<(), ApiError> { tonic::transport::Server::builder() + .trace_fn(miden_node_utils::tracing::grpc::store_trace_fn) .add_service(self.api_service) .serve_with_incoming(TcpListenerStream::new(self.listener)) .await diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index c37684c8f..b411cd291 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,6 +21,7 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } +http = "1.2" itertools = { workspace = true } miden-objects = { workspace = true } opentelemetry = "0.27" diff --git a/crates/utils/src/errors.rs b/crates/utils/src/errors.rs index 283da7c5b..0bed649e8 100644 --- a/crates/utils/src/errors.rs +++ b/crates/utils/src/errors.rs @@ -21,4 +21,7 @@ pub enum ApiError { #[error("connection to the database has failed: {0}")] DatabaseConnectionFailed(String), + + #[error("parsing store url failed: {0}")] + InvalidStoreUrl(String), } diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 275ee7e35..8c71955d5 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -3,4 +3,5 @@ pub mod crypto; pub mod errors; pub mod formatting; pub mod logging; +pub mod tracing; pub mod version; diff --git a/crates/utils/src/tracing/grpc.rs b/crates/utils/src/tracing/grpc.rs new file mode 100644 
index 000000000..1e951af12 --- /dev/null +++ b/crates/utils/src/tracing/grpc.rs @@ -0,0 +1,116 @@ +use tracing_opentelemetry::OpenTelemetrySpanExt; + +/// A [`trace_fn`](tonic::transport::server::Server) implementation for the block producer which +/// adds open-telemetry information to the span. +/// +/// Creates an `info` span following the open-telemetry standard: `block-producer.rpc/{method}`. +/// Additionally also pulls in remote tracing context which allows the server trace to be connected +/// to the client's origin trace. +pub fn block_producer_trace_fn(request: &http::Request<()>) -> tracing::Span { + let span = if let Some("SubmitProvenTransaction") = request.uri().path().rsplit('/').next() { + tracing::info_span!("block-producer.rpc/SubmitProvenTransaction") + } else { + tracing::info_span!("block-producer.rpc/Unknown") + }; + + add_otel_span_attributes(span, request) +} + +/// A [`trace_fn`](tonic::transport::server::Server) implementation for the store which adds +/// open-telemetry information to the span. +/// +/// Creates an `info` span following the open-telemetry standard: `store.rpc/{method}`. Additionally +/// also pulls in remote tracing context which allows the server trace to be connected to the +/// client's origin trace. 
+pub fn store_trace_fn(request: &http::Request<()>) -> tracing::Span { + let span = match request.uri().path().rsplit('/').next() { + Some("ApplyBlock") => tracing::info_span!("store.rpc/ApplyBlock"), + Some("CheckNullifiers") => tracing::info_span!("store.rpc/CheckNullifiers"), + Some("CheckNullifiersByPrefix") => tracing::info_span!("store.rpc/CheckNullifiersByPrefix"), + Some("GetAccountDetails") => tracing::info_span!("store.rpc/GetAccountDetails"), + Some("GetAccountProofs") => tracing::info_span!("store.rpc/GetAccountProofs"), + Some("GetAccountStateDelta") => tracing::info_span!("store.rpc/GetAccountStateDelta"), + Some("GetBlockByNumber") => tracing::info_span!("store.rpc/GetBlockByNumber"), + Some("GetBlockHeaderByNumber") => tracing::info_span!("store.rpc/GetBlockHeaderByNumber"), + Some("GetBlockInputs") => tracing::info_span!("store.rpc/GetBlockInputs"), + Some("GetBatchInputs") => tracing::info_span!("store.rpc/GetBatchInputs"), + Some("GetNotesById") => tracing::info_span!("store.rpc/GetNotesById"), + Some("GetTransactionInputs") => tracing::info_span!("store.rpc/GetTransactionInputs"), + Some("SyncNotes") => tracing::info_span!("store.rpc/SyncNotes"), + Some("SyncState") => tracing::info_span!("store.rpc/SyncState"), + _ => tracing::info_span!("store.rpc/Unknown"), + }; + + add_otel_span_attributes(span, request) +} + +/// Adds remote tracing context to the span. +/// +/// Could be expanded in the future by adding in more open-telemetry properties. +fn add_otel_span_attributes(span: tracing::Span, request: &http::Request<()>) -> tracing::Span { + // Pull the open-telemetry parent context using the HTTP extractor. We could make a more + // generic gRPC extractor by utilising the gRPC metadata. However that + // (a) requires cloning headers, + // (b) we would have to write this ourselves, and + // (c) gRPC metadata is transferred using HTTP headers in any case. 
+ use tracing_opentelemetry::OpenTelemetrySpanExt; + let otel_ctx = opentelemetry::global::get_text_map_propagator(|propagator| { + propagator.extract(&MetadataExtractor(&tonic::metadata::MetadataMap::from_headers( + request.headers().clone(), + ))) + }); + span.set_parent(otel_ctx); + + span +} + +/// Injects open-telemetry remote context into traces. +#[derive(Copy, Clone)] +pub struct OtelInterceptor; + +impl tonic::service::Interceptor for OtelInterceptor { + fn call( + &mut self, + mut request: tonic::Request<()>, + ) -> Result, tonic::Status> { + let ctx = tracing::Span::current().context(); + opentelemetry::global::get_text_map_propagator(|propagator| { + propagator.inject_context(&ctx, &mut MetadataInjector(request.metadata_mut())); + }); + + Ok(request) + } +} + +struct MetadataExtractor<'a>(&'a tonic::metadata::MetadataMap); +impl opentelemetry::propagation::Extractor for MetadataExtractor<'_> { + /// Get a value for a key from the `MetadataMap`. If the value can't be converted to &str, + /// returns None + fn get(&self, key: &str) -> Option<&str> { + self.0.get(key).and_then(|metadata| metadata.to_str().ok()) + } + + /// Collect all the keys from the `MetadataMap`. + fn keys(&self) -> Vec<&str> { + self.0 + .keys() + .map(|key| match key { + tonic::metadata::KeyRef::Ascii(v) => v.as_str(), + tonic::metadata::KeyRef::Binary(v) => v.as_str(), + }) + .collect::>() + } +} + +struct MetadataInjector<'a>(&'a mut tonic::metadata::MetadataMap); +impl opentelemetry::propagation::Injector for MetadataInjector<'_> { + /// Set a key and value in the `MetadataMap`. 
Does nothing if the key or value are not valid + /// inputs + fn set(&mut self, key: &str, value: String) { + if let Ok(key) = tonic::metadata::MetadataKey::from_bytes(key.as_bytes()) { + if let Ok(val) = tonic::metadata::MetadataValue::try_from(&value) { + self.0.insert(key, val); + } + } + } +} diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs new file mode 100644 index 000000000..773d491c6 --- /dev/null +++ b/crates/utils/src/tracing/mod.rs @@ -0,0 +1 @@ +pub mod grpc; From d8eb5bc38890687f67045026b444731b406afcde Mon Sep 17 00:00:00 2001 From: Varun Doshi <61531351+varun-doshi@users.noreply.github.com> Date: Fri, 7 Feb 2025 16:11:21 +0530 Subject: [PATCH 10/27] feat: use iterator in select_block_headers (#667) --- crates/store/src/db/mod.rs | 7 +++++-- crates/store/src/db/sql/mod.rs | 6 +++--- crates/store/src/state.rs | 13 ++++--------- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index ad7219dce..51a678e22 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -250,11 +250,14 @@ impl Db { /// Loads multiple block headers from the DB. #[instrument(target = COMPONENT, skip_all, ret(level = "debug"), err)] - pub async fn select_block_headers(&self, blocks: Vec) -> Result> { + pub async fn select_block_headers( + &self, + blocks: impl Iterator + Send + 'static, + ) -> Result> { self.pool .get() .await? - .interact(move |conn| sql::select_block_headers(conn, &blocks)) + .interact(move |conn| sql::select_block_headers(conn, blocks)) .await .map_err(|err| { DatabaseError::InteractError(format!( diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 62d0d92f0..95e6dcdb1 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -1059,11 +1059,11 @@ pub fn select_block_header_by_block_num( /// A vector of [`BlockHeader`] or an error. 
pub fn select_block_headers( conn: &mut Connection, - blocks: &[BlockNumber], + blocks: impl Iterator + Send, ) -> Result> { - let mut headers = Vec::with_capacity(blocks.len()); + let blocks: Vec = blocks.map(|b| b.as_u32().into()).collect(); - let blocks: Vec = blocks.iter().copied().map(|b| b.as_u32().into()).collect(); + let mut headers = Vec::with_capacity(blocks.len()); let mut stmt = conn .prepare_cached("SELECT block_header FROM block_headers WHERE block_num IN rarray(?1);")?; let mut rows = stmt.query(params![Rc::new(blocks)])?; diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index a6d07fc55..059c22be2 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -450,9 +450,7 @@ impl State { let blocks = note_proofs .values() .map(|proof| proof.location().block_num()) - .collect::>() - .into_iter() - .collect::>(); + .collect::>(); // Grab the block merkle paths from the inner state. // @@ -479,7 +477,8 @@ impl State { (chain_length.into(), paths) }; - let headers = self.db.select_block_headers(blocks).await?; + let headers = self.db.select_block_headers(blocks.into_iter()).await?; + let headers = headers .into_iter() .map(|header| (header.block_num(), header)) @@ -596,15 +595,11 @@ impl State { (latest_block_num, partial_mmr) }; - // TODO: Unnecessary conversion. We should change the select_block_headers function to take - // an impl Iterator instead to avoid this allocation. - let mut blocks: Vec<_> = blocks.into_iter().collect(); // Fetch the reference block of the batch as part of this query, so we can avoid looking it // up in a separate DB access. 
- blocks.push(batch_reference_block); let mut headers = self .db - .select_block_headers(blocks) + .select_block_headers(blocks.into_iter().chain(std::iter::once(batch_reference_block))) .await .map_err(GetBatchInputsError::SelectBlockHeaderError)?; From 03622e74500ee709ef30b303121d2b94f8418da2 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Sat, 8 Feb 2025 11:40:15 +0200 Subject: [PATCH 11/27] fix: add faucet routes for favicon and background (#672) --- CHANGELOG.md | 4 ++++ bin/faucet/src/handlers.rs | 8 ++++++++ bin/faucet/src/main.rs | 4 +++- 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65a7ef2c4..7cf6658c9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +### Fixes + +- Faucet webpage is missing `background.png` and `favicon.ico` (#672). + ### Enhancements - Add an optional open-telemetry trace exporter (#659). diff --git a/bin/faucet/src/handlers.rs b/bin/faucet/src/handlers.rs index 5a6de2bc0..171e7f5a2 100644 --- a/bin/faucet/src/handlers.rs +++ b/bin/faucet/src/handlers.rs @@ -128,6 +128,14 @@ pub async fn get_index_css(state: State) -> Result) -> Result { + get_static_file(state, "background.png") +} + +pub async fn get_favicon(state: State) -> Result { + get_static_file(state, "favicon.ico") +} + /// Returns a static file bundled with the app state. 
/// /// # Panics diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index dc2d53384..5cc169a4e 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -14,7 +14,7 @@ use axum::{ }; use clap::{Parser, Subcommand}; use client::initialize_faucet_client; -use handlers::{get_index_css, get_index_html, get_index_js}; +use handlers::{get_background, get_favicon, get_index_css, get_index_html, get_index_js}; use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; @@ -106,6 +106,8 @@ async fn main() -> anyhow::Result<()> { .route("/", get(get_index_html)) .route("/index.js", get(get_index_js)) .route("/index.css", get(get_index_css)) + .route("/background.png", get(get_background)) + .route("/favicon.ico", get(get_favicon)) .route("/get_metadata", get(get_metadata)) .route("/get_tokens", post(get_tokens)) .layer( From ebef2151f5883f76f8c36c5c835634e94d0aa8bf Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Tue, 11 Feb 2025 19:14:02 +0200 Subject: [PATCH 12/27] feat(block-producer): instrument block building (#675) --- CHANGELOG.md | 1 + Cargo.lock | 5 + crates/block-producer/Cargo.toml | 1 + .../block-producer/src/block_builder/mod.rs | 293 ++++++++++++++---- crates/block-producer/src/errors.rs | 4 +- crates/block-producer/src/mempool/mod.rs | 67 ++-- crates/block-producer/src/mempool/tests.rs | 23 +- crates/block-producer/src/store/mod.rs | 10 +- crates/store/src/server/api.rs | 28 +- crates/utils/Cargo.toml | 4 +- crates/utils/src/logging.rs | 5 + crates/utils/src/tracing/mod.rs | 5 + 12 files changed, 316 insertions(+), 130 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cf6658c9..e0a88e643 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - Add an optional open-telemetry trace exporter (#659). 
- Support tracing across gRPC boundaries using remote tracing context (#669). +- Instrument the block-producer's block building process (#676). ### Changes diff --git a/Cargo.lock b/Cargo.lock index e9eac679d..8bdd6e95d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -903,6 +903,7 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -971,10 +972,13 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", + "futures-io", "futures-macro", "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", "slab", @@ -1863,6 +1867,7 @@ version = "0.8.0" dependencies = [ "assert_matches", "async-trait", + "futures", "itertools 0.14.0", "miden-air", "miden-lib", diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index ccecfab44..dabf8d7bd 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -19,6 +19,7 @@ tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] async-trait = { version = "0.1" } +futures = { version = "0.3" } itertools = { workspace = true } miden-lib = { workspace = true } miden-node-proto = { workspace = true } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 21790dffc..1e7b24294 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,20 +1,24 @@ -use std::{collections::BTreeSet, ops::Range}; +use std::{ + collections::BTreeSet, + ops::{Add, Range}, +}; -use miden_node_utils::formatting::format_array; +use futures::FutureExt; +use miden_node_utils::tracing::{OpenTelemetrySpanExt, OtelStatus}; use miden_objects::{ account::AccountId, 
batch::ProvenBatch, - block::Block, - note::{NoteHeader, Nullifier}, + block::{Block, BlockNumber}, + note::{NoteHeader, NoteId, Nullifier}, transaction::{InputNoteCommitment, OutputNote}, }; use rand::Rng; use tokio::time::Duration; -use tracing::{debug, info, instrument}; +use tracing::{instrument, Span}; use crate::{ - errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, - SERVER_BLOCK_FREQUENCY, + block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, + COMPONENT, SERVER_BLOCK_FREQUENCY, }; pub(crate) mod prover; @@ -32,7 +36,7 @@ pub struct BlockBuilder { /// Simulated block failure rate as a percentage. /// /// Note: this _must_ be sign positive and less than 1.0. - pub failure_rate: f32, + pub failure_rate: f64, pub store: StoreClient, pub block_kernel: BlockProver, @@ -72,37 +76,160 @@ impl BlockBuilder { loop { interval.tick().await; - let (block_number, batches) = mempool.lock().await.select_block(); + self.build_block(&mempool).await; + } + } + + /// Run the block building stages and add open-telemetry trace information where applicable. + /// + /// A failure in any stage will result in that block being rolled back. + /// + /// ## Telemetry + /// + /// - Creates a new root span which means each block gets its own complete trace. + /// - Important telemetry fields are added to the root span with the `block.xxx` prefix. + /// - Each stage has its own child span and are free to add further field data. + /// - A failed stage will emit an error event, and both its own span and the root span will be + /// marked as errors. 
+ #[instrument(parent = None, target = COMPONENT, name = "block_builder.build_block", skip_all)] + async fn build_block(&self, mempool: &SharedMempool) { + use futures::TryFutureExt; + + Self::select_block(mempool) + .inspect(SelectedBlock::inject_telemetry) + .then(|selected| self.get_block_inputs(selected)) + .inspect_ok(BlockSummaryAndInputs::inject_telemetry) + .and_then(|inputs| self.prove_block(inputs)) + .inspect_ok(ProvenBlock::inject_telemetry) + // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot + // handle errors after it considers the process complete (which makes sense). + .and_then(|proven_block| async { self.inject_failure(proven_block) }) + .and_then(|proven_block| self.commit_block(mempool, proven_block)) + // Handle errors by propagating the error to the root span and rolling back the block. + .inspect_err(|err| Span::current().set_status(OtelStatus::Error { description: format!("{err:?}").into() })) + .or_else(|_err| self.rollback_block(mempool).never_error()) + // Error has been handled, this is just type manipulation to remove the result wrapper. + .unwrap_or_else(|_| ()) + .await; + } - let mut result = self.build_block(&batches).await; - let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); + #[instrument(target = COMPONENT, name = "block_builder.select_block", skip_all)] + async fn select_block(mempool: &SharedMempool) -> SelectedBlock { + let (block_number, batches) = mempool.lock().await.select_block(); + SelectedBlock { block_number, batches } + } - tokio::time::sleep(proving_duration).await; + #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] + async fn get_block_inputs( + &self, + selected_block: SelectedBlock, + ) -> Result { + let SelectedBlock { block_number: _, batches } = selected_block; + let summary = BlockSummary::summarize_batches(&batches); - // Randomly inject failures at the given rate. 
- // - // Note: Rng::gen rolls between [0, 1.0) for f32, so this works as expected. - if rand::thread_rng().gen::() < self.failure_rate { - result = Err(BuildBlockError::InjectedFailure); - } + let inputs = self + .store + .get_block_inputs( + summary.updated_accounts.iter().copied(), + summary.nullifiers.iter(), + summary.dangling_notes.iter(), + ) + .await + .map_err(BuildBlockError::GetBlockInputsFailed)?; - let mut mempool = mempool.lock().await; - match result { - Ok(_) => mempool.block_committed(block_number), - Err(_) => mempool.block_failed(block_number), - } + let missing_notes: Vec<_> = summary + .dangling_notes + .difference(&inputs.found_unauthenticated_notes.note_ids()) + .copied() + .collect(); + if !missing_notes.is_empty() { + return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); } + + Ok(BlockSummaryAndInputs { batches, summary, inputs }) } - #[instrument(target = COMPONENT, skip_all, err)] - async fn build_block(&self, batches: &[ProvenBatch]) -> Result<(), BuildBlockError> { - info!( - target: COMPONENT, - num_batches = batches.len(), - batches = %format_array(batches.iter().map(ProvenBatch::id)), - ); + #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + async fn prove_block( + &self, + preimage: BlockSummaryAndInputs, + ) -> Result { + let BlockSummaryAndInputs { batches, summary, inputs } = preimage; + + let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; + + let new_block_header = self.block_kernel.prove(block_header_witness)?; + + let block = Block::new( + new_block_header, + updated_accounts, + summary.output_notes, + summary.nullifiers, + )?; + + self.simulate_proving().await; + + Ok(ProvenBlock { block }) + } + + #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] + async fn commit_block( + &self, + mempool: &SharedMempool, + proven_block: ProvenBlock, + ) -> Result<(), BuildBlockError> { + self.store + 
.apply_block(&proven_block.block) + .await + .map_err(BuildBlockError::StoreApplyBlockFailed)?; + + mempool.lock().await.commit_block(); + + Ok(()) + } + + #[instrument(target = COMPONENT, name = "block_builder.rollback_block", skip_all)] + async fn rollback_block(&self, mempool: &SharedMempool) { + mempool.lock().await.rollback_block(); + } + + #[instrument(target = COMPONENT, name = "block_builder.simulate_proving", skip_all)] + async fn simulate_proving(&self) { + let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); + + Span::current().set_attribute("range.min_s", self.simulated_proof_time.start.as_secs_f64()); + Span::current().set_attribute("range.max_s", self.simulated_proof_time.end.as_secs_f64()); + Span::current().set_attribute("dice_roll_s", proving_duration.as_secs_f64()); - let updated_account_set: BTreeSet = batches + tokio::time::sleep(proving_duration).await; + } + + #[instrument(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] + fn inject_failure(&self, value: T) -> Result { + let roll = rand::thread_rng().gen::(); + + Span::current().set_attribute("failure_rate", self.failure_rate); + Span::current().set_attribute("dice_roll", roll); + + if roll < self.failure_rate { + Err(BuildBlockError::InjectedFailure) + } else { + Ok(value) + } + } +} + +struct BlockSummary { + updated_accounts: BTreeSet, + nullifiers: Vec, + output_notes: Vec>, + dangling_notes: BTreeSet, +} + +impl BlockSummary { + #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] + fn summarize_batches(batches: &[ProvenBatch]) -> Self { + let updated_accounts: BTreeSet = batches .iter() .flat_map(ProvenBatch::account_updates) .map(|(account_id, _)| *account_id) @@ -111,7 +238,7 @@ impl BlockBuilder { let output_notes: Vec<_> = batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - let produced_nullifiers: Vec = + let nullifiers: Vec = 
batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); // Populate set of output notes from all batches @@ -120,8 +247,8 @@ impl BlockBuilder { .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) .collect(); - // Build a set of unauthenticated input notes for this block which do not have a matching - // output note produced in this block + // Build a set of unauthenticated input notes for this block which do not have a + // matching output note produced in this block let dangling_notes: BTreeSet<_> = batches .iter() .flat_map(ProvenBatch::input_notes) @@ -130,47 +257,79 @@ impl BlockBuilder { .filter(|note_id| !output_notes_set.contains(note_id)) .collect(); - // Request information needed for block building from the store - let block_inputs = self - .store - .get_block_inputs( - updated_account_set.into_iter(), - produced_nullifiers.iter(), - dangling_notes.iter(), - ) - .await - .map_err(BuildBlockError::GetBlockInputsFailed)?; - - let missing_notes: Vec<_> = dangling_notes - .difference(&block_inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); + Self { + updated_accounts, + nullifiers, + output_notes, + dangling_notes, } + } +} - let (block_header_witness, updated_accounts) = BlockWitness::new(block_inputs, batches)?; +struct SelectedBlock { + block_number: BlockNumber, + batches: Vec, +} +struct BlockSummaryAndInputs { + batches: Vec, + summary: BlockSummary, + inputs: BlockInputs, +} +struct ProvenBlock { + block: Block, +} - let new_block_header = self.block_kernel.prove(block_header_witness)?; +impl SelectedBlock { + fn inject_telemetry(&self) { + let span = Span::current(); + span.set_attribute("block.number", i64::from(self.block_number.as_u32())); + span.set_attribute("block.batches.count", i64::from(self.batches.len() as u32)); + } +} - // TODO: return an error? 
- let block = - Block::new(new_block_header, updated_accounts, output_notes, produced_nullifiers) - .expect("invalid block components"); +impl BlockSummaryAndInputs { + fn inject_telemetry(&self) { + let span = Span::current(); - let block_hash = block.hash(); - let block_num = new_block_header.block_num(); + // SAFETY: We do not expect to have more than u32::MAX of any count per block. + span.set_attribute( + "block.updated_accounts.count", + i64::try_from(self.summary.updated_accounts.len()) + .expect("less than u32::MAX account updates"), + ); + span.set_attribute( + "block.output_notes.count", + i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len()))) + .expect("less than u32::MAX output notes"), + ); + span.set_attribute( + "block.nullifiers.count", + i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + ); + span.set_attribute( + "block.dangling_notes.count", + i64::try_from(self.summary.dangling_notes.len()) + .expect("less than u32::MAX dangling notes"), + ); + } +} - info!(target: COMPONENT, %block_num, %block_hash, "block built"); - debug!(target: COMPONENT, ?block); +impl ProvenBlock { + fn inject_telemetry(&self) { + let span = Span::current(); + let header = self.block.header(); - self.store - .apply_block(&block) - .await - .map_err(BuildBlockError::StoreApplyBlockFailed)?; + span.set_attribute("block.hash", header.hash().to_hex()); + span.set_attribute("block.sub_hash", header.sub_hash().to_hex()); + span.set_attribute("block.parent_hash", header.prev_hash().to_hex()); - info!(target: COMPONENT, %block_num, %block_hash, "block committed"); + span.set_attribute("block.protocol.version", i64::from(header.version())); - Ok(()) + span.set_attribute("block.commitments.kernel", header.kernel_root().to_hex()); + span.set_attribute("block.commitments.nullifier", header.nullifier_root().to_hex()); + span.set_attribute("block.commitments.account", header.account_root().to_hex()); + 
span.set_attribute("block.commitments.chain", header.chain_root().to_hex()); + span.set_attribute("block.commitments.note", header.note_root().to_hex()); + span.set_attribute("block.commitments.transaction", header.tx_hash().to_hex()); } } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index f61cf6dc2..02bc01568 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -6,7 +6,7 @@ use miden_objects::{ crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, Digest, ProposedBatchError, + AccountDeltaError, BlockError, Digest, ProposedBatchError, }; use miden_processor::ExecutionError; use miden_tx_batch_prover::errors::BatchProveError; @@ -187,6 +187,8 @@ pub enum BuildBlockError { account_id: AccountId, source: AccountDeltaError, }, + #[error("block construction failed")] + BlockConstructionError(#[from] BlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/mempool/mod.rs b/crates/block-producer/src/mempool/mod.rs index 3eaa40526..a4fca1614 100644 --- a/crates/block-producer/src/mempool/mod.rs +++ b/crates/block-producer/src/mempool/mod.rs @@ -9,7 +9,7 @@ use miden_objects::{ transaction::TransactionId, MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, }; -use tokio::sync::Mutex; +use tokio::sync::{Mutex, MutexGuard}; use tracing::instrument; use transaction_expiration::TransactionExpirations; use transaction_graph::TransactionGraph; @@ -127,7 +127,15 @@ impl BlockBudget { // MEMPOOL // ================================================================================================ -pub type SharedMempool = Arc>; +#[derive(Clone)] +pub struct SharedMempool(Arc>); + +impl SharedMempool { + #[instrument(target = COMPONENT, name = "mempool.lock", skip_all)] + pub async fn lock(&self) -> MutexGuard<'_, Mempool> { + self.0.lock().await + } +} #[derive(Clone, Debug, PartialEq)] pub struct Mempool { @@ -168,13 +176,13 @@ impl Mempool { state_retention: usize, expiration_slack: u32, ) -> SharedMempool { - Arc::new(Mutex::new(Self::new( + SharedMempool(Arc::new(Mutex::new(Self::new( chain_tip, batch_budget, block_budget, state_retention, expiration_slack, - ))) + )))) } fn new( @@ -205,7 +213,7 @@ impl Mempool { /// # Errors /// /// Returns an error if the transaction's initial conditions don't match the current state. - #[instrument(target = COMPONENT, skip_all, fields(tx=%transaction.id()))] + #[instrument(target = COMPONENT, name = "mempool.add_transaction", skip_all, fields(tx=%transaction.id()))] pub fn add_transaction( &mut self, transaction: AuthenticatedTransaction, @@ -227,7 +235,7 @@ impl Mempool { /// Transactions are returned in a valid execution ordering. /// /// Returns `None` if no transactions are available. 
- #[instrument(target = COMPONENT, skip_all)] + #[instrument(target = COMPONENT, name = "mempool.select_batch", skip_all)] pub fn select_batch(&mut self) -> Option<(BatchId, Vec)> { let (batch, parents) = self.transactions.select_batch(self.batch_budget); if batch.is_empty() { @@ -243,7 +251,7 @@ impl Mempool { /// Drops the failed batch and all of its descendants. /// /// Transactions are placed back in the queue. - #[instrument(target = COMPONENT, skip_all, fields(batch))] + #[instrument(target = COMPONENT, name = "mempool.batch_failed", skip_all, fields(batch_id=%batch))] pub fn batch_failed(&mut self, batch: BatchId) { // Batch may already have been removed as part of a parent batches failure. if !self.batches.contains(&batch) { @@ -267,7 +275,7 @@ impl Mempool { } /// Marks a batch as proven if it exists. - #[instrument(target = COMPONENT, skip_all, fields(batch=%batch.id()))] + #[instrument(target = COMPONENT, name = "mempool.batch_proved", skip_all, fields(batch_id=%batch.id()))] pub fn batch_proved(&mut self, batch: ProvenBatch) { // Batch may have been removed as part of a parent batches failure. if !self.batches.contains(&batch.id()) { @@ -286,7 +294,7 @@ impl Mempool { /// # Panics /// /// Panics if there is already a block in flight. - #[instrument(target = COMPONENT, skip_all)] + #[instrument(target = COMPONENT, name = "mempool.select_block", skip_all)] pub fn select_block(&mut self) -> (BlockNumber, Vec) { assert!(self.block_in_progress.is_none(), "Cannot have two blocks inflight."); @@ -296,15 +304,16 @@ impl Mempool { (self.chain_tip.child(), batches) } - /// Notify the pool that the block was successfully completed. + /// Notify the pool that the in flight block was successfully committed to the chain. + /// + /// The pool will mark the associated batches and transactions as committed, and prune stale + /// committed data, and purge transactions that are now considered expired. 
/// /// # Panics /// - /// Panics if blocks are completed out-of-order or if there is no block in flight. - #[instrument(target = COMPONENT, skip_all, fields(block_number))] - pub fn block_committed(&mut self, block_number: BlockNumber) { - assert_eq!(block_number, self.chain_tip.child(), "Blocks must be submitted sequentially"); - + /// Panics if there is no block in flight. + #[instrument(target = COMPONENT, name = "mempool.commit_block", skip_all)] + pub fn commit_block(&mut self) { // Remove committed batches and transactions from graphs. let batches = self.block_in_progress.take().expect("No block in progress to commit"); let transactions = @@ -321,21 +330,18 @@ impl Mempool { self.chain_tip = self.chain_tip.child(); // Revert expired transactions and their descendents. - let expired = self.expirations.get(block_number); - self.revert_transactions(expired.into_iter().collect()) - .expect("expired transactions must be part of the mempool"); + self.revert_expired_transactions(); } - /// Block and all of its contents and dependents are purged from the mempool. + /// Notify the pool that construction of the in flight block failed. + /// + /// The pool will purge the block and all of its contents from the pool. /// /// # Panics /// - /// Panics if there is no block in flight or if the block number does not match the current - /// inflight block. - #[instrument(target = COMPONENT, skip_all, fields(block_number))] - pub fn block_failed(&mut self, block_number: BlockNumber) { - assert_eq!(block_number, self.chain_tip.child(), "Blocks must be submitted sequentially"); - + /// Panics if there is no block in flight. + #[instrument(target = COMPONENT, name = "mempool.rollback_block", skip_all)] + pub fn rollback_block(&mut self) { let batches = self.block_in_progress.take().expect("No block in progress to be failed"); // Revert all transactions. This is the nuclear (but simplest) solution. 
@@ -358,6 +364,14 @@ impl Mempool { .expect("transactions from a block must be part of the mempool"); } + #[instrument(target = COMPONENT, name = "mempool.revert_expired_transactions", skip_all)] + fn revert_expired_transactions(&mut self) { + let expired = self.expirations.get(self.chain_tip); + + self.revert_transactions(expired.into_iter().collect()) + .expect("expired transactions must be part of the mempool"); + } + /// Reverts the given transactions and their descendents from the mempool. /// /// This includes removing them from the transaction and batch graphs, as well as cleaning up @@ -370,10 +384,13 @@ impl Mempool { /// /// Returns an error if any transaction was not in the transaction graph i.e. if the transaction /// is unknown. + #[instrument(target = COMPONENT, name = "mempool.revert_transactions", skip_all, fields(transactions.ids))] fn revert_transactions( &mut self, txs: Vec, ) -> Result<(), GraphError> { + tracing::Span::current().record("transactions.expired.ids", tracing::field::debug(&txs)); + // Revert all transactions and their descendents, and their associated batches. let reverted = self.transactions.remove_transactions(txs)?; let batches_reverted = self.batches.remove_batches_with_transactions(reverted.iter()); diff --git a/crates/block-producer/src/mempool/tests.rs b/crates/block-producer/src/mempool/tests.rs index e7680736f..8c81286bd 100644 --- a/crates/block-producer/src/mempool/tests.rs +++ b/crates/block-producer/src/mempool/tests.rs @@ -107,8 +107,8 @@ fn block_commit_reverts_expired_txns() { uut.add_transaction(tx_to_revert).unwrap(); // Commit the pending block which should revert the above tx. 
- uut.block_committed(block); - reference.block_committed(block); + uut.commit_block(); + reference.commit_block(); assert_eq!(uut, reference); } @@ -118,24 +118,15 @@ fn empty_block_commitment() { let mut uut = Mempool::for_tests(); for _ in 0..3 { - let (block, _) = uut.select_block(); - uut.block_committed(block); + let (_block, _) = uut.select_block(); + uut.commit_block(); } } -#[test] -#[should_panic] -fn blocks_must_be_committed_sequentially() { - let mut uut = Mempool::for_tests(); - - let (block, _) = uut.select_block(); - uut.block_committed(block + 1); -} - #[test] #[should_panic] fn block_commitment_is_rejected_if_no_block_is_in_flight() { - Mempool::for_tests().block_committed(BlockNumber::from(1)); + Mempool::for_tests().commit_block(); } #[test] @@ -166,7 +157,7 @@ fn block_failure_reverts_its_transactions() { ])); // Block 1 will contain just the first batch. - let (block_number, _) = uut.select_block(); + let (_number, _batches) = uut.select_block(); // Create another dependent batch. uut.add_transaction(reverted_txs[1].clone()).unwrap(); @@ -175,7 +166,7 @@ fn block_failure_reverts_its_transactions() { uut.add_transaction(reverted_txs[2].clone()).unwrap(); // Fail the block which should result in everything reverting. - uut.block_failed(block_number); + uut.rollback_block(); assert_eq!(uut, reference); } diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 2c4a36fe9..3d8009cda 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -138,7 +138,7 @@ impl StoreClient { } /// Returns the latest block's header from the store. 
- #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.latest_header", skip_all, err)] pub async fn latest_header(&self) -> Result { let response = self .inner @@ -156,7 +156,7 @@ impl StoreClient { BlockHeader::try_from(response).map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_tx_inputs", skip_all, err)] pub async fn get_tx_inputs( &self, proven_tx: &ProvenTransaction, @@ -193,7 +193,7 @@ impl StoreClient { Ok(tx_inputs) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_block_inputs", skip_all, err)] pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, @@ -211,7 +211,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.get_batch_inputs", skip_all, err)] pub async fn get_batch_inputs( &self, block_references: impl Iterator + Send, @@ -227,7 +227,7 @@ impl StoreClient { store_response.try_into().map_err(Into::into) } - #[instrument(target = COMPONENT, skip_all, err)] + #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { let request = tonic::Request::new(ApplyBlockRequest { block: block.to_bytes() }); diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 84a0ff33e..24cde86a5 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -56,7 +56,7 @@ impl api_server::Api for StoreApi { /// If the block number is not provided, block header for the latest block is returned. 
#[instrument( target = COMPONENT, - name = "store:get_block_header_by_number", + name = "store.server.get_block_header_by_number", skip_all, ret(level = "debug"), err @@ -88,7 +88,7 @@ impl api_server::Api for StoreApi { /// be verified against the latest root of the nullifier database. #[instrument( target = COMPONENT, - name = "store:check_nullifiers", + name = "store.server.check_nullifiers", skip_all, ret(level = "debug"), err @@ -112,7 +112,7 @@ impl api_server::Api for StoreApi { /// Currently the only supported prefix length is 16 bits. #[instrument( target = COMPONENT, - name = "store:check_nullifiers_by_prefix", + name = "store.server.check_nullifiers_by_prefix", skip_all, ret(level = "debug"), err @@ -145,7 +145,7 @@ impl api_server::Api for StoreApi { /// for the objects the client is interested in. #[instrument( target = COMPONENT, - name = "store:sync_state", + name = "store.server.sync_state", skip_all, ret(level = "debug"), err @@ -214,7 +214,7 @@ impl api_server::Api for StoreApi { /// Returns info which can be used by the client to sync note state. #[instrument( target = COMPONENT, - name = "store:sync_notes", + name = "store.server.sync_notes", skip_all, ret(level = "debug"), err @@ -246,7 +246,7 @@ impl api_server::Api for StoreApi { /// If the list is empty or no Note matched the requested NoteId and empty list is returned. #[instrument( target = COMPONENT, - name = "store:get_notes_by_id", + name = "store.server.get_notes_by_id", skip_all, ret(level = "debug"), err @@ -278,7 +278,7 @@ impl api_server::Api for StoreApi { /// Returns details for public (public) account by id. #[instrument( target = COMPONENT, - name = "store:get_account_details", + name = "store.server.get_account_details", skip_all, ret(level = "debug"), err @@ -302,7 +302,7 @@ impl api_server::Api for StoreApi { /// Updates the local DB by inserting a new block header and the related data. 
#[instrument( target = COMPONENT, - name = "store:apply_block", + name = "store.server.apply_block", skip_all, ret(level = "debug"), err @@ -338,7 +338,7 @@ impl api_server::Api for StoreApi { /// Returns data needed by the block producer to construct and prove the next block. #[instrument( target = COMPONENT, - name = "store:get_block_inputs", + name = "store.server.get_block_inputs", skip_all, ret(level = "debug"), err @@ -367,7 +367,7 @@ impl api_server::Api for StoreApi { /// See [`State::get_batch_inputs`] for details. #[instrument( target = COMPONENT, - name = "store:get_batch_inputs", + name = "store.server.get_batch_inputs", skip_all, ret(level = "debug"), err @@ -397,7 +397,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_transaction_inputs", + name = "store.server.get_transaction_inputs", skip_all, ret(level = "debug"), err @@ -445,7 +445,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_block_by_number", + name = "store.server.get_block_by_number", skip_all, ret(level = "debug"), err @@ -465,7 +465,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_account_proofs", + name = "store.server.get_account_proofs", skip_all, ret(level = "debug"), err @@ -503,7 +503,7 @@ impl api_server::Api for StoreApi { #[instrument( target = COMPONENT, - name = "store:get_account_state_delta", + name = "store.server.get_account_state_delta", skip_all, ret(level = "debug"), err diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index b411cd291..2d0cc785b 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -24,7 +24,7 @@ figment = { version = "0.10", features = ["env", "toml"] } http = "1.2" itertools = { workspace = true } miden-objects = { workspace = true } -opentelemetry = "0.27" +opentelemetry = { version = "0.27" } opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } opentelemetry_sdk = { 
version = "0.27", features = ["rt-tokio"] } rand = { workspace = true } @@ -33,7 +33,7 @@ thiserror = { workspace = true } tonic = { workspace = true } tracing = { workspace = true } tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-opentelemetry = "0.28" +tracing-opentelemetry = { version = "0.28" } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index 069685271..8d933046d 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,6 +1,7 @@ use anyhow::Result; use opentelemetry::trace::TracerProvider as _; use opentelemetry_otlp::WithTonicConfig; +use opentelemetry_sdk::propagation::TraceContextPropagator; use tracing::subscriber::{self, Subscriber}; use tracing_opentelemetry::OpenTelemetryLayer; use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; @@ -10,6 +11,10 @@ use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; /// The open-telemetry configuration is controlled via environment variables as defined in the /// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) pub fn setup_tracing(enable_otel: bool) -> Result<()> { + if enable_otel { + opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); + } + let otel_layer = enable_otel.then_some(open_telemetry_layer()); let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); tracing::subscriber::set_global_default(subscriber).map_err(Into::into) diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs index 773d491c6..292be68be 100644 --- a/crates/utils/src/tracing/mod.rs +++ b/crates/utils/src/tracing/mod.rs @@ -1 +1,6 @@ pub mod grpc; + +// Re-export useful traits for 
open-telemetry traces. This avoids requiring other crates from +// importing that family of crates directly. +pub use opentelemetry::trace::Status as OtelStatus; +pub use tracing_opentelemetry::OpenTelemetrySpanExt; From 26cf6be11b2cf9024c7875ce0a5801eca477e0c3 Mon Sep 17 00:00:00 2001 From: igamigo Date: Fri, 14 Feb 2025 03:20:26 -0300 Subject: [PATCH 13/27] chore: bump `miden-base` versions (#689) --- Cargo.lock | 8 ++++---- Cargo.toml | 8 ++++---- crates/block-producer/src/test_utils/batch.rs | 3 +++ crates/block-producer/src/test_utils/block.rs | 2 +- crates/block-producer/src/test_utils/store.rs | 3 ++- crates/store/src/db/sql/mod.rs | 14 +++++++++----- crates/store/src/genesis.rs | 2 +- crates/store/src/state.rs | 2 +- 8 files changed, 25 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8bdd6e95d..90826a365 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1787,7 +1787,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "miden-assembly", "miden-objects", @@ -1983,7 +1983,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2042,7 +2042,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +source = 
"git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "async-trait", "miden-lib", @@ -2059,7 +2059,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base.git?rev=e82dee03de7589ef3fb12b7fd901cef25ae5535d#e82dee03de7589ef3fb12b7fd901cef25ae5535d" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" dependencies = [ "miden-core", "miden-crypto", diff --git a/Cargo.toml b/Cargo.toml index bbb10141a..2c05af39a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,18 +28,18 @@ version = "0.8.0" assert_matches = { version = "1.5" } itertools = { version = "0.14" } miden-air = { version = "0.12" } -miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-node-block-producer = { path = "crates/block-producer", version = "0.8" } miden-node-proto = { path = "crates/proto", version = "0.8" } miden-node-rpc = { path = "crates/rpc", version = "0.8" } miden-node-store = { path = "crates/store", version = "0.8" } miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } -miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-processor = { version = "0.12" } miden-stdlib = { version = "0.12", default-features = false } -miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", rev = "e82dee03de7589ef3fb12b7fd901cef25ae5535d" } +miden-tx = { git = 
"https://github.com/0xPolygonMiden/miden-base", branch = "next" } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index b4caffd28..37c2041fb 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -4,6 +4,7 @@ use miden_objects::{ batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, block::BlockNumber, transaction::{InputNotes, ProvenTransaction}, + Digest, }; use crate::test_utils::MockProvenTxBuilder; @@ -57,6 +58,8 @@ impl TransactionBatchConstructor for ProvenBatch { ProvenBatch::new( BatchId::from_transactions(txs.into_iter()), + Digest::default(), + BlockNumber::GENESIS, account_updates, InputNotes::new_unchecked(input_notes), BatchNoteTree::with_contiguous_leaves( diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 532e314dd..03ea004f3 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -131,7 +131,7 @@ impl MockBlockBuilder { pub fn account_updates(mut self, updated_accounts: Vec) -> Self { for update in &updated_accounts { self.store_accounts - .insert(update.account_id().into(), update.new_state_hash().into()); + .insert(update.account_id().into(), update.final_state_commitment().into()); } self.updated_accounts = Some(updated_accounts); diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index ecbe1dc67..af404acd9 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -199,7 +199,8 @@ impl MockStoreSuccess { // update accounts for update in block.updated_accounts() { - 
locked_accounts.insert(update.account_id().into(), update.new_state_hash().into()); + locked_accounts + .insert(update.account_id().into(), update.final_state_commitment().into()); } let header = block.header(); debug_assert_eq!(locked_accounts.root(), header.account_root()); diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 95e6dcdb1..7ade42670 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -422,10 +422,10 @@ pub fn upsert_accounts( AccountUpdateDetails::New(account) => { debug_assert_eq!(account_id, account.id()); - if account.hash() != update.new_state_hash() { + if account.hash() != update.final_state_commitment() { return Err(DatabaseError::AccountHashesMismatch { calculated: account.hash(), - expected: update.new_state_hash(), + expected: update.final_state_commitment(), }); } @@ -439,8 +439,12 @@ pub fn upsert_accounts( return Err(DatabaseError::AccountNotFoundInDb(account_id)); }; - let account = - apply_delta(account_id, &row.get_ref(0)?, delta, &update.new_state_hash())?; + let account = apply_delta( + account_id, + &row.get_ref(0)?, + delta, + &update.final_state_commitment(), + )?; (Some(Cow::Owned(account)), Some(Cow::Borrowed(delta))) }, @@ -448,7 +452,7 @@ pub fn upsert_accounts( let inserted = upsert_stmt.execute(params![ account_id.to_bytes(), - update.new_state_hash().to_bytes(), + update.final_state_commitment().to_bytes(), block_num.as_u32(), full_account.as_ref().map(|account| account.to_bytes()), ])?; diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index 2232cc2da..de128879c 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -48,7 +48,7 @@ impl GenesisState { let account_smt: SimpleSmt = SimpleSmt::with_leaves(accounts.iter().map(|update| { - (update.account_id().prefix().into(), update.new_state_hash().into()) + (update.account_id().prefix().into(), update.final_state_commitment().into()) }))?; let header = BlockHeader::new( 
diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 059c22be2..8b8bcf695 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -264,7 +264,7 @@ impl State { block.updated_accounts().iter().map(|update| { ( LeafIndex::new_max_depth(update.account_id().prefix().into()), - update.new_state_hash().into(), + update.final_state_commitment().into(), ) }), ); From 2a10b47f5a2e3d928296d85d8516050b8e560f4a Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Fri, 14 Feb 2025 08:25:27 +0200 Subject: [PATCH 14/27] chore: upgrade otel crates and filter otel traces (#690) --- CHANGELOG.md | 2 +- Cargo.lock | 27 ++++---- bin/faucet/src/main.rs | 29 +++++---- bin/node/src/main.rs | 9 +-- crates/test-macro/src/lib.rs | 4 +- crates/utils/Cargo.toml | 10 +-- crates/utils/src/logging.rs | 123 ++++++++++++++++++----------------- 7 files changed, 108 insertions(+), 96 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0a88e643..8a3e87b2a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ ### Enhancements -- Add an optional open-telemetry trace exporter (#659). +- Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). - Instrument the block-producer's block building process (#676). 
diff --git a/Cargo.lock b/Cargo.lock index 90826a365..a7eb2e6ce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2312,23 +2312,23 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "opentelemetry" -version = "0.27.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab70038c28ed37b97d8ed414b6429d343a8bbf44c9f79ec854f3a643029ba6d7" +checksum = "236e667b670a5cdf90c258f5a55794ec5ac5027e960c224bff8367a59e1e6426" dependencies = [ "futures-core", "futures-sink", "js-sys", "pin-project-lite", - "thiserror 1.0.69", + "thiserror 2.0.11", "tracing", ] [[package]] name = "opentelemetry-otlp" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cf61a1868dacc576bf2b2a1c3e9ab150af7272909e80085c3173384fe11f76" +checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" dependencies = [ "async-trait", "futures-core", @@ -2337,17 +2337,16 @@ dependencies = [ "opentelemetry-proto", "opentelemetry_sdk", "prost", - "thiserror 1.0.69", + "thiserror 2.0.11", "tokio", "tonic", - "tracing", ] [[package]] name = "opentelemetry-proto" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6e05acbfada5ec79023c85368af14abd0b307c015e9064d249b2a950ef459a6" +checksum = "56f8870d3024727e99212eb3bb1762ec16e255e3e6f58eeb3dc8db1aa226746d" dependencies = [ "opentelemetry", "opentelemetry_sdk", @@ -2357,9 +2356,9 @@ dependencies = [ [[package]] name = "opentelemetry_sdk" -version = "0.27.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "231e9d6ceef9b0b2546ddf52335785ce41252bc7474ee8ba05bfad277be13ab8" +checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" dependencies = [ "async-trait", "futures-channel", @@ -2370,7 +2369,7 @@ dependencies = [ "percent-encoding", "rand", "serde_json", - 
"thiserror 1.0.69", + "thiserror 2.0.11", "tokio", "tokio-stream", "tracing", @@ -3764,9 +3763,9 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a971f6058498b5c0f1affa23e7ea202057a7301dbff68e968b2d578bcbd053" +checksum = "721f2d2569dce9f3dfbbddee5906941e953bfcdf736a62da3377f5751650cc36" dependencies = [ "js-sys", "once_cell", diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 5cc169a4e..64e223d0b 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -17,7 +17,9 @@ use client::initialize_faucet_client; use handlers::{get_background, get_favicon, get_index_css, get_index_html, get_index_js}; use http::HeaderValue; use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; -use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin, version::LongVersion}; +use miden_node_utils::{ + config::load_config, crypto::get_rpo_random_coin, logging::OpenTelemetry, version::LongVersion, +}; use miden_objects::{ account::{AccountFile, AccountStorageMode, AuthSecretKey}, asset::TokenSymbol, @@ -89,14 +91,15 @@ pub enum Command { #[tokio::main] async fn main() -> anyhow::Result<()> { - miden_node_utils::logging::setup_logging().context("Failed to initialize logging")?; + miden_node_utils::logging::setup_tracing(OpenTelemetry::Disabled) + .context("failed to initialize logging")?; let cli = Cli::parse(); match &cli.command { Command::Start { config } => { let config: FaucetConfig = - load_config(config).context("Failed to load configuration file")?; + load_config(config).context("failed to load configuration file")?; let faucet_state = FaucetState::new(config.clone()).await?; @@ -129,7 +132,7 @@ async fn main() -> anyhow::Result<()> { anyhow::anyhow!("Couldn't get any socket addrs for endpoint: {}", config.endpoint), )?; let listener = - TcpListener::bind(socket_addr).await.context("Failed 
to bind TCP listener")?; + TcpListener::bind(socket_addr).await.context("failed to bind TCP listener")?; info!(target: COMPONENT, endpoint = %config.endpoint, "Server started"); @@ -146,12 +149,12 @@ async fn main() -> anyhow::Result<()> { println!("Generating new faucet account. This may take a few minutes..."); let config: FaucetConfig = - load_config(config_path).context("Failed to load configuration file")?; + load_config(config_path).context("failed to load configuration file")?; let (_, root_block_header, _) = initialize_faucet_client(&config).await?; let current_dir = - std::env::current_dir().context("Failed to open current directory")?; + std::env::current_dir().context("failed to open current directory")?; let mut rng = ChaCha20Rng::from_seed(rand::random()); @@ -159,16 +162,16 @@ async fn main() -> anyhow::Result<()> { let (account, account_seed) = create_basic_fungible_faucet( rng.gen(), - (&root_block_header).try_into().context("Failed to create anchor block")?, + (&root_block_header).try_into().context("failed to create anchor block")?, TokenSymbol::try_from(token_symbol.as_str()) - .context("Failed to parse token symbol")?, + .context("failed to parse token symbol")?, *decimals, Felt::try_from(*max_supply) .expect("max supply value is greater than or equal to the field modulus"), AccountStorageMode::Public, AuthScheme::RpoFalcon512 { pub_key: secret.public_key() }, ) - .context("Failed to create basic fungible faucet account")?; + .context("failed to create basic fungible faucet account")?; let account_data = AccountFile::new(account, Some(account_seed), AuthSecretKey::RpoFalcon512(secret)); @@ -176,14 +179,14 @@ async fn main() -> anyhow::Result<()> { let output_path = current_dir.join(output_path); account_data .write(&output_path) - .context("Failed to write account data to file")?; + .context("failed to write account data to file")?; println!("Faucet account file successfully created at: {output_path:?}"); }, Command::Init { config_path, 
faucet_account_path } => { let current_dir = - std::env::current_dir().context("Failed to open current directory")?; + std::env::current_dir().context("failed to open current directory")?; let config_file_path = current_dir.join(config_path); @@ -193,10 +196,10 @@ async fn main() -> anyhow::Result<()> { }; let config_as_toml_string = - toml::to_string(&config).context("Failed to serialize default config")?; + toml::to_string(&config).context("failed to serialize default config")?; std::fs::write(&config_file_path, config_as_toml_string) - .context("Error writing config to file")?; + .context("error writing config to file")?; println!("Config file successfully created at: {config_file_path:?}"); }, diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 1697f61f7..599e2a6fc 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -6,7 +6,7 @@ use commands::{init::init_config_files, start::start_node}; use miden_node_block_producer::server::BlockProducer; use miden_node_rpc::server::Rpc; use miden_node_store::server::Store; -use miden_node_utils::{config::load_config, version::LongVersion}; +use miden_node_utils::{config::load_config, logging::OpenTelemetry, version::LongVersion}; mod commands; mod config; @@ -88,9 +88,10 @@ async fn main() -> anyhow::Result<()> { let cli = Cli::parse(); // Open telemetry exporting is only valid for running the node. - let open_telemetry = match &cli.command { - Command::Start { open_telemetry, .. } => *open_telemetry, - _ => false, + let open_telemetry = if let Command::Start { open_telemetry: true, .. 
} = &cli.command { + OpenTelemetry::Enabled + } else { + OpenTelemetry::Disabled }; miden_node_utils::logging::setup_tracing(open_telemetry)?; diff --git a/crates/test-macro/src/lib.rs b/crates/test-macro/src/lib.rs index e0c0e7db2..8958a4d47 100644 --- a/crates/test-macro/src/lib.rs +++ b/crates/test-macro/src/lib.rs @@ -10,7 +10,9 @@ pub fn enable_logging(_attr: TokenStream, item: TokenStream) -> TokenStream { let stmts = function.block.stmts; let block: Block = parse_quote! {{ if ::std::env::args().any(|e| e == "--nocapture") { - let subscriber = ::tracing::subscriber::set_default(::miden_node_utils::logging::subscriber()); + ::miden_node_utils::logging::setup_tracing( + ::miden_node_utils::logging::OpenTelemetry::Disabled + ).expect("logging setup should succeed"); let span = ::tracing::span!(::tracing::Level::INFO, #name).entered(); #(#stmts)* diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 2d0cc785b..73995dca5 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,19 +21,19 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } -http = "1.2" +http = { version = "1.2" } itertools = { workspace = true } miden-objects = { workspace = true } -opentelemetry = { version = "0.27" } -opentelemetry-otlp = { version = "0.27", features = ["tls-roots"] } -opentelemetry_sdk = { version = "0.27", features = ["rt-tokio"] } +opentelemetry = { version = "0.28" } +opentelemetry-otlp = { version = "0.28", default-features = false, features = ["grpc-tonic", "tls-roots", "trace"] } +opentelemetry_sdk = { version = "0.28", features = ["rt-tokio"] } rand = { workspace = true } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } tonic = { workspace = true } tracing = { workspace = true } tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } -tracing-opentelemetry = { version = "0.28" } 
+tracing-opentelemetry = { version = "0.29" } tracing-subscriber = { workspace = true } # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index 8d933046d..a8a220ccd 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -1,29 +1,50 @@ +use std::str::FromStr; + use anyhow::Result; use opentelemetry::trace::TracerProvider as _; use opentelemetry_otlp::WithTonicConfig; use opentelemetry_sdk::propagation::TraceContextPropagator; -use tracing::subscriber::{self, Subscriber}; +use tracing::subscriber::Subscriber; use tracing_opentelemetry::OpenTelemetryLayer; -use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Layer, Registry}; +use tracing_subscriber::{ + layer::{Filter, SubscriberExt}, + Layer, Registry, +}; + +/// Configures [`setup_tracing`] to enable or disable the open-telemetry exporter. +#[derive(Clone, Copy)] +pub enum OpenTelemetry { + Enabled, + Disabled, +} -/// Configures tracing and optionally enables an open-telemetry OTLP exporter. +impl OpenTelemetry { + fn is_enabled(self) -> bool { + matches!(self, OpenTelemetry::Enabled) + } +} + +/// Initializes tracing to stdout and optionally an open-telemetry exporter. +/// +/// Trace filtering defaults to `INFO` and can be configured using the conventional `RUST_LOG` +/// environment variable. 
/// /// The open-telemetry configuration is controlled via environment variables as defined in the /// [specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md#opentelemetry-protocol-exporter) -pub fn setup_tracing(enable_otel: bool) -> Result<()> { - if enable_otel { +pub fn setup_tracing(otel: OpenTelemetry) -> Result<()> { + if otel.is_enabled() { opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new()); } - let otel_layer = enable_otel.then_some(open_telemetry_layer()); - let subscriber = Registry::default().with(stdout_layer()).with(otel_layer); - tracing::subscriber::set_global_default(subscriber).map_err(Into::into) -} + // Note: open-telemetry requires a tokio-runtime, so this _must_ be lazily evaluated (aka not + // `then_some`) to avoid crashing sync callers (with OpenTelemetry::Disabled set). Examples of + // such callers are tests with logging enabled. + let otel_layer = otel.is_enabled().then(open_telemetry_layer); -pub fn setup_logging() -> Result<()> { - subscriber::set_global_default(subscriber())?; - - Ok(()) + let subscriber = Registry::default() + .with(stdout_layer().with_filter(env_or_default_filter())) + .with(otel_layer.with_filter(env_or_default_filter())); + tracing::subscriber::set_global_default(subscriber).map_err(Into::into) } fn open_telemetry_layer() -> Box + Send + Sync + 'static> @@ -37,8 +58,8 @@ where .build() .unwrap(); - let tracer = opentelemetry_sdk::trace::TracerProvider::builder() - .with_batch_exporter(exporter, opentelemetry_sdk::runtime::Tokio) + let tracer = opentelemetry_sdk::trace::SdkTracerProvider::builder() + .with_batch_exporter(exporter) .build(); let tracer = tracer.tracer("tracing-otel-subscriber"); @@ -61,11 +82,6 @@ where .with_line_number(true) .with_target(true) .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in 
extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) .boxed() } @@ -75,45 +91,36 @@ where S: Subscriber, for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, { - tracing_forest::ForestLayer::default() - .with_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) - .boxed() -} - -#[cfg(not(feature = "tracing-forest"))] -pub fn subscriber() -> impl Subscriber + core::fmt::Debug { - use tracing_subscriber::fmt::format::FmtSpan; - - tracing_subscriber::fmt() - .pretty() - .compact() - .with_level(true) - .with_file(true) - .with_line_number(true) - .with_target(true) - .with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. - "info,axum::rejection=trace".into() - })) - .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) - .finish() + tracing_forest::ForestLayer::default().boxed() } -#[cfg(feature = "tracing-forest")] -pub fn subscriber() -> impl Subscriber + core::fmt::Debug { - pub use tracing_forest::ForestLayer; - pub use tracing_subscriber::{layer::SubscriberExt, Registry}; +/// Creates a filter from the `RUST_LOG` env var with a default of `INFO` if unset. +/// +/// # Panics +/// +/// Panics if `RUST_LOG` fails to parse. +fn env_or_default_filter() -> Box + Send + Sync + 'static> { + use tracing::level_filters::LevelFilter; + use tracing_subscriber::{ + filter::{FilterExt, Targets}, + EnvFilter, + }; - Registry::default().with(ForestLayer::default()).with( - EnvFilter::try_from_default_env().unwrap_or_else(|_| { - // axum logs rejections from built-in extracts on the trace level, so we enable this - // manually. 
- "info,axum::rejection=trace".into() - }), - ) + // `tracing` does not allow differentiating between invalid and missing env var so we manually + // do this instead. The alternative is to silently ignore parsing errors which I think is worse. + match std::env::var(EnvFilter::DEFAULT_ENV) { + Ok(rust_log) => FilterExt::boxed( + EnvFilter::from_str(&rust_log) + .expect("RUST_LOG should contain a valid filter configuration"), + ), + Err(std::env::VarError::NotUnicode(_)) => panic!("RUST_LOG contained non-unicode"), + Err(std::env::VarError::NotPresent) => { + // Default level is INFO, and additionally enable logs from axum extractor rejections. + FilterExt::boxed( + Targets::new() + .with_default(LevelFilter::INFO) + .with_target("axum::rejection", LevelFilter::TRACE), + ) + }, + } } From d57c8dd4df7a6ef62ae79b430427fbf5f407a425 Mon Sep 17 00:00:00 2001 From: Varun Doshi <61531351+varun-doshi@users.noreply.github.com> Date: Tue, 18 Feb 2025 19:37:12 +0530 Subject: [PATCH 15/27] feat(store): added BlockChain wrapper for Mmr (#668) --- crates/store/src/state.rs | 168 ++++++++++++++++++++++++++------------ 1 file changed, 118 insertions(+), 50 deletions(-) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index 8b8bcf695..ccc566e98 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -96,17 +96,108 @@ pub struct TransactionInputs { pub found_unauthenticated_notes: BTreeSet, } +/// A [Merkle Mountain Range](Mmr) defining a chain of blocks. +#[derive(Debug, Clone)] +pub struct Blockchain(Mmr); + +impl Blockchain { + /// Returns a new Blockchain. + pub fn new(chain_mmr: Mmr) -> Self { + Self(chain_mmr) + } + + /// Returns the tip of the chain, i.e. the number of the latest block in the chain. + pub fn chain_tip(&self) -> BlockNumber { + let block_number: u32 = (self.0.forest() - 1) + .try_into() + .expect("chain_mmr always has, at least, the genesis block"); + + block_number.into() + } + + /// Returns the chain length. 
+ pub fn chain_length(&self) -> BlockNumber { + self.chain_tip().child() + } + + /// Returns the current peaks of the MMR. + pub fn peaks(&self) -> MmrPeaks { + self.0.peaks() + } + + /// Returns the peaks of the MMR at the state specified by `forest`. + /// + /// # Errors + /// + /// Returns an error if the specified `forest` value is not valid for this MMR. + pub fn peaks_at(&self, forest: usize) -> Result { + self.0.peaks_at(forest) + } + + /// Adds a block commitment to the MMR. The caller must ensure that this commitent is the one + /// for the next block in the chain. + pub fn push(&mut self, block_commitment: RpoDigest) { + self.0.add(block_commitment); + } + + /// Returns an [`MmrProof`] for the leaf at the specified position. + pub fn open(&self, pos: usize) -> Result { + self.0.open_at(pos, self.0.forest()) + } + + /// Returns a reference to the underlying [`Mmr`]. + pub fn as_mmr(&self) -> &Mmr { + &self.0 + } + + /// Returns the latest block number and partial mmr. + pub fn partial_mmr_from_blocks( + &self, + blocks: &BTreeSet, + latest_block_number: BlockNumber, + ) -> Result { + // Using latest block as the target forest means we take the state of the MMR one before + // the latest block. This is because the latest block will be used as the reference + // block of the batch and will be added to the MMR by the batch kernel. + let target_forest = latest_block_number.as_usize(); + let peaks = self + .peaks_at(target_forest) + .expect("target_forest should be smaller than forest of the chain mmr"); + // Grab the block merkle paths from the inner state. + let mut partial_mmr = PartialMmr::from_peaks(peaks); + + for block_num in blocks.iter().map(BlockNumber::as_usize) { + // SAFETY: We have ensured block nums are less than chain length. 
+ let leaf = self + .0 + .get(block_num) + .expect("block num less than chain length should exist in chain mmr"); + let path = self + .0 + .open_at(block_num, target_forest) + .expect("block num and target forest should be valid for this mmr") + .merkle_path; + // SAFETY: We should be able to fill the partial MMR with data from the chain MMR + // without errors, otherwise it indicates the chain mmr is invalid. + partial_mmr + .track(block_num, leaf, &path) + .expect("filling partial mmr with data from mmr should succeed"); + } + Ok(partial_mmr) + } +} + /// Container for state that needs to be updated atomically. struct InnerState { nullifier_tree: NullifierTree, - chain_mmr: Mmr, + blockchain: Blockchain, account_tree: SimpleSmt, } impl InnerState { /// Returns the latest block number. fn latest_block_num(&self) -> BlockNumber { - let block_number: u32 = (self.chain_mmr.forest() - 1) + let block_number: u32 = (self.blockchain.chain_tip().as_usize() - 1) .try_into() .expect("chain_mmr always has, at least, the genesis block"); @@ -144,7 +235,11 @@ impl State { let chain_mmr = load_mmr(&mut db).await?; let account_tree = load_accounts(&mut db).await?; - let inner = RwLock::new(InnerState { nullifier_tree, chain_mmr, account_tree }); + let inner = RwLock::new(InnerState { + nullifier_tree, + blockchain: Blockchain::new(chain_mmr), + account_tree, + }); let writer = Mutex::new(()); let db = Arc::new(db); @@ -245,7 +340,7 @@ impl State { // compute updates for the in-memory data structures // new_block.chain_root must be equal to the chain MMR root prior to the update - let peaks = inner.chain_mmr.peaks(); + let peaks = inner.blockchain.peaks(); if peaks.hash_peaks() != header.chain_root() { return Err(InvalidBlockError::NewBlockInvalidChainRoot.into()); } @@ -374,7 +469,7 @@ impl State { .account_tree .apply_mutations(account_tree_update) .expect("Unreachable: old account tree root must be checked before this step"); - inner.chain_mmr.add(block_hash); + 
inner.blockchain.push(block_hash); } info!(%block_hash, block_num = block_num.as_u32(), COMPONENT, "apply_block successful"); @@ -396,7 +491,7 @@ impl State { if let Some(header) = block_header { let mmr_proof = if include_mmr_proof { let inner = self.inner.read().await; - let mmr_proof = inner.chain_mmr.open(header.block_num().as_usize())?; + let mmr_proof = inner.blockchain.open(header.block_num().as_usize())?; Some(mmr_proof) } else { None @@ -460,12 +555,12 @@ impl State { // dropping the guard. let (chain_length, merkle_paths) = { let state = self.inner.read().await; - let chain_length = state.chain_mmr.forest(); + let chain_length = state.blockchain.chain_length().as_usize(); let paths = blocks .iter() .map(|&block_num| { - let proof = state.chain_mmr.open(block_num.as_usize())?.merkle_path; + let proof = state.blockchain.open(block_num.as_usize())?.merkle_path; Ok::<_, MmrError>((block_num, proof)) }) @@ -540,15 +635,12 @@ impl State { let mut blocks = tx_reference_blocks; blocks.extend(note_blocks); - // Grab the block merkle paths from the inner state. - // - // NOTE: Scoped block to automatically drop the mutex guard asap. - // - // We also avoid accessing the db in the block as this would delay - // dropping the guard. + // Scoped block to automatically drop the read lock guard as soon as we're done. + // We also avoid accessing the db in the block as this would delay dropping the guard. let (batch_reference_block, partial_mmr) = { - let state = self.inner.read().await; - let latest_block_num = state.latest_block_num(); + let inner_state = self.inner.blocking_read(); + + let latest_block_num = inner_state.blockchain.chain_tip(); let highest_block_num = *blocks.last().expect("we should have checked for empty block references"); @@ -564,35 +656,10 @@ impl State { // there is no need to prove its inclusion. blocks.remove(&latest_block_num); - // Using latest block as the target forest means we take the state of the MMR one before - // the latest block. 
This is because the latest block will be used as the reference - // block of the batch and will be added to the MMR by the batch kernel. - let target_forest = latest_block_num.as_usize(); - let peaks = state - .chain_mmr - .peaks_at(target_forest) - .expect("target_forest should be smaller than forest of the chain mmr"); - let mut partial_mmr = PartialMmr::from_peaks(peaks); - - for block_num in blocks.iter().map(BlockNumber::as_usize) { - // SAFETY: We have ensured block nums are less than chain length. - let leaf = state - .chain_mmr - .get(block_num) - .expect("block num less than chain length should exist in chain mmr"); - let path = state - .chain_mmr - .open_at(block_num, target_forest) - .expect("block num and target forest should be valid for this mmr") - .merkle_path; - // SAFETY: We should be able to fill the partial MMR with data from the chain MMR - // without errors, otherwise it indicates the chain mmr is invalid. - partial_mmr - .track(block_num, leaf, &path) - .expect("filling partial mmr with data from mmr should succeed"); - } - - (latest_block_num, partial_mmr) + ( + latest_block_num, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + ) }; // Fetch the reference block of the batch as part of this query, so we can avoid looking it @@ -679,7 +746,8 @@ impl State { let from_forest = (block_num + 1).as_usize(); let to_forest = state_sync.block_header.block_num().as_usize(); inner - .chain_mmr + .blockchain + .as_mmr() .get_delta(from_forest, to_forest) .map_err(StateSyncError::FailedToBuildMmrDelta)? 
}; @@ -708,7 +776,7 @@ impl State { let note_sync = self.db.get_note_sync(block_num, note_tags).await?; - let mmr_proof = inner.chain_mmr.open(note_sync.block_header.block_num().as_usize())?; + let mmr_proof = inner.blockchain.open(note_sync.block_header.block_num().as_usize())?; Ok((note_sync, mmr_proof)) } @@ -729,9 +797,9 @@ impl State { .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?; // sanity check - if inner.chain_mmr.forest() != latest.block_num().as_usize() + 1 { + if inner.blockchain.chain_tip() != latest.block_num() { return Err(GetBlockInputsError::IncorrectChainMmrForestNumber { - forest: inner.chain_mmr.forest(), + forest: inner.blockchain.chain_tip().as_usize(), block_num: latest.block_num(), }); } @@ -739,7 +807,7 @@ impl State { // using current block number gets us the peaks of the chain MMR as of one block ago; // this is done so that latest.chain_root matches the returned peaks let chain_peaks = - inner.chain_mmr.peaks_at(latest.block_num().as_usize()).map_err(|error| { + inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| { GetBlockInputsError::FailedToGetMmrPeaksForForest { forest: latest.block_num().as_usize(), error, From 22f53bdd3f559973f972a1137dc8da93b18593ac Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Wed, 19 Feb 2025 18:49:09 +1300 Subject: [PATCH 16/27] feat: OpenTelemetrySpanExt trait (#700) --- .../block-producer/src/block_builder/mod.rs | 32 +++---- crates/utils/src/tracing/mod.rs | 6 +- crates/utils/src/tracing/span_ext.rs | 84 +++++++++++++++++++ 3 files changed, 102 insertions(+), 20 deletions(-) create mode 100644 crates/utils/src/tracing/span_ext.rs diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 1e7b24294..bbdc23c5a 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -4,7 +4,7 @@ use std::{ }; use futures::FutureExt; -use 
miden_node_utils::tracing::{OpenTelemetrySpanExt, OtelStatus}; +use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ account::AccountId, batch::ProvenBatch, @@ -106,7 +106,7 @@ impl BlockBuilder { .and_then(|proven_block| async { self.inject_failure(proven_block) }) .and_then(|proven_block| self.commit_block(mempool, proven_block)) // Handle errors by propagating the error to the root span and rolling back the block. - .inspect_err(|err| Span::current().set_status(OtelStatus::Error { description: format!("{err:?}").into() })) + .inspect_err(|err| Span::current().set_error(err)) .or_else(|_err| self.rollback_block(mempool).never_error()) // Error has been handled, this is just type manipulation to remove the result wrapper. .unwrap_or_else(|_| ()) @@ -197,9 +197,9 @@ impl BlockBuilder { async fn simulate_proving(&self) { let proving_duration = rand::thread_rng().gen_range(self.simulated_proof_time.clone()); - Span::current().set_attribute("range.min_s", self.simulated_proof_time.start.as_secs_f64()); - Span::current().set_attribute("range.max_s", self.simulated_proof_time.end.as_secs_f64()); - Span::current().set_attribute("dice_roll_s", proving_duration.as_secs_f64()); + Span::current().set_attribute("range.min_s", self.simulated_proof_time.start); + Span::current().set_attribute("range.max_s", self.simulated_proof_time.end); + Span::current().set_attribute("dice_roll_s", proving_duration); tokio::time::sleep(proving_duration).await; } @@ -282,8 +282,8 @@ struct ProvenBlock { impl SelectedBlock { fn inject_telemetry(&self) { let span = Span::current(); - span.set_attribute("block.number", i64::from(self.block_number.as_u32())); - span.set_attribute("block.batches.count", i64::from(self.batches.len() as u32)); + span.set_attribute("block.number", self.block_number); + span.set_attribute("block.batches.count", self.batches.len() as u32); } } @@ -319,17 +319,17 @@ impl ProvenBlock { let span = Span::current(); let header = self.block.header(); - 
span.set_attribute("block.hash", header.hash().to_hex()); - span.set_attribute("block.sub_hash", header.sub_hash().to_hex()); - span.set_attribute("block.parent_hash", header.prev_hash().to_hex()); + span.set_attribute("block.hash", header.hash()); + span.set_attribute("block.sub_hash", header.sub_hash()); + span.set_attribute("block.parent_hash", header.prev_hash()); span.set_attribute("block.protocol.version", i64::from(header.version())); - span.set_attribute("block.commitments.kernel", header.kernel_root().to_hex()); - span.set_attribute("block.commitments.nullifier", header.nullifier_root().to_hex()); - span.set_attribute("block.commitments.account", header.account_root().to_hex()); - span.set_attribute("block.commitments.chain", header.chain_root().to_hex()); - span.set_attribute("block.commitments.note", header.note_root().to_hex()); - span.set_attribute("block.commitments.transaction", header.tx_hash().to_hex()); + span.set_attribute("block.commitments.kernel", header.kernel_root()); + span.set_attribute("block.commitments.nullifier", header.nullifier_root()); + span.set_attribute("block.commitments.account", header.account_root()); + span.set_attribute("block.commitments.chain", header.chain_root()); + span.set_attribute("block.commitments.note", header.note_root()); + span.set_attribute("block.commitments.transaction", header.tx_hash()); } } diff --git a/crates/utils/src/tracing/mod.rs b/crates/utils/src/tracing/mod.rs index 292be68be..2a01208df 100644 --- a/crates/utils/src/tracing/mod.rs +++ b/crates/utils/src/tracing/mod.rs @@ -1,6 +1,4 @@ pub mod grpc; +mod span_ext; -// Re-export useful traits for open-telemetry traces. This avoids requiring other crates from -// importing that family of crates directly. 
-pub use opentelemetry::trace::Status as OtelStatus; -pub use tracing_opentelemetry::OpenTelemetrySpanExt; +pub use span_ext::{OpenTelemetrySpanExt, ToValue}; diff --git a/crates/utils/src/tracing/span_ext.rs b/crates/utils/src/tracing/span_ext.rs new file mode 100644 index 000000000..a21150ac0 --- /dev/null +++ b/crates/utils/src/tracing/span_ext.rs @@ -0,0 +1,84 @@ +use core::time::Duration; + +use miden_objects::{block::BlockNumber, Digest}; +use opentelemetry::{trace::Status, Key, Value}; + +/// Utility functions for converting types into [`opentelemetry::Value`]. +pub trait ToValue { + fn to_value(&self) -> Value; +} + +impl ToValue for Duration { + fn to_value(&self) -> Value { + self.as_secs_f64().into() + } +} + +impl ToValue for Digest { + fn to_value(&self) -> Value { + self.to_hex().into() + } +} + +impl ToValue for f64 { + fn to_value(&self) -> Value { + (*self).into() + } +} + +impl ToValue for BlockNumber { + fn to_value(&self) -> Value { + i64::from(self.as_u32()).into() + } +} + +impl ToValue for u32 { + fn to_value(&self) -> Value { + i64::from(*self).into() + } +} + +impl ToValue for i64 { + fn to_value(&self) -> Value { + (*self).into() + } +} + +/// Utility functions based on [`tracing_opentelemetry::OpenTelemetrySpanExt`]. +/// +/// This is a sealed trait. It and cannot be implemented outside of this module. +pub trait OpenTelemetrySpanExt: private::Sealed { + fn set_attribute(&self, key: impl Into, value: impl ToValue); + fn set_error(&self, err: &dyn std::error::Error); +} + +impl OpenTelemetrySpanExt for S +where + S: tracing_opentelemetry::OpenTelemetrySpanExt, +{ + /// Sets an attribute on `Span`. + /// + /// Implementations for `ToValue` should be added to this crate (miden-node-utils). + fn set_attribute(&self, key: impl Into, value: impl ToValue) { + tracing_opentelemetry::OpenTelemetrySpanExt::set_attribute(self, key, value.to_value()); + } + + /// Sets a status on `Span` based on an error. 
+ fn set_error(&self, err: &dyn std::error::Error) { + // Coalesce all sources into one string. + let mut description = format!("{err}"); + let current = err; + while let Some(cause) = current.source() { + description.push_str(format!("\nCaused by: {cause}").as_str()); + } + tracing_opentelemetry::OpenTelemetrySpanExt::set_status( + self, + Status::Error { description: description.into() }, + ); + } +} + +mod private { + pub trait Sealed {} + impl Sealed for S where S: tracing_opentelemetry::OpenTelemetrySpanExt {} +} From fea7a5947fdd543c83f2fe5d4c468638fa97a131 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Wed, 19 Feb 2025 11:30:12 +0200 Subject: [PATCH 17/27] fix(store): blocking in async fn get_batch_inputs (#705) --- crates/store/src/state.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index ccc566e98..b08348e3a 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -197,11 +197,7 @@ struct InnerState { impl InnerState { /// Returns the latest block number. fn latest_block_num(&self) -> BlockNumber { - let block_number: u32 = (self.blockchain.chain_tip().as_usize() - 1) - .try_into() - .expect("chain_mmr always has, at least, the genesis block"); - - block_number.into() + self.blockchain.chain_tip() } } @@ -638,7 +634,7 @@ impl State { // Scoped block to automatically drop the read lock guard as soon as we're done. // We also avoid accessing the db in the block as this would delay dropping the guard. 
let (batch_reference_block, partial_mmr) = { - let inner_state = self.inner.blocking_read(); + let inner_state = self.inner.read().await; let latest_block_num = inner_state.blockchain.chain_tip(); From cf87d340f394d03bf376abff06a4b36149d63b24 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Thu, 20 Feb 2025 09:07:02 +0200 Subject: [PATCH 18/27] docs: monitoring guide (#699) --- .editorconfig | 23 ------ .github/workflows/doc.yml | 28 ------- .pre-commit-config.yaml | 34 -------- CHANGELOG.md | 1 + Makefile | 5 -- README.md | 97 +++++++++++++++------- docs/dev/_start_here.md | 16 ++++ docs/dev/monitoring.md | 99 ++++++++++++++++++++++ docs/index.md | 17 ---- docs/operator.md | 151 ++++++++++++++++++++++++++++++++++ mkdocs.yml | 4 - scripts/docs_requirements.txt | 4 - scripts/serve-doc-site.sh | 8 -- 13 files changed, 332 insertions(+), 155 deletions(-) delete mode 100644 .editorconfig delete mode 100644 .github/workflows/doc.yml delete mode 100644 .pre-commit-config.yaml create mode 100644 docs/dev/_start_here.md create mode 100644 docs/dev/monitoring.md delete mode 100644 docs/index.md create mode 100644 docs/operator.md delete mode 100644 mkdocs.yml delete mode 100755 scripts/docs_requirements.txt delete mode 100755 scripts/serve-doc-site.sh diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index 435c9cb63..000000000 --- a/.editorconfig +++ /dev/null @@ -1,23 +0,0 @@ -# Documentation available at editorconfig.org - -root=true - -[*] -ident_style = space -ident_size = 4 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true - -[*.rs] -max_line_length = 100 - -[*.proto] -max_line_length = 100 - -[*.md] -trim_trailing_whitespace = false - -[*.yml] -ident_size = 2 diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml deleted file mode 100644 index 772a2d85e..000000000 --- a/.github/workflows/doc.yml +++ /dev/null @@ -1,28 +0,0 @@ -# Runs 
documentation related jobs. - -name: doc - -on: - push: - branches: [main, next] - pull_request: - types: [opened, reopened, synchronize] - -# Limits workflow concurrency to only the latest commit in the PR. -concurrency: - group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}" - cancel-in-progress: true - -jobs: - doc: - name: doc stable on ubuntu-latest - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@main - - name: Rustup - run: rustup update --no-self-update - - uses: Swatinem/rust-cache@v2 - with: - save-if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/next' }} - - name: Build docs - run: make doc diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index 4b1551577..000000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,34 +0,0 @@ -# See https://pre-commit.com for more information -# See https://pre-commit.com/hooks.html for more hooks -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 - hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-json - - id: check-toml - - id: pretty-format-json - - id: check-added-large-files - - id: check-case-conflict - - id: check-executables-have-shebangs - - id: check-merge-conflict - - id: detect-private-key - - repo: local - hooks: - - id: lint - name: Make lint - stages: [commit] - language: rust - entry: make lint - - id: doc - name: Make doc - stages: [commit] - language: rust - entry: make doc - - id: check - name: Make check - stages: [commit] - language: rust - entry: make check \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a3e87b2a..87d5ae0a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). 
- Instrument the block-producer's block building process (#676). +- Initial developer and operator guides covering monitoring (#699). ### Changes diff --git a/Makefile b/Makefile index 5a6d691b0..8514edf2f 100644 --- a/Makefile +++ b/Makefile @@ -55,11 +55,6 @@ lint: format fix clippy toml workspace-check ## Runs all linting tasks at once ( doc: ## Generates & checks documentation $(WARNINGS) cargo doc --all-features --keep-going --release --locked - -.PHONY: doc-serve -doc-serve: ## Serves documentation site - ./scripts/serve-doc-site.sh - # --- testing ------------------------------------------------------------------------------------- .PHONY: test diff --git a/README.md b/README.md index 390488218..251238a09 100644 --- a/README.md +++ b/README.md @@ -5,25 +5,33 @@ [![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![crates.io](https://img.shields.io/crates/v/miden-node)](https://crates.io/crates/miden-node) -This repository holds the Miden node; that is, the software which processes transactions and creates blocks for the Miden rollup. +This repository holds the Miden node; that is, the software which processes transactions and creates blocks for the +Miden rollup. -### Status - -The Miden node is still under heavy development and the project can be considered to be in an _alpha_ stage. Many features are yet to be implemented and there are a number of limitations which we will lift in the near future. - -At this point, we are developing the Miden node for a centralized operator. As such, the work does not yet include components such as P2P networking and consensus. These will be added in the future. +> [!NOTE] +> The Miden node is still under heavy development and the project can be considered to be in an _alpha_ stage. +> Many features are yet to be implemented and there are a number of limitations which we will lift in the near future. 
+> +> At this point, we are developing the Miden node for a centralized operator. As such, the work does not yet include +> components such as P2P networking and consensus. These will be added in the future. ## Architecture The Miden node consists of 3 main components, which communicate using gRPC: -- **[RPC](crates/rpc):** an externally-facing component through which clients can interact with the node. It receives client requests (e.g., to synchronize with the latest state of the chain, or to submit transactions), performs basic validation, and forwards the requests to the appropriate internal components. -- **[Store](crates/store):** maintains the state of the chain. It serves as the "source of truth" for the chain - i.e., if it is not in the store, the node does not consider it to be part of the chain. -- **[Block Producer](crates/block-producer):** accepts transactions from the RPC component, creates blocks containing those transactions, and sends them to the store. +- **[RPC](crates/rpc):** an externally-facing component through which clients can interact with the node. It receives + client requests (e.g., to synchronize with the latest state of the chain, or to submit transactions), performs basic + validation, and forwards the requests to the appropriate internal components. +- **[Store](crates/store):** maintains the state of the chain. It serves as the "source of truth" for the chain - i.e., + if it is not in the store, the node does not consider it to be part of the chain. +- **[Block Producer](crates/block-producer):** accepts transactions from the RPC component, creates blocks containing + those transactions, and sends them to the store. -All 3 components can either run as one process, or each component can run in its own process. See the [Running the node](#running-the-node) section for more details. +All 3 components can either run as one process, or each component can run in its own process. 
See the +[Running the node](#running-the-node) section for more details. -The diagram below illustrates high-level design of each component as well as basic interactions between them (components in light-grey are yet to be built). +The diagram below illustrates high-level design of each component as well as basic interactions between them (components +in light-grey are yet to be built). ![Architecture diagram](./assets/architecture.png) @@ -31,15 +39,19 @@ The diagram below illustrates high-level design of each component as well as bas The node software can be installed as a Debian package or using Rust's package manager `cargo`. -Official releases are available as debian packages which can be found under our [releases](https://github.com/0xPolygonMiden/miden-node/releases) page. +Official releases are available as debian packages which can be found under our +[releases](https://github.com/0xPolygonMiden/miden-node/releases) page. -Alternatively, the Rust package manager `cargo` can be used to install on non-debian distributions or to compile from source. +Alternatively, the Rust package manager `cargo` can be used to install on non-debian distributions or to compile from +source. ### Debian package -Debian packages are available and are the fastest way to install the node on a Debian-based system. Currently only `amd64` architecture are supported. +Debian packages are available and are the fastest way to install the node on a Debian-based system. Both `amd64` and +`arm64` packages are available. -These packages can be found under our [releases](https://github.com/0xPolygonMiden/miden-node/releases) page along with a checksum. +These packages can be found under our [releases](https://github.com/0xPolygonMiden/miden-node/releases) page along with +a checksum. Note that this includes a `systemd` service called `miden-node` (disabled by default). @@ -50,17 +62,22 @@ sudo dpkg -i $package_name.deb ``` > [!TIP] -> You should verify the checksum using a SHA256 utility. 
This differs from platform to platform, but on most linux distros: +> You should verify the checksum using a SHA256 utility. This differs from platform to platform, but on most linux +> distros: +> > ```sh > sha256sum --check $checksum_file.deb.checksum > ``` +> > can be used so long as the checksum file and the package file are in the same folder. ### Install using `cargo` -Install Rust version **1.84** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). +Install Rust version **1.84** or greater using the official Rust installation +[instructions](https://www.rust-lang.org/tools/install). -Depending on the platform, you may need to install additional libraries. For example, on Ubuntu 22.04 the following command ensures that all required libraries are installed. +Depending on the platform, you may need to install additional libraries. For example, on Ubuntu 22.04 the following +command ensures that all required libraries are installed. ```sh sudo apt install llvm clang bindgen pkg-config libssl-dev libsqlite3-dev @@ -78,7 +95,9 @@ This will install the latest official version of the node. You can install a spe cargo install miden-node --locked --version x.y.z ``` -You can also use `cargo` to compile the node from the source code if for some reason you need a specific git revision. Note that since these aren't official releases we cannot provide much support for any issues you run into, so consider this for advanced users only. The incantation is a little different as you'll be targeting this repo instead: +You can also use `cargo` to compile the node from the source code if for some reason you need a specific git revision. +Note that since these aren't official releases we cannot provide much support for any issues you run into, so consider +this for advanced users only. 
The incantation is a little different as you'll be targeting this repo instead: ```sh # Install from a specific branch @@ -91,7 +110,8 @@ cargo install --locked --git https://github.com/0xPolygonMiden/miden-node miden- cargo install --locked --git https://github.com/0xPolygonMiden/miden-node miden-node --rev ``` -More information on the various options can be found [here](https://doc.rust-lang.org/cargo/commands/cargo-install.html#install-options). +More information on the various options can be found +[here](https://doc.rust-lang.org/cargo/commands/cargo-install.html#install-options). ### Verify installation @@ -105,17 +125,23 @@ miden-node --version ### Setup -Decide on a location to store all the node data and configuration files in. This guide will use the placeholder `` and `` to represent these directories. They are allowed to be the same, though most unix distributions have conventions for these being `/opt/miden` and `/etc/miden` respectively. Note that if you intend to use the `systemd` service then by default it expects these conventions to be upheld. +Decide on a location to store all the node data and configuration files in. This guide will use the placeholder +`` and `` to represent these directories. They are allowed to be the same, though most unix +distributions have conventions for these being `/opt/miden` and `/etc/miden` respectively. Note that if you intend to +use the `systemd` service then by default it expects these conventions to be upheld. -We need to configure the node as well as bootstrap the chain by creating the genesis block. Generate the default configurations for both: +We need to configure the node as well as bootstrap the chain by creating the genesis block. Generate the default +configurations for both: ```sh miden-node init \ --config-path /miden-node.toml \ - --genesis-path /genesis.toml + --genesis-path /genesis.toml ``` -which will generate `miden-node.toml` and `genesis.toml` files. 
The latter controls the accounts that the genesis block will be spawned with and by default includes a basic wallet account and a basic fungible faucet account. You can modify this file to add/remove accounts as desired. +which will generate `miden-node.toml` and `genesis.toml` files. The latter controls the accounts that the genesis block +will be spawned with and by default includes a basic fungible faucet account. You can modify this file to add/remove +accounts as desired. Next, bootstrap the chain by generating the genesis data: @@ -127,9 +153,6 @@ miden-node make-genesis \ which will create `genesis.dat` and an `accounts` directory containing account data based on the `genesis.toml` file. -> [!NOTE] -> `make-genesis` will take a long time if you're running the production version of `miden-node`, see the tip in the [installation](#install-using-`cargo`) section. - Modify the `miden-node.toml` configuration file such that the `[store]` paths point to our `` folder: ```toml @@ -143,7 +166,8 @@ Finally, configure the node's endpoints to your liking. ### Systemd -An example service file is provided [here](packaging/miden-node.service). If you used the Debian package installer then this service was already installed alongside it. +An example service file is provided [here](packaging/miden-node.service). If you used the Debian package installer then +this service was already installed alongside it. ### Running the node @@ -161,11 +185,19 @@ or alternatively start the systemd service if that's how you wish to operate: systemctl start miden-node.service ``` +### Monitoring and telemetry + +Please see our operator documentation [here](docs/operator.md). + ## Updating -We currently make no guarantees about backwards compatibility. Updating the node software therefore consists of wiping all existing data and re-installing the node's software again. This includes regenerating the configuration files and genesis block as these formats may have changed. 
This effectively means every update is a complete reset of the blockchain. +We currently make no guarantees about backwards compatibility. Updating the node software therefore consists of wiping +all existing data and re-installing the node's software again. This includes regenerating the configuration files and +genesis block as these formats may have changed. This effectively means every update is a complete reset of the +blockchain. -First stop the currently running node or systemd service then remove all existing data. If you followed the [Setup](#setup) section, then this can be achieved by deleting all information in ``: +First stop the currently running node or systemd service then remove all existing data. If you followed the +[Setup](#setup) section, then this can be achieved by deleting all information in ``: ```sh rm -rf @@ -176,11 +208,12 @@ rm -rf ## Development -See our [contributing](CONTRIBUTING.md) guidelines and our [makefile](Makefile) for example workflows e.g. run the testsuite using +See our [contributing](CONTRIBUTING.md) guidelines and our [makefile](Makefile) for example workflows e.g. run the +testsuite using ```sh make test -``` +``` ## License diff --git a/docs/dev/_start_here.md b/docs/dev/_start_here.md new file mode 100644 index 000000000..2482fb9ad --- /dev/null +++ b/docs/dev/_start_here.md @@ -0,0 +1,16 @@ +Welcome to the developer guide for the `miden` node :) + +> [!TIP] +> This is intended to serve as a basic introduction to the codebase as well as covering relevant concepts and +> recording architectural decisions. +> +> This is _not_ intended for dApp developers or users of the node, but for development on this codebase itself. + +> [!CAUTION] +> Living documents go stale - the code is the final arbitrator of truth. + +It is also a good idea to familiarise yourself with the [operator manual](../operator.md). 
+
+| Chapter                               | Overview                                       |
+| ------------------------------------- | ---------------------------------------------- |
+| [monitoring](/docs/dev/monitoring.md) | All things relating to tracing and monitoring. |
diff --git a/docs/dev/monitoring.md b/docs/dev/monitoring.md
new file mode 100644
index 000000000..4c682c3cf
--- /dev/null
+++ b/docs/dev/monitoring.md
@@ -0,0 +1,99 @@
+# Monitoring
+
+Developer level overview of how we aim to use `tracing` and `open-telemetry` to provide monitoring and telemetry for the
+node.
+
+Please begin by reading through the [monitoring operator guide](/docs/operator.md#monitoring-telemetry) as this will
+provide some much needed context.
+
+## Approach and philosophy
+
+We want to trace important information such that we can quickly recognise issues (monitoring & alerting) and identify
+the cause. Conventionally this has been achieved via metrics and logs respectively, however a more modern approach is
+using wide-events/traces and post-processing these instead. We're using the OpenTelemetry standard for this, however we
+are only using the trace pillar and avoid metrics and logs.
+
+We wish to emit these traces without compromising on code quality nor readability. This is also a downside to including
+metrics - these are usually emitted inline with the code, causing noise and obscuring the business logic. Ideally we
+want to rely almost entirely on `tracing::#[instrument]` to create spans as these live outside the function body.
+
+There are of course exceptions to the rule - usually the root span itself is created manually e.g. a new root span for
+each block building iteration. Inner spans should ideally keep to `#[instrument]` where possible.
+
+## Relevant crates
+
+We've attempted to lock most of the OpenTelemetry crates behind our own abstractions in the `utils` crate. There are a
+lot of these crates and it can be difficult to keep them all separate when writing new code. 
We also hope this will +provide a more consistent result as we build out our monitoring. + +`tracing` is the defacto standard for logging and tracing within the Rust ecosystem. OpenTelemetry has decided to avoid +fracturing the ecosystem and instead attempts to bridge between `tracing` and the OpenTelemetry standard in-so-far as is +possible. All this to say that there are some rough edges where the two combine - this should improve over time. + +| crate | description | +| ------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `tracing` | Emits tracing spans and events. | +| `tracing-subscriber` | Provides the conventional `tracing` stdout logger (no interaction with OpenTelemetry). | +| `tracing-forest` | Logs span trees to stdout. Useful to visualize span relations, but cannot trace across RPC boundaries as it doesn't understand remote tracing context. | +| `tracing-opentelemetry` | Bridges the gaps between `tracing` and the OpenTelemetry standard. | +| `opentelemetry` | Defines core types and concepts for OpenTelemetry. | +| `opentelemetry-otlp` | gRPC exporter for OpenTelemetry traces. | +| `opentelemetry_sdk` | Provides the OpenTelemetry abstractions for metrics, logs and traces. | +| `opentelemetry-semantic-conventions` | Constants for naming conventions as per OpenTelemetry standard. | + +## Important concepts + +### OpenTelemetry standards & documentation + +https://opentelemetry.io/docs/ + +There is a lot. You don't need all of it - look things up as and when you stumble into confusion. + +It is probably worth reading through the naming conventions to get a sense of style. + +### Footguns and common issues + +`tracing` requires data to be known statically e.g. you cannot add span attributes dynamically. 
`tracing-opentelemetry`
+provides a span extension trait which works around this limitation - however this dynamic information is _only_ visible
+to the OpenTelemetry processing i.e. `tracing_subscriber` won't see this at all.
+
+In general, you'll find that `tracing` subscribers are blind to any extensions or OpenTelemetry specific concepts. The
+reverse is of course not true because OpenTelemetry is integrating with `tracing`.
+
+Another pain point is error stacks - or rather lack thereof. `#[tracing::instrument(err)]` correctly marks the span as
+an error, however unfortunately the macro only uses the `Display` or `Debug` implementation of the error. This means you
+are missing the error reports entirely. `tracing_opentelemetry` reuses the stringified error data provided by `tracing`
+so currently there is no work-around for this. Using `Debug` via `?err` at least shows some information but one still
+misses the actual error messages which is quite bad.
+
+Manually instrumenting code (i.e. without `#[instrument]`) can be rather error prone because async calls must be
+manually instrumented each time. And non-async code also requires holding the span.
+
+### Distributed context
+
+We track traces across our components by injecting the parent span ID into the gRPC client's request. The server side
+then extracts this and uses this as the parent span ID for its processing.
+
+> [!CAUTION]
+> This is an OpenTelemetry concept - conventional `tracing` cannot follow these relations.
+
+Read more in the official OpenTelemetry [documentation](https://opentelemetry.io/docs/concepts/context-propagation/).
+
+### Choosing spans
+
+A root span should represent a set of operations that belong together. It also shouldn't live forever i.e. a root span
+around the entire node makes no sense as the operation runs forever.
+
+A good convention to follow is creating child spans for timing information you may want when debugging a failure or slow
+operation. 
As an example, it may make sense to instrument a mutex locking function to visualize the contention on it. Or +separating the database file IO from the sqlite statement creation. Essentially operations which you would otherwise +consider logging the timings for should be separate spans. While you may find this changes the code you might otherwise +create, We've found this actually results in fairly good structure since it follows your business logic sense. + +### Inclusions and naming conventions + +Where possible, attempt to find and use the naming conventions specified by the standard, ideally via the +`opentelemetry-semantic-conventions` crate. + +Include information you'd want to see when debugging - make life easy for your future self looking at data at 3AM on a +Saturday. Also consider what information may be useful when correlating data e.g. sender IP. diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 000ea3455..000000000 --- a/docs/index.md +++ /dev/null @@ -1,17 +0,0 @@ -# Welcome to MkDocs - -For full documentation visit [mkdocs.org](https://www.mkdocs.org). - -## Commands - -* `mkdocs new [dir-name]` - Create a new project. -* `mkdocs serve` - Start the live-reloading docs server. -* `mkdocs build` - Build the documentation site. -* `mkdocs -h` - Print help message and exit. - -## Project layout - - mkdocs.yml # The configuration file. - docs/ - index.md # The documentation homepage. - ... # Other markdown pages, images and other files. diff --git a/docs/operator.md b/docs/operator.md new file mode 100644 index 000000000..286aa64eb --- /dev/null +++ b/docs/operator.md @@ -0,0 +1,151 @@ +The bulk of the information is still present in the [main readme](/README.md), which will be migrated here eventually. 
+ + + +- [Node configuration](#node-configuration) +- [Monitoring & telemetry](#monitoring-telemetry) + - [What gets traced](#what-gets-traced) + - [RPC request/response](#rpc-requestresponse) + - [Block building](#block-building) + - [Batch building](#batch-building) + - [Verbosity](#verbosity) + - [Configuration](#configuration) + - [Example: Honeycomb configuration](#example-honeycomb-configuration) + + + +## Node configuration + +TODO (waiting on CLI arg refactoring): + +- mention `--help` to see options, defaults and env var options +- consider `--help` output here +- describe how to inject env vars using `source`, `systemd.service` files etc. + +## Monitoring & telemetry + +We provide logging to `stdout` and an optional [OpenTelemetry](https://opentelemetry.io/) exporter for our traces. + +We do _not_ export OpenTelemetry logs or metrics. Our end goal is to derive these based off of our tracing information. +This approach is known as [wide-events](https://isburmistrov.substack.com/p/all-you-need-is-wide-events-not-metrics), +[structured logs](https://newrelic.com/blog/how-to-relic/structured-logging), and +[Observibility 2.0](https://www.honeycomb.io/blog/time-to-version-observability-signs-point-to-yes). + +What we're exporting are `traces` which consist of `spans` (covering a period of time), and `events` (something happened +at a specific instance in time). These are extremely useful to debug distributed systems - even though `miden` is still +centralized, the `node` components are distributed. + +OpenTelemetry provides a +[Span Metrics Converter](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/connector/spanmetricsconnector) +which can be used to convert our traces into more conventional metrics. + +### What gets traced + +We assign a unique trace (aka root span) to each RPC request/response, batch build, and block build process. + +> [!CAUTION] +> Span and attribute naming is unstable and should not be relied upon. 
This also means changes here will not be +> breaking, however we will do our best to document them. + +#### RPC request/response + +Not yet implemented. + +#### Block building + +This trace covers the building, proving and submission of a block. + +
+ Span tree + +```sh +block_builder.build_block +┝━ block_builder.select_block +│ ┝━ mempool.lock +│ ┕━ mempool.select_block +┝━ block_builder.get_block_inputs +│ ┝━ block_builder.summarize_batches +│ ┕━ store.client.get_block_inputs +│ ┕━ store.rpc/GetBlockInputs +│ ┕━ store.server.get_block_inputs +│ ┝━ validate_nullifiers +│ ┝━ read_account_ids +│ ┝━ validate_notes +│ ┝━ select_block_header_by_block_num +│ ┝━ select_note_inclusion_proofs +│ ┕━ select_block_headers +┝━ block_builder.prove_block +│ ┝━ execute_program +│ ┕━ block_builder.simulate_proving +┝━ block_builder.inject_failure +┕━ block_builder.commit_block + ┝━ store.client.apply_block + │ ┕━ store.rpc/ApplyBlock + │ ┕━ store.server.apply_block + │ ┕━ apply_block + │ ┝━ select_block_header_by_block_num + │ ┕━ update_in_memory_structs + ┝━ mempool.lock + ┕━ mempool.commit_block + ┕━ mempool.revert_expired_transactions + ┕━ mempool.revert_transactions +``` +
+ +#### Batch building + +Not yet implemented. + +### Verbosity + +> [!IMPORTANT] +> We log important spans and events at `info` level or higher, which is also the default log level. +> +> Changing this level should rarely be required - let us know if you're missing information that should be at `info`. + +The available log levels are `trace`, `debug`, `info` (default), `warn`, `error` which can be configured using the +`RUST_LOG` environment variable e.g. + +```sh +export RUST_LOG=debug +``` + +The verbosity can also be specified by component (when running them as a single process): + +```sh +export RUST_LOG=warn,block-producer=debug,rpc=error +``` + +The above would set the general level to `warn`, and the `block-producer` and `rpc` components would be overriden to +`debug` and `error` respectively. Though as mentioned, it should be unusual to do this. + +### Configuration + +The OpenTelemetry trace exporter can be enabled by adding the `--open-telemetry` flag to the node's start command: + +```sh +miden-node start --open-telemetry node +``` + +The exporter can be configured using environment variables as specified in the official +[documents](https://opentelemetry.io/docs/specs/otel/protocol/exporter/). + +> [!WARNING] +> Not all options are fully supported. We are limited to what the Rust OpenTelemetry implementation supports. If you +> have any problems please open an issue and we'll do our best to resolve it. +> +> Note: we only support gRPC as the export protocol. + +#### Example: Honeycomb configuration + +> [!NOTE] +> This is based off Honeycomb's OpenTelemetry +> [setup guide](https://docs.honeycomb.io/send-data/opentelemetry/#using-the-honeycomb-opentelemetry-endpoint). + +```sh +OTEL_EXPORTER_OTLP_ENDPOINT=api.honeycomb.io:443 \ +OTEL_EXPORTER_OTLP_HEADERS="x-honeycomb-team=your-api-key" \ +miden-node start --open-telemetry node +``` + +TODO: honeycomb queries, triggers and board examples. 
diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index 442a2f742..000000000 --- a/mkdocs.yml +++ /dev/null @@ -1,4 +0,0 @@ -site_name: Miden node - -theme: - name: material diff --git a/scripts/docs_requirements.txt b/scripts/docs_requirements.txt deleted file mode 100755 index a1ceeb8e9..000000000 --- a/scripts/docs_requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -mkdocs-material==9.4.8 -markdown-include==0.8.1 -mkdocs-open-in-new-tab==1.0.3 -mkdocs-multirepo-plugin==0.7.0 \ No newline at end of file diff --git a/scripts/serve-doc-site.sh b/scripts/serve-doc-site.sh deleted file mode 100755 index 219462915..000000000 --- a/scripts/serve-doc-site.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -virtualenv venv -source venv/bin/activate -pip3 install -r docs_requirements.txt -cd .. -mkdocs serve --strict \ No newline at end of file From f83570a879b13f7f51f41cc5de97f9ebda07994f Mon Sep 17 00:00:00 2001 From: Tomas Date: Fri, 21 Feb 2025 06:17:25 -0300 Subject: [PATCH 19/27] feat: remove nullifiers from sync state (#713) --- CHANGELOG.md | 2 + crates/proto/src/generated/requests.rs | 9 +- crates/proto/src/generated/responses.rs | 3 - crates/proto/src/generated/rpc.rs | 16 +-- crates/rpc-proto/proto/requests.proto | 8 +- crates/rpc-proto/proto/responses.proto | 3 - crates/rpc-proto/proto/rpc.proto | 8 +- crates/rpc/README.md | 14 +-- crates/store/src/db/mod.rs | 9 +- crates/store/src/db/sql/mod.rs | 67 +--------- crates/store/src/db/tests.rs | 157 +++++------------------- crates/store/src/server/api.rs | 23 +--- crates/store/src/state.rs | 13 +- proto/requests.proto | 8 +- proto/responses.proto | 3 - proto/rpc.proto | 8 +- 16 files changed, 82 insertions(+), 269 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 87d5ae0a8..e51e27f17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,8 @@ - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). 
+- [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). +- [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). ### Enhancements diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index c8e19bb29..9194e084f 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -17,6 +17,9 @@ pub struct CheckNullifiersByPrefixRequest { /// to `prefix_len`. #[prost(uint32, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, + /// Block number from which the nullifiers are requested (inclusive). + #[prost(fixed32, tag = "3")] + pub block_num: u32, } /// Returns a nullifier proof for each of the requested nullifiers. #[derive(Clone, PartialEq, ::prost::Message)] @@ -42,7 +45,7 @@ pub struct GetBlockHeaderByNumberRequest { /// /// Specifies state updates the client is interested in. The server will return the first block which /// contains a note matching `note_tags` or the chain tip. And the corresponding updates to -/// `nullifiers` and `account_ids` for that block range. +/// `account_ids` for that block range. #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncStateRequest { /// Last block known by the client. The response will contain data starting from the next block, @@ -60,10 +63,6 @@ pub struct SyncStateRequest { /// Specifies the tags which the client is interested in. #[prost(fixed32, repeated, tag = "3")] pub note_tags: ::prost::alloc::vec::Vec, - /// Determines the nullifiers the client is interested in by specifying the 16high bits of the - /// target nullifier. - #[prost(uint32, repeated, tag = "4")] - pub nullifiers: ::prost::alloc::vec::Vec, } /// Note synchronization request. 
/// diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index c3a8f5f20..ac764735d 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -61,9 +61,6 @@ pub struct SyncStateResponse { /// List of all notes together with the Merkle paths from `response.block_header.note_root`. #[prost(message, repeated, tag = "7")] pub notes: ::prost::alloc::vec::Vec, - /// List of nullifiers created between `request.block_num + 1` and `response.block_header.block_num`. - #[prost(message, repeated, tag = "8")] - pub nullifiers: ::prost::alloc::vec::Vec, } /// Represents the result of syncing notes request. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/generated/rpc.rs b/crates/proto/src/generated/rpc.rs index cab7a3998..4f25dfdbb 100644 --- a/crates/proto/src/generated/rpc.rs +++ b/crates/proto/src/generated/rpc.rs @@ -353,19 +353,19 @@ pub mod api_client { self.inner.unary(req, path, codec).await } /// Returns info which can be used by the client to sync up to the latest state of the chain - /// for the objects (accounts, notes, nullifiers) the client is interested in. + /// for the objects (accounts and notes) the client is interested in. /// /// This request returns the next block containing requested data. It also returns `chain_tip` /// which is the latest block number in the chain. Client is expected to repeat these requests /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point /// the client is fully synchronized with the chain. /// - /// Each request also returns info about new notes, nullifiers etc. created. It also returns + /// Each update response also contains info about new notes, accounts etc. created. It also returns /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain /// MMR peaks and chain MMR nodes. 
/// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only high - /// part of hashes. Thus, returned data contains excessive notes and nullifiers, client can make + /// For preserving some degree of privacy, note tags contain only high + /// part of hashes. Thus, returned data contains excessive notes, client can make /// additional filtering of that data on its side. pub async fn sync_state( &mut self, @@ -502,19 +502,19 @@ pub mod api_server { tonic::Status, >; /// Returns info which can be used by the client to sync up to the latest state of the chain - /// for the objects (accounts, notes, nullifiers) the client is interested in. + /// for the objects (accounts and notes) the client is interested in. /// /// This request returns the next block containing requested data. It also returns `chain_tip` /// which is the latest block number in the chain. Client is expected to repeat these requests /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point /// the client is fully synchronized with the chain. /// - /// Each request also returns info about new notes, nullifiers etc. created. It also returns + /// Each update response also contains info about new notes, accounts etc. created. It also returns /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain /// MMR peaks and chain MMR nodes. /// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only high - /// part of hashes. Thus, returned data contains excessive notes and nullifiers, client can make + /// For preserving some degree of privacy, note tags contain only high + /// part of hashes. Thus, returned data contains excessive notes, client can make /// additional filtering of that data on its side. 
async fn sync_state( &self, diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index f2323c56c..bf9fd557a 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -18,6 +18,8 @@ message CheckNullifiersByPrefixRequest { // List of nullifiers to check. Each nullifier is specified by its prefix with length equal // to `prefix_len`. repeated uint32 nullifiers = 2; + // Block number from which the nullifiers are requested (inclusive). + fixed32 block_num = 3; } // Returns a nullifier proof for each of the requested nullifiers. @@ -41,7 +43,7 @@ message GetBlockHeaderByNumberRequest { // // Specifies state updates the client is interested in. The server will return the first block which // contains a note matching `note_tags` or the chain tip. And the corresponding updates to -// `nullifiers` and `account_ids` for that block range. +// `account_ids` for that block range. message SyncStateRequest { // Last block known by the client. The response will contain data starting from the next block, // until the first block which contains a note of matching the requested tag, or the chain tip @@ -57,10 +59,6 @@ message SyncStateRequest { // Specifies the tags which the client is interested in. repeated fixed32 note_tags = 3; - - // Determines the nullifiers the client is interested in by specifying the 16high bits of the - // target nullifier. - repeated uint32 nullifiers = 4; } // Note synchronization request. diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index f1dfe5f90..ad1f353a5 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -66,9 +66,6 @@ message SyncStateResponse { // List of all notes together with the Merkle paths from `response.block_header.note_root`. 
repeated note.NoteSyncRecord notes = 7; - - // List of nullifiers created between `request.block_num + 1` and `response.block_header.block_num`. - repeated NullifierUpdate nullifiers = 8; } // Represents the result of syncing notes request. diff --git a/crates/rpc-proto/proto/rpc.proto b/crates/rpc-proto/proto/rpc.proto index 82da3e20c..a7ad531db 100644 --- a/crates/rpc-proto/proto/rpc.proto +++ b/crates/rpc-proto/proto/rpc.proto @@ -49,19 +49,19 @@ service Api { rpc SyncNotes(requests.SyncNoteRequest) returns (responses.SyncNoteResponse) {} // Returns info which can be used by the client to sync up to the latest state of the chain - // for the objects (accounts, notes, nullifiers) the client is interested in. + // for the objects (accounts and notes) the client is interested in. // // This request returns the next block containing requested data. It also returns `chain_tip` // which is the latest block number in the chain. Client is expected to repeat these requests // in a loop until `response.block_header.block_num == response.chain_tip`, at which point // the client is fully synchronized with the chain. // - // Each request also returns info about new notes, nullifiers etc. created. It also returns + // Each update response also contains info about new notes, accounts etc. created. It also returns // Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain // MMR peaks and chain MMR nodes. // - // For preserving some degree of privacy, note tags and nullifiers filters contain only high - // part of hashes. Thus, returned data contains excessive notes and nullifiers, client can make + // For preserving some degree of privacy, note tags contain only high + // part of hashes. Thus, returned data contains excessive notes, client can make // additional filtering of that data on its side. 
rpc SyncState(requests.SyncStateRequest) returns (responses.SyncStateResponse) {} } diff --git a/crates/rpc/README.md b/crates/rpc/README.md index e8e7e6492..520b52014 100644 --- a/crates/rpc/README.md +++ b/crates/rpc/README.md @@ -37,7 +37,8 @@ Returns a nullifier proof for each of the requested nullifiers. ### CheckNullifiersByPrefix -Returns a list of nullifiers that match the specified prefixes and are recorded in the node. +Returns a list of nullifiers recorded in the node that match the specified prefixes and were created at or after +the given block height. Only 16-bit prefixes are supported at this time. @@ -102,19 +103,18 @@ the chain. ### SyncState -Returns info which can be used by the client to sync up to the latest state of the chain for the objects (accounts, -notes, nullifiers) the client is interested in. +Returns info which can be used by the client to sync up to the latest state of the chain for the objects (accounts and +notes) the client is interested in. This request returns the next block containing requested data. It also returns `chain_tip` which is the latest block number in the chain. Client is expected to repeat these requests in a loop until `response.block_header.block_num == response.chain_tip`, at which point the client is fully synchronized with the chain. -Each request also returns info about new notes, nullifiers etc. created. It also returns Chain MMR delta that can be +Each request also returns info about new notes, accounts, etc. created. It also returns Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain MMR peaks and chain MMR nodes. -For preserving some degree of privacy, note tags and nullifiers filters contain only high part of hashes. Thus, returned -data contains excessive notes and nullifiers, client can make additional filtering of that data on its side. - +For preserving some degree of privacy, note tags contain only high part of hashes. 
Thus, returned data contains excessive +notes, client can make additional filtering of that data on its side. --- ## License diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 51a678e22..a00094f54 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -85,7 +85,6 @@ pub struct StateSyncUpdate { pub block_header: BlockHeader, pub account_updates: Vec, pub transactions: Vec, - pub nullifiers: Vec, } #[derive(Debug, PartialEq)] @@ -215,12 +214,13 @@ impl Db { &self, prefix_len: u32, nullifier_prefixes: Vec, + block_num: BlockNumber, ) -> Result> { self.pool .get() .await? .interact(move |conn| { - sql::select_nullifiers_by_prefix(conn, prefix_len, &nullifier_prefixes) + sql::select_nullifiers_by_prefix(conn, prefix_len, &nullifier_prefixes, block_num) }) .await .map_err(|err| { @@ -327,15 +327,12 @@ impl Db { block_num: BlockNumber, account_ids: Vec, note_tags: Vec, - nullifier_prefixes: Vec, ) -> Result { self.pool .get() .await .map_err(DatabaseError::MissingDbConnection)? 
- .interact(move |conn| { - sql::get_state_sync(conn, block_num, &account_ids, ¬e_tags, &nullifier_prefixes) - }) + .interact(move |conn| sql::get_state_sync(conn, block_num, &account_ids, ¬e_tags)) .await .map_err(|err| { DatabaseError::InteractError(format!("Get state sync task failed: {err}")) diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 7ade42670..87ff16c3d 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -617,56 +617,7 @@ pub fn select_all_nullifiers(conn: &mut Connection) -> Result Result> { - let nullifier_prefixes: Vec = - nullifier_prefixes.iter().copied().map(Into::into).collect(); - - let mut stmt = conn.prepare_cached( - " - SELECT - nullifier, - block_num - FROM - nullifiers - WHERE - block_num > ?1 AND - block_num <= ?2 AND - nullifier_prefix IN rarray(?3) - ORDER BY - block_num ASC - ", - )?; - - let mut rows = - stmt.query(params![block_start.as_u32(), block_end.as_u32(), Rc::new(nullifier_prefixes)])?; - - let mut result = Vec::new(); - while let Some(row) = rows.next()? { - let nullifier_data = row.get_ref(0)?.as_blob()?; - let nullifier = Nullifier::read_from_bytes(nullifier_data)?; - let block_num: u32 = row.get(1)?; - result.push(NullifierInfo { nullifier, block_num: block_num.into() }); - } - Ok(result) -} - -/// Select nullifiers created that match the `nullifier_prefixes` filter using the given -/// [Connection]. +/// Returns nullifiers filtered by prefix and block creation height. /// /// Each value of the `nullifier_prefixes` is only the `prefix_len` most significant bits /// of the nullifier of interest to the client. 
This hides the details of the specific @@ -680,6 +631,7 @@ pub fn select_nullifiers_by_prefix( conn: &mut Connection, prefix_len: u32, nullifier_prefixes: &[u32], + block_num: BlockNumber, ) -> Result> { assert_eq!(prefix_len, 16, "Only 16-bit prefixes are supported"); @@ -694,13 +646,13 @@ pub fn select_nullifiers_by_prefix( FROM nullifiers WHERE - nullifier_prefix IN rarray(?1) + nullifier_prefix IN rarray(?1) AND + block_num >= ?2 ORDER BY block_num ASC ", )?; - - let mut rows = stmt.query(params![Rc::new(nullifier_prefixes)])?; + let mut rows = stmt.query(params![Rc::new(nullifier_prefixes), block_num.as_u32()])?; let mut result = Vec::new(); while let Some(row) = rows.next()? { @@ -1195,7 +1147,6 @@ pub fn get_state_sync( block_num: BlockNumber, account_ids: &[AccountId], note_tag_prefixes: &[u32], - nullifier_prefixes: &[u32], ) -> Result { let notes = select_notes_since_block_by_tag_and_sender( conn, @@ -1218,19 +1169,11 @@ pub fn get_state_sync( account_ids, )?; - let nullifiers = select_nullifiers_by_block_range( - conn, - block_num, - block_header.block_num(), - nullifier_prefixes, - )?; - Ok(StateSyncUpdate { notes, block_header, account_updates, transactions, - nullifiers, }) } diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index fe8d1684c..768d6b2ff 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -486,140 +486,14 @@ fn sql_public_account_details() { assert_eq!(read_delta, Some(delta2)); } -#[test] -fn sql_select_nullifiers_by_block_range() { - let mut conn = create_db(); - - // test empty table - let nullifiers = - sql::select_nullifiers_by_block_range(&mut conn, 0.into(), u32::MAX.into(), &[]).unwrap(); - assert!(nullifiers.is_empty()); - - // test single item - let nullifier1 = num_to_nullifier(1 << 48); - let block_number1 = 1.into(); - create_block(&mut conn, block_number1); - - let transaction = conn.transaction().unwrap(); - sql::insert_nullifiers_for_block(&transaction, &[nullifier1], 
block_number1).unwrap(); - transaction.commit().unwrap(); - - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 0.into(), - u32::MAX.into(), - &[sql::utils::get_nullifier_prefix(&nullifier1)], - ) - .unwrap(); - assert_eq!( - nullifiers, - vec![NullifierInfo { - nullifier: nullifier1, - block_num: block_number1 - }] - ); - - // test two elements - let nullifier2 = num_to_nullifier(2 << 48); - let block_number2 = 2.into(); - create_block(&mut conn, block_number2); - - let transaction = conn.transaction().unwrap(); - sql::insert_nullifiers_for_block(&transaction, &[nullifier2], block_number2).unwrap(); - transaction.commit().unwrap(); - - let nullifiers = sql::select_all_nullifiers(&mut conn).unwrap(); - assert_eq!(nullifiers, vec![(nullifier1, block_number1), (nullifier2, block_number2)]); - - // only the nullifiers matching the prefix are included - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 0.into(), - u32::MAX.into(), - &[sql::utils::get_nullifier_prefix(&nullifier1)], - ) - .unwrap(); - assert_eq!( - nullifiers, - vec![NullifierInfo { - nullifier: nullifier1, - block_num: block_number1 - }] - ); - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 0.into(), - u32::MAX.into(), - &[sql::utils::get_nullifier_prefix(&nullifier2)], - ) - .unwrap(); - assert_eq!( - nullifiers, - vec![NullifierInfo { - nullifier: nullifier2, - block_num: block_number2 - }] - ); - - // Nullifiers created at block_end are included - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 0.into(), - 1.into(), - &[ - sql::utils::get_nullifier_prefix(&nullifier1), - sql::utils::get_nullifier_prefix(&nullifier2), - ], - ) - .unwrap(); - assert_eq!( - nullifiers, - vec![NullifierInfo { - nullifier: nullifier1, - block_num: block_number1 - }] - ); - - // Nullifiers created at block_start are not included - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 1.into(), - u32::MAX.into(), - &[ 
- sql::utils::get_nullifier_prefix(&nullifier1), - sql::utils::get_nullifier_prefix(&nullifier2), - ], - ) - .unwrap(); - assert_eq!( - nullifiers, - vec![NullifierInfo { - nullifier: nullifier2, - block_num: block_number2 - }] - ); - - // When block start and end are the same, no nullifiers should be returned. This case happens - // when the client requests a sync update, and it is already tracking the chain tip. - let nullifiers = sql::select_nullifiers_by_block_range( - &mut conn, - 2.into(), - 2.into(), - &[ - sql::utils::get_nullifier_prefix(&nullifier1), - sql::utils::get_nullifier_prefix(&nullifier2), - ], - ) - .unwrap(); - assert!(nullifiers.is_empty()); -} - #[test] fn select_nullifiers_by_prefix() { const PREFIX_LEN: u32 = 16; let mut conn = create_db(); // test empty table - let nullifiers = sql::select_nullifiers_by_prefix(&mut conn, PREFIX_LEN, &[]).unwrap(); + let block_number0 = 0.into(); + let nullifiers = + sql::select_nullifiers_by_prefix(&mut conn, PREFIX_LEN, &[], block_number0).unwrap(); assert!(nullifiers.is_empty()); // test single item @@ -635,6 +509,7 @@ fn select_nullifiers_by_prefix() { &mut conn, PREFIX_LEN, &[sql::utils::get_nullifier_prefix(&nullifier1)], + block_number0, ) .unwrap(); assert_eq!( @@ -662,6 +537,7 @@ fn select_nullifiers_by_prefix() { &mut conn, PREFIX_LEN, &[sql::utils::get_nullifier_prefix(&nullifier1)], + block_number0, ) .unwrap(); assert_eq!( @@ -675,6 +551,7 @@ fn select_nullifiers_by_prefix() { &mut conn, PREFIX_LEN, &[sql::utils::get_nullifier_prefix(&nullifier2)], + block_number0, ) .unwrap(); assert_eq!( @@ -693,6 +570,7 @@ fn select_nullifiers_by_prefix() { sql::utils::get_nullifier_prefix(&nullifier1), sql::utils::get_nullifier_prefix(&nullifier2), ], + block_number0, ) .unwrap(); assert_eq!( @@ -714,9 +592,30 @@ fn select_nullifiers_by_prefix() { &mut conn, PREFIX_LEN, &[sql::utils::get_nullifier_prefix(&num_to_nullifier(3 << 48))], + block_number0, ) .unwrap(); assert!(nullifiers.is_empty()); + + // If a 
block number is provided, only matching nullifiers created at or after that block are + // returned + let nullifiers = sql::select_nullifiers_by_prefix( + &mut conn, + PREFIX_LEN, + &[ + sql::utils::get_nullifier_prefix(&nullifier1), + sql::utils::get_nullifier_prefix(&nullifier2), + ], + block_number2, + ) + .unwrap(); + assert_eq!( + nullifiers, + vec![NullifierInfo { + nullifier: nullifier2, + block_num: block_number2 + }] + ); } #[test] diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 24cde86a5..be0ea560d 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -129,7 +129,11 @@ impl api_server::Api for StoreApi { let nullifiers = self .state - .check_nullifiers_by_prefix(request.prefix_len, request.nullifiers) + .check_nullifiers_by_prefix( + request.prefix_len, + request.nullifiers, + BlockNumber::from(request.block_num), + ) .await? .into_iter() .map(|nullifier_info| NullifierUpdate { @@ -160,12 +164,7 @@ impl api_server::Api for StoreApi { let (state, delta) = self .state - .sync_state( - request.block_num.into(), - account_ids, - request.note_tags, - request.nullifiers, - ) + .sync_state(request.block_num.into(), account_ids, request.note_tags) .await .map_err(internal_error)?; @@ -191,15 +190,6 @@ impl api_server::Api for StoreApi { let notes = state.notes.into_iter().map(Into::into).collect(); - let nullifiers = state - .nullifiers - .into_iter() - .map(|nullifier_info| NullifierUpdate { - nullifier: Some(nullifier_info.nullifier.into()), - block_num: nullifier_info.block_num.as_u32(), - }) - .collect(); - Ok(Response::new(SyncStateResponse { chain_tip: self.state.latest_block_num().await.as_u32(), block_header: Some(state.block_header.into()), @@ -207,7 +197,6 @@ impl api_server::Api for StoreApi { accounts, transactions, notes, - nullifiers, })) } diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index b08348e3a..e832c4078 100644 --- a/crates/store/src/state.rs +++ 
b/crates/store/src/state.rs @@ -502,8 +502,11 @@ impl State { &self, prefix_len: u32, nullifier_prefixes: Vec, + block_num: BlockNumber, ) -> Result, DatabaseError> { - self.db.select_nullifiers_by_prefix(prefix_len, nullifier_prefixes).await + self.db + .select_nullifiers_by_prefix(prefix_len, nullifier_prefixes, block_num) + .await } /// Generates membership proofs for each one of the `nullifiers` against the latest nullifier @@ -706,22 +709,16 @@ impl State { /// range. /// - `note_tags`: The tags the client is interested in, result is restricted to the first block /// with any matches tags. - /// - `nullifier_prefixes`: Only the 16 high bits of the nullifiers the client is interested in, - /// results will include nullifiers matching prefixes produced in the given block range. #[instrument(target = COMPONENT, skip_all, ret(level = "debug"), err)] pub async fn sync_state( &self, block_num: BlockNumber, account_ids: Vec, note_tags: Vec, - nullifier_prefixes: Vec, ) -> Result<(StateSyncUpdate, MmrDelta), StateSyncError> { let inner = self.inner.read().await; - let state_sync = self - .db - .get_state_sync(block_num, account_ids, note_tags, nullifier_prefixes) - .await?; + let state_sync = self.db.get_state_sync(block_num, account_ids, note_tags).await?; let delta = if block_num == state_sync.block_header.block_num() { // The client is in sync with the chain tip. diff --git a/proto/requests.proto b/proto/requests.proto index f2323c56c..bf9fd557a 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -18,6 +18,8 @@ message CheckNullifiersByPrefixRequest { // List of nullifiers to check. Each nullifier is specified by its prefix with length equal // to `prefix_len`. repeated uint32 nullifiers = 2; + // Block number from which the nullifiers are requested (inclusive). + fixed32 block_num = 3; } // Returns a nullifier proof for each of the requested nullifiers. 
@@ -41,7 +43,7 @@ message GetBlockHeaderByNumberRequest { // // Specifies state updates the client is interested in. The server will return the first block which // contains a note matching `note_tags` or the chain tip. And the corresponding updates to -// `nullifiers` and `account_ids` for that block range. +// `account_ids` for that block range. message SyncStateRequest { // Last block known by the client. The response will contain data starting from the next block, // until the first block which contains a note of matching the requested tag, or the chain tip @@ -57,10 +59,6 @@ message SyncStateRequest { // Specifies the tags which the client is interested in. repeated fixed32 note_tags = 3; - - // Determines the nullifiers the client is interested in by specifying the 16high bits of the - // target nullifier. - repeated uint32 nullifiers = 4; } // Note synchronization request. diff --git a/proto/responses.proto b/proto/responses.proto index f1dfe5f90..ad1f353a5 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -66,9 +66,6 @@ message SyncStateResponse { // List of all notes together with the Merkle paths from `response.block_header.note_root`. repeated note.NoteSyncRecord notes = 7; - - // List of nullifiers created between `request.block_num + 1` and `response.block_header.block_num`. - repeated NullifierUpdate nullifiers = 8; } // Represents the result of syncing notes request. diff --git a/proto/rpc.proto b/proto/rpc.proto index 82da3e20c..a7ad531db 100644 --- a/proto/rpc.proto +++ b/proto/rpc.proto @@ -49,19 +49,19 @@ service Api { rpc SyncNotes(requests.SyncNoteRequest) returns (responses.SyncNoteResponse) {} // Returns info which can be used by the client to sync up to the latest state of the chain - // for the objects (accounts, notes, nullifiers) the client is interested in. + // for the objects (accounts and notes) the client is interested in. // // This request returns the next block containing requested data. 
It also returns `chain_tip` // which is the latest block number in the chain. Client is expected to repeat these requests // in a loop until `response.block_header.block_num == response.chain_tip`, at which point // the client is fully synchronized with the chain. // - // Each request also returns info about new notes, nullifiers etc. created. It also returns + // Each update response also contains info about new notes, accounts etc. created. It also returns // Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain // MMR peaks and chain MMR nodes. // - // For preserving some degree of privacy, note tags and nullifiers filters contain only high - // part of hashes. Thus, returned data contains excessive notes and nullifiers, client can make + // For preserving some degree of privacy, note tags contain only high + // part of hashes. Thus, returned data contains excessive notes, client can make // additional filtering of that data on its side. rpc SyncState(requests.SyncStateRequest) returns (responses.SyncStateResponse) {} } From bea3138187a6fe7d0810c2ecd7cf5e06c29f7ab1 Mon Sep 17 00:00:00 2001 From: Mirko <48352201+Mirko-von-Leipzig@users.noreply.github.com> Date: Sat, 22 Feb 2025 09:01:19 +0200 Subject: [PATCH 20/27] feat(store): track network notes (#706) --- crates/store/Cargo.toml | 1 + crates/store/src/db/migrations/001-init.sql | 10 +- crates/store/src/db/mod.rs | 51 ++++- crates/store/src/db/sql/mod.rs | 197 +++++++++----------- crates/store/src/db/tests.rs | 120 +++++++++++- crates/store/src/state.rs | 14 +- 6 files changed, 269 insertions(+), 124 deletions(-) diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index b1fdf2610..adc902a73 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -33,5 +33,6 @@ url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } +miden-lib = { workspace = true, features = ["testing"] } miden-node-utils = { workspace = true, features = 
["tracing-forest"] } miden-objects = { workspace = true, features = ["testing"] } diff --git a/crates/store/src/db/migrations/001-init.sql b/crates/store/src/db/migrations/001-init.sql index 287d9b478..39953d2a5 100644 --- a/crates/store/src/db/migrations/001-init.sql +++ b/crates/store/src/db/migrations/001-init.sql @@ -28,20 +28,25 @@ CREATE TABLE note_index INTEGER NOT NULL, -- Index of note in batch, starting from 0 note_id BLOB NOT NULL, note_type INTEGER NOT NULL, -- 1-Public (0b01), 2-Private (0b10), 3-Encrypted (0b11) - sender BLOB NOT NULL, + sender BLOB NOT NULL, tag INTEGER NOT NULL, + execution_mode INTEGER NOT NULL, -- 0-Network, 1-Local aux INTEGER NOT NULL, execution_hint INTEGER NOT NULL, merkle_path BLOB NOT NULL, + consumed INTEGER NOT NULL, -- boolean + nullifier BLOB, -- Only known for public notes, null for private notes details BLOB, PRIMARY KEY (block_num, batch_index, note_index), FOREIGN KEY (block_num) REFERENCES block_headers(block_num), CONSTRAINT notes_type_in_enum CHECK (note_type BETWEEN 1 AND 3), + CONSTRAINT notes_execution_mode_in_enum CHECK (execution_mode BETWEEN 0 AND 1), + CONSTRAINT notes_consumed_is_bool CHECK (consumed BETWEEN 0 AND 1), CONSTRAINT notes_block_num_is_u32 CHECK (block_num BETWEEN 0 AND 0xFFFFFFFF), CONSTRAINT notes_batch_index_is_u32 CHECK (batch_index BETWEEN 0 AND 0xFFFFFFFF), CONSTRAINT notes_note_index_is_u32 CHECK (note_index BETWEEN 0 AND 0xFFFFFFFF) -) STRICT, WITHOUT ROWID; +) STRICT; CREATE TABLE accounts @@ -145,3 +150,4 @@ CREATE TABLE CREATE INDEX idx_transactions_account_id ON transactions(account_id); CREATE INDEX idx_transactions_block_num ON transactions(block_num); +CREATE INDEX unconsumed_network_notes ON notes(execution_mode, consumed); diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index a00094f54..6df767f9e 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -18,6 +18,7 @@ use miden_objects::{ utils::Serializable, }; use
rusqlite::vtab::array; +use sql::utils::{column_value_as_u64, read_block_number}; use tokio::sync::oneshot; use tracing::{info, info_span, instrument}; @@ -66,6 +67,54 @@ pub struct NoteRecord { pub merkle_path: MerklePath, } +impl NoteRecord { + /// Columns from the `notes` table ordered to match [`Self::from_row`]. + const SELECT_COLUMNS: &'static str = " + block_num, + batch_index, + note_index, + note_id, + note_type, + sender, + tag, + aux, + execution_hint, + merkle_path, + details + "; + + /// Parses a row from the `notes` table. The sql selection must use [`Self::SELECT_COLUMNS`] to + /// ensure ordering is correct. + fn from_row(row: &rusqlite::Row<'_>) -> Result { + let block_num = read_block_number(row, 0)?; + let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let note_id = row.get_ref(3)?.as_blob()?; + let note_id = RpoDigest::read_from_bytes(note_id)?; + let note_type = row.get::<_, u8>(4)?.try_into()?; + let sender = AccountId::read_from_bytes(row.get_ref(5)?.as_blob()?)?; + let tag: u32 = row.get(6)?; + let aux: u64 = row.get(7)?; + let aux = aux.try_into().map_err(DatabaseError::InvalidFelt)?; + let execution_hint = column_value_as_u64(row, 8)?; + let merkle_path_data = row.get_ref(9)?.as_blob()?; + let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; + let details_data = row.get_ref(10)?.as_blob_or_null()?; + let details = details_data.map(>::read_from_bytes).transpose()?; + + let metadata = + NoteMetadata::new(sender, note_type, tag.into(), execution_hint.try_into()?, aux)?; + + Ok(NoteRecord { + block_num, + note_index, + note_id, + metadata, + details, + merkle_path, + }) + } +} + impl From for proto::Note { fn from(note: NoteRecord) -> Self { Self { @@ -406,7 +455,7 @@ impl Db { allow_acquire: oneshot::Sender<()>, acquire_done: oneshot::Receiver<()>, block: Block, - notes: Vec, + notes: Vec<(NoteRecord, Option)>, ) -> Result<()> { self.pool .get() diff --git a/crates/store/src/db/sql/mod.rs 
b/crates/store/src/db/sql/mod.rs index 87ff16c3d..25cb00d8c 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -6,6 +6,7 @@ pub(crate) mod utils; use std::{ borrow::Cow, collections::{btree_map::Entry, BTreeMap, BTreeSet}, + num::NonZeroUsize, rc::Rc, }; @@ -19,7 +20,7 @@ use miden_objects::{ asset::NonFungibleAsset, block::{BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNumber}, crypto::{hash::rpo::RpoDigest, merkle::MerklePath}, - note::{NoteId, NoteInclusionProof, NoteMetadata, NoteType, Nullifier}, + note::{NoteExecutionMode, NoteId, NoteInclusionProof, NoteMetadata, NoteType, Nullifier}, transaction::TransactionId, utils::serde::{Deserializable, Serializable}, Digest, Word, @@ -567,7 +568,8 @@ fn insert_account_delta( // NULLIFIER QUERIES // ================================================================================================ -/// Insert nullifiers to the DB using the given [Transaction]. +/// Commit nullifiers to the DB using the given [Transaction]. This inserts the nullifiers into the +/// nullifiers table, and marks the note as consumed (if it was public). 
/// /// # Returns /// @@ -582,17 +584,21 @@ pub fn insert_nullifiers_for_block( nullifiers: &[Nullifier], block_num: BlockNumber, ) -> Result { + let serialized_nullifiers: Vec = + nullifiers.iter().map(Nullifier::to_bytes).map(Into::into).collect(); + let serialized_nullifiers = Rc::new(serialized_nullifiers); + + let mut stmt = transaction + .prepare_cached("UPDATE notes SET consumed = TRUE WHERE nullifier IN rarray(?1)")?; + let mut count = stmt.execute(params![serialized_nullifiers])?; + let mut stmt = transaction.prepare_cached( "INSERT INTO nullifiers (nullifier, nullifier_prefix, block_num) VALUES (?1, ?2, ?3);", )?; - let mut count = 0; - for nullifier in nullifiers { - count += stmt.execute(params![ - nullifier.to_bytes(), - get_nullifier_prefix(nullifier), - block_num.as_u32() - ])?; + for (nullifier, bytes) in nullifiers.iter().zip(serialized_nullifiers.iter()) { + count += + stmt.execute(params![bytes, get_nullifier_prefix(nullifier), block_num.as_u32()])?; } Ok(count) } @@ -675,62 +681,20 @@ pub fn select_nullifiers_by_prefix( /// A vector with notes, or an error. #[cfg(test)] pub fn select_all_notes(conn: &mut Connection) -> Result> { - let mut stmt = conn.prepare_cached( - " - SELECT - block_num, - batch_index, - note_index, - note_id, - note_type, - sender, - tag, - aux, - execution_hint, - merkle_path, - details - FROM - notes - ORDER BY - block_num ASC; - ", - )?; + let mut stmt = conn.prepare_cached(&format!( + "SELECT {} FROM notes ORDER BY block_num ASC", + NoteRecord::SELECT_COLUMNS, + ))?; let mut rows = stmt.query([])?; let mut notes = vec![]; while let Some(row) = rows.next()? 
{ - let note_id_data = row.get_ref(3)?.as_blob()?; - let note_id = RpoDigest::read_from_bytes(note_id_data)?; - - let merkle_path_data = row.get_ref(9)?.as_blob()?; - let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; - - let details_data = row.get_ref(10)?.as_blob_or_null()?; - let details = details_data.map(>::read_from_bytes).transpose()?; - - let note_type = row.get::<_, u8>(4)?.try_into()?; - let sender = AccountId::read_from_bytes(row.get_ref(5)?.as_blob()?)?; - let tag: u32 = row.get(6)?; - let aux: u64 = row.get(7)?; - let aux = aux.try_into().map_err(DatabaseError::InvalidFelt)?; - let execution_hint = column_value_as_u64(row, 8)?; - - let metadata = - NoteMetadata::new(sender, note_type, tag.into(), execution_hint.try_into()?, aux)?; - - notes.push(NoteRecord { - block_num: read_block_number(row, 0)?, - note_index: BlockNoteIndex::new(row.get(1)?, row.get(2)?)?, - note_id, - metadata, - details, - merkle_path, - }); + notes.push(NoteRecord::from_row(row)?); } Ok(notes) } -/// Insert notes to the DB using the given [Transaction]. +/// Insert notes to the DB using the given [Transaction]. Public notes should also have a nullifier. /// /// # Returns /// @@ -740,7 +704,10 @@ pub fn select_all_notes(conn: &mut Connection) -> Result> { /// /// The [Transaction] object is not consumed. It's up to the caller to commit or rollback the /// transaction. -pub fn insert_notes(transaction: &Transaction, notes: &[NoteRecord]) -> Result { +pub fn insert_notes( + transaction: &Transaction, + notes: &[(NoteRecord, Option)], +) -> Result { let mut stmt = transaction.prepare_cached(insert_sql!(notes { block_num, batch_index, @@ -749,14 +716,17 @@ pub fn insert_notes(transaction: &Transaction, notes: &[NoteRecord]) -> Result Result::into(note.metadata.execution_hint()), + u64_to_value(note.metadata.execution_hint().into()), note.merkle_path.to_bytes(), + // New notes are always unconsumed.
+ false, details, + // Beware: `Option` also implements `to_bytes`, but this is not what you want. + nullifier.as_ref().map(Nullifier::to_bytes), ])?; } @@ -847,56 +822,15 @@ pub fn select_notes_since_block_by_tag_and_sender( pub fn select_notes_by_id(conn: &mut Connection, note_ids: &[NoteId]) -> Result> { let note_ids: Vec = note_ids.iter().map(|id| id.to_bytes().into()).collect(); - let mut stmt = conn.prepare_cached( - " - SELECT - block_num, - batch_index, - note_index, - note_id, - note_type, - sender, - tag, - aux, - execution_hint, - merkle_path, - details - FROM - notes - WHERE - note_id IN rarray(?1) - ", - )?; + let mut stmt = conn.prepare_cached(&format!( + "SELECT {} FROM notes WHERE note_id IN rarray(?1)", + NoteRecord::SELECT_COLUMNS + ))?; let mut rows = stmt.query(params![Rc::new(note_ids)])?; let mut notes = Vec::new(); while let Some(row) = rows.next()? { - let note_id_data = row.get_ref(3)?.as_blob()?; - let note_id = NoteId::read_from_bytes(note_id_data)?; - - let merkle_path = read_from_blob_column(row, 9)?; - - let details_data = row.get_ref(10)?.as_blob_or_null()?; - let details = details_data.map(>::read_from_bytes).transpose()?; - - let note_type = row.get::<_, u8>(4)?.try_into()?; - let sender = read_from_blob_column(row, 5)?; - let tag: u32 = row.get(6)?; - let aux: u64 = row.get(7)?; - let aux = aux.try_into().map_err(DatabaseError::InvalidFelt)?; - let execution_hint = column_value_as_u64(row, 8)?; - - let metadata = - NoteMetadata::new(sender, note_type, tag.into(), execution_hint.try_into()?, aux)?; - - notes.push(NoteRecord { - block_num: read_block_number(row, 0)?, - note_index: BlockNoteIndex::new(row.get(1)?, row.get(2)?)?, - details, - note_id: note_id.into(), - metadata, - merkle_path, - }); + notes.push(NoteRecord::from_row(row)?); } Ok(notes) @@ -954,6 +888,55 @@ pub fn select_note_inclusion_proofs( Ok(result) } +/// Returns a paginated batch of network notes that have not yet been consumed. 
+/// +/// # Returns +/// +/// A set of unconsumed network notes with maximum length of `limit` and a pagination token to get +/// the next set. +#[cfg_attr(not(test), expect(dead_code, reason = "gRPC method is not yet implemented"))] +pub fn unconsumed_network_notes( + transaction: &Transaction, + mut token: PaginationToken, + limit: NonZeroUsize, +) -> Result<(Vec, PaginationToken)> { + assert_eq!( + NoteExecutionMode::Network as u8, + 0, + "Hardcoded execution value must match query" + ); + + // Select the rowid column so that we can return a pagination token. + // + // rowid column _must_ come after the note fields so that we don't mess up the + // `NoteRecord::from_row` call. + let mut stmt = transaction.prepare_cached(&format!( + " + SELECT {}, rowid FROM notes + WHERE + execution_mode = 0 AND consumed = FALSE AND rowid >= ? + ORDER BY rowid + LIMIT ? + ", + NoteRecord::SELECT_COLUMNS + ))?; + + let mut rows = stmt.query(params![token.0, limit])?; + + let mut notes = Vec::with_capacity(limit.into()); + while let Some(row) = rows.next()? { + notes.push(NoteRecord::from_row(row)?); + // Increment by 1 because we are using rowid >=, and otherwise we would include the last + // element in the next page as well. + token.0 = row.get::<_, i64>(11)? 
+ 1; + } + + Ok((notes, token)) +} + +#[derive(Default, Debug, Copy, Clone)] +pub struct PaginationToken(i64); + // BLOCK CHAIN QUERIES // ================================================================================================ @@ -1206,7 +1189,7 @@ pub fn get_note_sync( pub fn apply_block( transaction: &Transaction, block_header: &BlockHeader, - notes: &[NoteRecord], + notes: &[(NoteRecord, Option)], nullifiers: &[Nullifier], accounts: &[BlockAccountUpdate], ) -> Result { diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index 768d6b2ff..98faf1f1e 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -1,6 +1,8 @@ #![allow(clippy::similar_names, reason = "naming dummy test values is hard")] #![allow(clippy::too_many_lines, reason = "test code can be long")] +use std::num::NonZeroUsize; + use miden_lib::transaction::TransactionKernel; use miden_node_proto::domain::account::AccountSummary; use miden_objects::{ @@ -12,17 +14,20 @@ use miden_objects::{ asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}, block::{BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, BlockNumber}, crypto::{hash::rpo::RpoDigest, merkle::MerklePath}, - note::{NoteExecutionHint, NoteId, NoteMetadata, NoteType, Nullifier}, + note::{ + NoteExecutionHint, NoteExecutionMode, NoteId, NoteMetadata, NoteTag, NoteType, Nullifier, + }, testing::account_id::{ ACCOUNT_ID_FUNGIBLE_FAUCET_ON_CHAIN, ACCOUNT_ID_NON_FUNGIBLE_FAUCET_ON_CHAIN, ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, + ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN, }, Felt, FieldElement, Word, ZERO, }; use rusqlite::{vtab::array, Connection}; use super::{sql, AccountInfo, NoteRecord, NullifierInfo}; -use crate::db::{migrations::apply_migrations, TransactionSummary}; +use crate::db::{migrations::apply_migrations, sql::PaginationToken, TransactionSummary}; fn create_db() -> Connection { let mut conn = 
Connection::open_in_memory().unwrap(); @@ -191,7 +196,7 @@ fn sql_select_notes() { state.push(note.clone()); let transaction = conn.transaction().unwrap(); - let res = sql::insert_notes(&transaction, &[note]); + let res = sql::insert_notes(&transaction, &[(note, None)]); assert_eq!(res.unwrap(), 1, "One element must have been inserted"); transaction.commit().unwrap(); let notes = sql::select_all_notes(&mut conn).unwrap(); @@ -231,7 +236,7 @@ fn sql_select_notes_different_execution_hints() { state.push(note_none.clone()); let transaction = conn.transaction().unwrap(); - let res = sql::insert_notes(&transaction, &[note_none]); + let res = sql::insert_notes(&transaction, &[(note_none, None)]); assert_eq!(res.unwrap(), 1, "One element must have been inserted"); transaction.commit().unwrap(); let note = &sql::select_notes_by_id(&mut conn, &[num_to_rpo_digest(0).into()]).unwrap()[0]; @@ -255,7 +260,7 @@ fn sql_select_notes_different_execution_hints() { state.push(note_always.clone()); let transaction = conn.transaction().unwrap(); - let res = sql::insert_notes(&transaction, &[note_always]); + let res = sql::insert_notes(&transaction, &[(note_always, None)]); assert_eq!(res.unwrap(), 1, "One element must have been inserted"); transaction.commit().unwrap(); let note = &sql::select_notes_by_id(&mut conn, &[num_to_rpo_digest(1).into()]).unwrap()[0]; @@ -279,7 +284,7 @@ fn sql_select_notes_different_execution_hints() { state.push(note_after_block.clone()); let transaction = conn.transaction().unwrap(); - let res = sql::insert_notes(&transaction, &[note_after_block]); + let res = sql::insert_notes(&transaction, &[(note_after_block, None)]); assert_eq!(res.unwrap(), 1, "One element must have been inserted"); transaction.commit().unwrap(); let note = &sql::select_notes_by_id(&mut conn, &[num_to_rpo_digest(2).into()]).unwrap()[0]; @@ -289,6 +294,105 @@ fn sql_select_notes_different_execution_hints() { ); } +#[test] +fn sql_unconsumed_network_notes() { + // Number of notes to 
generate. + const N: u64 = 32; + + let mut conn = create_db(); + + let block_num = BlockNumber::from(1); + // An arbitrary public account (network note tag requires public account). + let account_id = ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN.try_into().unwrap(); + create_block(&mut conn, block_num); + + // Create some notes, of which half are network notes. + let notes = (0..N) + .map(|i| { + let is_network = i % 2 == 0; + let execution_mode = if is_network { + NoteExecutionMode::Network + } else { + NoteExecutionMode::Local + }; + let note = NoteRecord { + block_num, + note_index: BlockNoteIndex::new(0, i as usize).unwrap(), + note_id: num_to_rpo_digest(i), + metadata: NoteMetadata::new( + account_id, + NoteType::Public, + NoteTag::from_account_id(account_id, execution_mode).unwrap(), + NoteExecutionHint::none(), + Felt::default(), + ) + .unwrap(), + details: is_network.then_some(vec![1, 2, 3]), + merkle_path: MerklePath::new(vec![]), + }; + + (note, is_network.then_some(num_to_nullifier(i))) + }) + .collect::>(); + + // Copy out all network notes to assert against. These will be in chronological order already. + let network_notes = notes + .iter() + .filter_map(|(note, nullifier)| nullifier.is_some().then_some(note.clone())) + .collect::>(); + + // Insert the set of notes. + let db_tx = conn.transaction().unwrap(); + sql::insert_notes(&db_tx, ¬es).unwrap(); + + // Fetch all network notes by setting a limit larger than the amount available. + let (result, _) = sql::unconsumed_network_notes( + &db_tx, + PaginationToken::default(), + NonZeroUsize::new(N as usize * 10).unwrap(), + ) + .unwrap(); + assert_eq!(result, network_notes); + + // Check pagination works as expected. 
+ let limit = 5; + let mut token = PaginationToken::default(); + network_notes.chunks(limit).for_each(|expected| { + let (result, new_token) = + sql::unconsumed_network_notes(&db_tx, token, NonZeroUsize::new(limit).unwrap()) + .unwrap(); + token = new_token; + assert_eq!(result, expected); + }); + + // Returns empty when paging past the total. + let (result, _) = + sql::unconsumed_network_notes(&db_tx, token, NonZeroUsize::new(100).unwrap()).unwrap(); + assert!(result.is_empty()); + + // Consume every third network note and ensure these are now excluded from the results. + let consumed = notes + .iter() + .filter_map(|(_, nullifier)| *nullifier) + .step_by(3) + .collect::>(); + sql::insert_nullifiers_for_block(&db_tx, &consumed, block_num).unwrap(); + + let expected = network_notes + .iter() + .enumerate() + .filter(|(i, _)| i % 3 != 0) + .map(|(_, note)| note.clone()) + .collect::>(); + let (result, _) = sql::unconsumed_network_notes( + &db_tx, + PaginationToken::default(), + NonZeroUsize::new(N as usize * 10).unwrap(), + ) + .unwrap(); + assert_eq!(result, expected); +} + #[test] fn sql_select_accounts() { let mut conn = create_db(); @@ -809,7 +913,7 @@ fn notes() { }; let transaction = conn.transaction().unwrap(); - sql::insert_notes(&transaction, &[note.clone()]).unwrap(); + sql::insert_notes(&transaction, &[(note.clone(), None)]).unwrap(); transaction.commit().unwrap(); // test empty tags @@ -846,7 +950,7 @@ fn notes() { }; let transaction = conn.transaction().unwrap(); - sql::insert_notes(&transaction, &[note2.clone()]).unwrap(); + sql::insert_notes(&transaction, &[(note2.clone(), None)]).unwrap(); transaction.commit().unwrap(); // only first note is returned diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index e832c4078..c6b9e1ac9 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -381,9 +381,9 @@ impl State { let notes = block .notes() .map(|(note_index, note)| { - let details = match note { - 
OutputNote::Full(note) => Some(note.to_bytes()), - OutputNote::Header(_) => None, + let (details, nullifier) = match note { + OutputNote::Full(note) => (Some(note.to_bytes()), Some(note.nullifier())), + OutputNote::Header(_) => (None, None), note @ OutputNote::Partial(_) => { return Err(InvalidBlockError::InvalidOutputNoteType(Box::new( note.clone(), @@ -393,16 +393,18 @@ impl State { let merkle_path = note_tree.get_note_path(note_index); - Ok(NoteRecord { + let note_record = NoteRecord { block_num, note_index, note_id: note.id().into(), metadata: *note.metadata(), details, merkle_path, - }) + }; + + Ok((note_record, nullifier)) }) - .collect::, InvalidBlockError>>()?; + .collect::, InvalidBlockError>>()?; // Signals the transaction is ready to be committed, and the write lock can be acquired let (allow_acquire, acquired_allowed) = oneshot::channel::<()>(); From f8acaadf7f862075eef602dd47114caf19d33dda Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Tue, 25 Feb 2025 07:40:51 +0100 Subject: [PATCH 21/27] Use `LocalBlockProver` to build blocks (#709) * chore: Temp rename of `ProvenBlock` to `ProvenBlockWrapper` * chore: Update node to miden-base companion PR * feat: Update `get_block_inputs` * chore: Use branch that allows empty blocks * chore: Add changelog entry * chore: Simplify nullifier witness comment * chore: Update `created_nullifiers` naming * chore: Use consistent `_iter` naming * chore: Rename `preimage` var name * feat: Check if highest batch block num is greater than latest block * chore: Simplify genesis and add safety comment * chore: Remove unused miden-stdlib * feat: Remove unused `NoteAuthenticationInfo` * feat: Remove unused `BlockInclusionProof` * chore: Remove unused error * chore: Rename unknown block ref error variants * chore: Define empty tree / vec next to each other * chore: Add changelog * chore: Remove unused test util code * chore: Annotate set type * feat: Move witness fetching to separate method * feat: Use telemetry injection 
ext trait and calc erased notes * chore: Rename unauthenticated notes count * chore: Add comments on `TelemetryInjector` * fix: Num erased notes calculation * chore: Add doc comments to telemetry wrappers * chore: Fix typo in proto file * chore: Add expect messages on `BlockNoteIndex` * chore: Apply `make format` * chore: Use miden-base `next` branch * chore: Increase recursion limit * chore: `make format` --- CHANGELOG.md | 2 + Cargo.lock | 193 ++-- Cargo.toml | 2 - bin/node/src/main.rs | 4 + crates/block-producer/Cargo.toml | 4 +- crates/block-producer/src/block.rs | 106 -- .../block-producer/src/block_builder/mod.rs | 245 ++--- .../prover/asm/block_kernel.masm | 244 ----- .../src/block_builder/prover/block_witness.rs | 322 ------ .../src/block_builder/prover/mod.rs | 132 --- .../src/block_builder/prover/tests.rs | 951 ------------------ crates/block-producer/src/errors.rs | 48 +- crates/block-producer/src/lib.rs | 1 - crates/block-producer/src/store/mod.rs | 16 +- crates/block-producer/src/test_utils/batch.rs | 8 +- crates/block-producer/src/test_utils/block.rs | 59 +- crates/block-producer/src/test_utils/store.rs | 85 +- crates/proto/src/domain/account.rs | 34 +- crates/proto/src/domain/block.rs | 124 ++- crates/proto/src/domain/note.rs | 43 - crates/proto/src/domain/nullifier.rs | 24 +- crates/proto/src/generated/block.rs | 13 - crates/proto/src/generated/note.rs | 10 - crates/proto/src/generated/requests.rs | 17 +- crates/proto/src/generated/responses.rs | 44 +- crates/proto/src/lib.rs | 4 +- crates/rpc-proto/proto/block.proto | 12 - crates/rpc-proto/proto/note.proto | 9 - crates/rpc-proto/proto/requests.proto | 19 +- crates/rpc-proto/proto/responses.proto | 36 +- crates/store/src/db/mod.rs | 13 +- crates/store/src/db/sql/mod.rs | 13 +- crates/store/src/errors.rs | 52 +- crates/store/src/genesis.rs | 29 +- crates/store/src/server/api.rs | 20 +- crates/store/src/state.rs | 282 +++--- proto/block.proto | 12 - proto/note.proto | 9 - proto/requests.proto | 19 +- 
proto/responses.proto | 36 +- 40 files changed, 722 insertions(+), 2574 deletions(-) delete mode 100644 crates/block-producer/src/block.rs delete mode 100644 crates/block-producer/src/block_builder/prover/asm/block_kernel.masm delete mode 100644 crates/block-producer/src/block_builder/prover/block_witness.rs delete mode 100644 crates/block-producer/src/block_builder/prover/mod.rs delete mode 100644 crates/block-producer/src/block_builder/prover/tests.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index e51e27f17..81da63401 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,12 +11,14 @@ - Add an optional open-telemetry trace exporter (#659, #690). - Support tracing across gRPC boundaries using remote tracing context (#669). - Instrument the block-producer's block building process (#676). +- Use `LocalBlockProver` for block building (#709). - Initial developer and operator guides covering monitoring (#699). ### Changes - [BREAKING] Updated minimum Rust version to 1.84. - [BREAKING] `Endpoint` configuration simplified to a single string (#654). +- [BREAKING] Update `GetBlockInputs` RPC (#709). - [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). - [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). 
diff --git a/Cargo.lock b/Cargo.lock index a7eb2e6ce..fe8458343 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -105,9 +105,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "arrayref" @@ -385,9 +385,9 @@ checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" [[package]] name = "blake3" -version = "1.5.5" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ee0c1824c4dea5b5f81736aff91bae041d2c07ee1192bec91054e10e3e601e" +checksum = "1230237285e3e10cde447185e8975408ae24deaa67205ce684805c25bc0c7937" dependencies = [ "arrayref", "arrayvec", @@ -463,9 +463,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.11" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4730490333d58093109dc02c23174c3f4d490998c3fed3cc8e82d57afedb9cf" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "jobserver", "libc", @@ -524,9 +524,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.28" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" +checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" dependencies = [ "clap_builder", "clap_derive", @@ -534,9 +534,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.27" +version = "4.5.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7" +checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" dependencies = [ "anstream", 
"anstyle", @@ -811,9 +811,9 @@ checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d" [[package]] name = "either" -version = "1.13.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "ena" @@ -826,9 +826,9 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" @@ -880,6 +880,12 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "fnv" version = "1.0.7" @@ -1046,9 +1052,9 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "h2" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" dependencies = [ "atomic-waker", "bytes", @@ -1440,15 +1446,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.14.0" @@ -1503,7 +1500,7 @@ 
dependencies = [ "ena", "itertools 0.11.0", "lalrpop-util", - "petgraph", + "petgraph 0.6.5", "regex", "regex-syntax 0.8.5", "string_cache", @@ -1533,9 +1530,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libloading" @@ -1599,9 +1596,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "logos" @@ -1706,6 +1703,17 @@ dependencies = [ "unicode-width 0.2.0", ] +[[package]] +name = "miden-block-prover" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +dependencies = [ + "miden-crypto", + "miden-lib", + "miden-objects", + "thiserror 2.0.11", +] + [[package]] name = "miden-core" version = "0.12.0" @@ -1728,9 +1736,9 @@ dependencies = [ [[package]] name = "miden-crypto" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1945918276152bd9b8e8434643ad24d4968e075b68a5ed03927b53ac75490a79" +checksum = "1d8f76b64bfbb75705403ec3e2faad6a045544871d9c441758becc55415cfe64" dependencies = [ "blake3", "cc", @@ -1787,7 +1795,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" 
dependencies = [ "miden-assembly", "miden-objects", @@ -1870,13 +1878,13 @@ dependencies = [ "futures", "itertools 0.14.0", "miden-air", + "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-test-macro", "miden-node-utils", "miden-objects", "miden-processor", - "miden-stdlib", "miden-tx", "miden-tx-batch-prover", "pretty_assertions", @@ -1983,7 +1991,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2042,7 +2050,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "async-trait", "miden-lib", @@ -2059,7 +2067,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#66cf1bc8744cf739aa3ef726300c389796047394" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" dependencies = [ "miden-core", "miden-crypto", @@ -2129,9 +2137,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -2300,9 +2308,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.20.2" +version = "1.20.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" @@ -2383,9 +2391,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" +checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" [[package]] name = "parking_lot" @@ -2460,7 +2468,17 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ - "fixedbitset", + "fixedbitset 0.4.2", + "indexmap 2.7.1", +] + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset 0.5.7", "indexmap 2.7.1", ] @@ -2596,9 +2614,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", @@ -2606,16 +2624,16 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck", - "itertools 0.13.0", + "itertools 0.14.0", "log", 
"multimap", "once_cell", - "petgraph", + "petgraph 0.7.1", "prettyplease", "prost", "prost-types", @@ -2626,12 +2644,12 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.13.0", + "itertools 0.14.0", "proc-macro2", "quote", "syn", @@ -2639,9 +2657,9 @@ dependencies = [ [[package]] name = "prost-reflect" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e92b959d24e05a3e2da1d0beb55b48bc8a97059b8336ea617780bd6addbbfb5a" +checksum = "a7b318f733603136dcc61aa9e77c928d67f87d2436c34ec052ba3f1b5ca219de" dependencies = [ "logos", "miette", @@ -2652,9 +2670,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" dependencies = [ "prost", ] @@ -2771,9 +2789,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ "bitflags", ] @@ -2835,15 +2853,14 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "ring" -version = "0.17.8" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = 
"da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", "getrandom 0.2.15", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -2917,9 +2934,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.22" +version = "0.23.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" +checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" dependencies = [ "log", "once_cell", @@ -3071,18 +3088,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", @@ -3091,9 +3108,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -3174,9 +3191,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = 
"7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" [[package]] name = "smawk" @@ -3301,15 +3318,15 @@ dependencies = [ [[package]] name = "target-triple" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" +checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" [[package]] name = "tempfile" -version = "3.16.0" +version = "3.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" +checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", @@ -3525,9 +3542,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -3546,9 +3563,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.23" +version = "0.22.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap 2.7.1", "serde", @@ -3834,9 +3851,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unarray" @@ -3861,9 +3878,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = 
"unicode-ident" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" +checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" [[package]] name = "unicode-linebreak" @@ -4357,18 +4374,18 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] [[package]] name = "winter-air" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8fdb702503625f54dcaf9222aa2c7a0b2e868b3eb84b90d1837d68034bf999" +checksum = "827ef2aa5a5ab663936e0a6326286e0fc83321771df0d9ea20c46c72c8baa90d" dependencies = [ "libm", "winter-crypto", diff --git a/Cargo.toml b/Cargo.toml index 2c05af39a..7d8563b0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,9 +37,7 @@ miden-node-test-macro = { path = "crates/test-macro" } miden-node-utils = { path = "crates/utils", version = "0.8" } miden-objects = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } miden-processor = { version = "0.12" } -miden-stdlib = { version = "0.12", default-features = false } miden-tx = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } -miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } prost = { version = "0.13" } rand = { version = "0.8" } thiserror = { version = "2.0", default-features = false } diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 599e2a6fc..2845b9395 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -1,3 +1,7 @@ +// This is required due to a 
long chain of and_then in BlockBuilder::build_block causing rust error +// E0275. +#![recursion_limit = "256"] + use std::path::PathBuf; use anyhow::{anyhow, Context}; diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index dabf8d7bd..6c354019c 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -21,14 +21,14 @@ tracing-forest = ["miden-node-utils/tracing-forest"] async-trait = { version = "0.1" } futures = { version = "0.3" } itertools = { workspace = true } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } miden-processor = { workspace = true } -miden-stdlib = { workspace = true } miden-tx = { workspace = true } -miden-tx-batch-prover = { workspace = true } +miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } rand = { version = "0.8" } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } diff --git a/crates/block-producer/src/block.rs b/crates/block-producer/src/block.rs deleted file mode 100644 index 857a5b018..000000000 --- a/crates/block-producer/src/block.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::collections::BTreeMap; - -use miden_node_proto::{ - domain::note::NoteAuthenticationInfo, - errors::{ConversionError, MissingFieldHelper}, - generated::responses::GetBlockInputsResponse, - AccountInputRecord, NullifierWitness, -}; -use miden_objects::{ - account::AccountId, - block::BlockHeader, - crypto::merkle::{MerklePath, MmrPeaks, SmtProof}, - note::Nullifier, - Digest, -}; - -// BLOCK INPUTS -// ================================================================================================ - -/// Information needed from the store to build a block -#[derive(Clone, Debug)] -pub struct BlockInputs { - /// Previous 
block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub accounts: BTreeMap, - - /// The requested nullifiers and their authentication paths - pub nullifiers: BTreeMap, - - /// List of unauthenticated notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -#[derive(Clone, Debug, Default)] -pub struct AccountWitness { - pub hash: Digest, - pub proof: MerklePath, -} - -impl TryFrom for BlockInputs { - type Error = ConversionError; - - fn try_from(response: GetBlockInputsResponse) -> Result { - let block_header: BlockHeader = response - .block_header - .ok_or(miden_node_proto::generated::block::BlockHeader::missing_field("block_header"))? - .try_into()?; - - let chain_peaks = { - // setting the number of leaves to the current block number gives us one leaf less than - // what is currently in the chain MMR (i.e., chain MMR with block_num = 1 has 2 leave); - // this is because GetBlockInputs returns the state of the chain MMR as of one block - // ago so that block_header.chain_root matches the hash of MMR peaks. - let num_leaves = block_header.block_num().as_usize(); - - MmrPeaks::new( - num_leaves, - response - .mmr_peaks - .into_iter() - .map(TryInto::try_into) - .collect::>()?, - )? 
- }; - - let accounts = response - .account_states - .into_iter() - .map(|entry| { - let domain: AccountInputRecord = entry.try_into()?; - let witness = AccountWitness { - hash: domain.account_hash, - proof: domain.proof, - }; - Ok((domain.account_id, witness)) - }) - .collect::, ConversionError>>()?; - - let nullifiers = response - .nullifiers - .into_iter() - .map(|entry| { - let witness: NullifierWitness = entry.try_into()?; - Ok((witness.nullifier, witness.proof)) - }) - .collect::, ConversionError>>()?; - - let found_unauthenticated_notes = response - .found_unauthenticated_notes - .ok_or(GetBlockInputsResponse::missing_field("found_authenticated_notes"))? - .try_into()?; - - Ok(Self { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } -} diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index bbdc23c5a..9a24f4aa4 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,30 +1,23 @@ -use std::{ - collections::BTreeSet, - ops::{Add, Range}, -}; +use std::ops::Range; use futures::FutureExt; +use miden_block_prover::LocalBlockProver; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ - account::AccountId, batch::ProvenBatch, - block::{Block, BlockNumber}, - note::{NoteHeader, NoteId, Nullifier}, - transaction::{InputNoteCommitment, OutputNote}, + block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, + note::NoteHeader, + MIN_PROOF_SECURITY_LEVEL, }; use rand::Rng; use tokio::time::Duration; use tracing::{instrument, Span}; use crate::{ - block::BlockInputs, errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, - COMPONENT, SERVER_BLOCK_FREQUENCY, + errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, + SERVER_BLOCK_FREQUENCY, }; -pub(crate) mod prover; - -use self::prover::{block_witness::BlockWitness, BlockProver}; - // BLOCK 
BUILDER // ================================================================================================= @@ -39,7 +32,9 @@ pub struct BlockBuilder { pub failure_rate: f64, pub store: StoreClient, - pub block_kernel: BlockProver, + + /// The prover used to prove a proposed block into a proven block. + pub block_prover: LocalBlockProver, } impl BlockBuilder { @@ -49,7 +44,7 @@ impl BlockBuilder { // Note: The range cannot be empty. simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, - block_kernel: BlockProver::new(), + block_prover: LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL), store, } } @@ -98,7 +93,9 @@ impl BlockBuilder { Self::select_block(mempool) .inspect(SelectedBlock::inject_telemetry) .then(|selected| self.get_block_inputs(selected)) - .inspect_ok(BlockSummaryAndInputs::inject_telemetry) + .inspect_ok(BlockBatchesAndInputs::inject_telemetry) + .and_then(|inputs| self.propose_block(inputs)) + .inspect_ok(ProposedBlock::inject_telemetry) .and_then(|inputs| self.prove_block(inputs)) .inspect_ok(ProvenBlock::inject_telemetry) // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot @@ -119,67 +116,98 @@ impl BlockBuilder { SelectedBlock { block_number, batches } } + /// Fetches block inputs from the store for the [`SelectedBlock`]. + /// + /// For a given set of batches, we need to get the following block inputs from the store: + /// + /// - Note inclusion proofs for unauthenticated notes (not required to be complete due to the + /// possibility of note erasure) + /// - A chain MMR with: + /// - All blocks referenced by batches + /// - All blocks referenced by note inclusion proofs + /// - Account witnesses for all accounts updated in the block + /// - Nullifier witnesses for all nullifiers created in the block + /// - Due to note erasure the set of nullifiers the block creates is not necessarily equal to + /// the union of all nullifiers created in proven batches.
However, since we don't yet know + /// which nullifiers the block will actually create, we fetch witnesses for all nullifiers + /// created by batches. If we knew that a certain note will be erased, we would not have to + /// supply a nullifier witness for it. #[instrument(target = COMPONENT, name = "block_builder.get_block_inputs", skip_all, err)] async fn get_block_inputs( &self, selected_block: SelectedBlock, - ) -> Result { + ) -> Result { let SelectedBlock { block_number: _, batches } = selected_block; - let summary = BlockSummary::summarize_batches(&batches); + + let batch_iter = batches.iter(); + + let unauthenticated_notes_iter = batch_iter.clone().flat_map(|batch| { + // Note: .cloned() shouldn't be necessary but not having it produces an odd lifetime + // error in BlockProducer::serve. Not sure if there's a better fix. Error: + // implementation of `FnOnce` is not general enough + // closure with signature `fn(&InputNoteCommitment) -> miden_objects::note::NoteId` must + // implement `FnOnce<(&InputNoteCommitment,)>` ...but it actually implements + // `FnOnce<(&InputNoteCommitment,)>` + batch + .input_notes() + .iter() + .cloned() + .filter_map(|note| note.header().map(NoteHeader::id)) + }); + let block_references_iter = batch_iter.clone().map(ProvenBatch::reference_block_num); + let account_ids_iter = batch_iter.clone().flat_map(ProvenBatch::updated_accounts); + let created_nullifiers_iter = batch_iter.flat_map(ProvenBatch::created_nullifiers); let inputs = self .store .get_block_inputs( - summary.updated_accounts.iter().copied(), - summary.nullifiers.iter(), - summary.dangling_notes.iter(), + account_ids_iter, + created_nullifiers_iter, + unauthenticated_notes_iter, + block_references_iter, ) .await .map_err(BuildBlockError::GetBlockInputsFailed)?; - let missing_notes: Vec<_> = summary - .dangling_notes - .difference(&inputs.found_unauthenticated_notes.note_ids()) - .copied() - .collect(); - if !missing_notes.is_empty() { - return 
Err(BuildBlockError::UnauthenticatedNotesNotFound(missing_notes)); - } + Ok(BlockBatchesAndInputs { batches, inputs }) + } + + #[instrument(target = COMPONENT, name = "block_builder.propose_block", skip_all, err)] + async fn propose_block( + &self, + batches_inputs: BlockBatchesAndInputs, + ) -> Result { + let BlockBatchesAndInputs { batches, inputs } = batches_inputs; - Ok(BlockSummaryAndInputs { batches, summary, inputs }) + let proposed_block = + ProposedBlock::new(inputs, batches).map_err(BuildBlockError::ProposeBlockFailed)?; + + Ok(proposed_block) } #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] async fn prove_block( &self, - preimage: BlockSummaryAndInputs, + proposed_block: ProposedBlock, ) -> Result { - let BlockSummaryAndInputs { batches, summary, inputs } = preimage; - - let (block_header_witness, updated_accounts) = BlockWitness::new(inputs, &batches)?; - - let new_block_header = self.block_kernel.prove(block_header_witness)?; - - let block = Block::new( - new_block_header, - updated_accounts, - summary.output_notes, - summary.nullifiers, - )?; + let proven_block = self + .block_prover + .prove(proposed_block) + .map_err(BuildBlockError::ProveBlockFailed)?; self.simulate_proving().await; - Ok(ProvenBlock { block }) + Ok(proven_block) } #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - proven_block: ProvenBlock, + built_block: ProvenBlock, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&proven_block.block) + .apply_block(&built_block) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; @@ -219,65 +247,12 @@ impl BlockBuilder { } } -struct BlockSummary { - updated_accounts: BTreeSet, - nullifiers: Vec, - output_notes: Vec>, - dangling_notes: BTreeSet, -} - -impl BlockSummary { - #[instrument(target = COMPONENT, name = "block_builder.summarize_batches", skip_all)] - fn summarize_batches(batches: 
&[ProvenBatch]) -> Self { - let updated_accounts: BTreeSet = batches - .iter() - .flat_map(ProvenBatch::account_updates) - .map(|(account_id, _)| *account_id) - .collect(); - - let output_notes: Vec<_> = - batches.iter().map(|batch| batch.output_notes().to_vec()).collect(); - - let nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - // Populate set of output notes from all batches - let output_notes_set: BTreeSet<_> = output_notes - .iter() - .flat_map(|output_notes| output_notes.iter().map(OutputNote::id)) - .collect(); - - // Build a set of unauthenticated input notes for this block which do not have a - // matching output note produced in this block - let dangling_notes: BTreeSet<_> = batches - .iter() - .flat_map(ProvenBatch::input_notes) - .filter_map(InputNoteCommitment::header) - .map(NoteHeader::id) - .filter(|note_id| !output_notes_set.contains(note_id)) - .collect(); - - Self { - updated_accounts, - nullifiers, - output_notes, - dangling_notes, - } - } -} - +/// A wrapper around batches selected for inclusion in a block, primarily used to be able to inject +/// telemetry in-between the selection and fetching the required [`BlockInputs`]. struct SelectedBlock { block_number: BlockNumber, batches: Vec, } -struct BlockSummaryAndInputs { - batches: Vec, - summary: BlockSummary, - inputs: BlockInputs, -} -struct ProvenBlock { - block: Block, -} impl SelectedBlock { fn inject_telemetry(&self) { @@ -287,41 +262,85 @@ impl SelectedBlock { } } -impl BlockSummaryAndInputs { +/// A wrapper around the inputs needed to build a [`ProposedBlock`], primarily used to be able to +/// inject telemetry in-between fetching block inputs and proposing the block. +struct BlockBatchesAndInputs { + batches: Vec, + inputs: BlockInputs, +} + +impl BlockBatchesAndInputs { fn inject_telemetry(&self) { let span = Span::current(); // SAFETY: We do not expect to have more than u32::MAX of any count per block.
span.set_attribute( "block.updated_accounts.count", - i64::try_from(self.summary.updated_accounts.len()) + i64::try_from(self.inputs.account_witnesses().len()) .expect("less than u32::MAX account updates"), ); span.set_attribute( - "block.output_notes.count", - i64::try_from(self.summary.output_notes.iter().fold(0, |acc, x| acc.add(x.len()))) - .expect("less than u32::MAX output notes"), + "block.erased_note_proofs.count", + i64::try_from(self.inputs.unauthenticated_note_proofs().len()) + .expect("less than u32::MAX unauthenticated notes"), ); + } +} + +/// An extension trait used only locally to implement telemetry injection. +trait TelemetryInjectorExt { + /// Inject [`tracing`] telemetry from self. + fn inject_telemetry(&self); +} + +impl TelemetryInjectorExt for ProposedBlock { + /// Emit the input and output note related attributes. We do this here since this is the + /// earliest point we can set attributes after note erasure was done. + fn inject_telemetry(&self) { + let span = Span::current(); + span.set_attribute( "block.nullifiers.count", - i64::try_from(self.summary.nullifiers.len()).expect("less than u32::MAX nullifiers"), + u32::try_from(self.created_nullifiers().len()) + .expect("should have less than u32::MAX created nullifiers"), ); + let num_block_created_notes = self + .output_note_batches() + .iter() + .fold(0, |acc, output_notes| acc + output_notes.len()); + span.set_attribute( + "block.output_notes.count", + u32::try_from(num_block_created_notes) + .expect("should have less than u32::MAX output notes"), + ); + + let num_batch_created_notes = + self.batches().iter().fold(0, |acc, batch| acc + batch.output_notes().len()); + span.set_attribute( + "block.batches.output_notes.count", + u32::try_from(num_batch_created_notes) + .expect("should have less than u32::MAX erased notes"), + ); + + let num_erased_notes = num_batch_created_notes + .checked_sub(num_block_created_notes) + .expect("all batches in the block should not create fewer notes than the 
block itself"); span.set_attribute( - "block.dangling_notes.count", - i64::try_from(self.summary.dangling_notes.len()) - .expect("less than u32::MAX dangling notes"), + "block.erased_notes.count", + u32::try_from(num_erased_notes).expect("should have less than u32::MAX erased notes"), ); } } -impl ProvenBlock { +impl TelemetryInjectorExt for ProvenBlock { fn inject_telemetry(&self) { let span = Span::current(); - let header = self.block.header(); + let header = self.header(); span.set_attribute("block.hash", header.hash()); span.set_attribute("block.sub_hash", header.sub_hash()); span.set_attribute("block.parent_hash", header.prev_hash()); + span.set_attribute("block.timestamp", header.timestamp()); span.set_attribute("block.protocol.version", i64::from(header.version())); diff --git a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm b/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm deleted file mode 100644 index 309501d59..000000000 --- a/crates/block-producer/src/block_builder/prover/asm/block_kernel.masm +++ /dev/null @@ -1,244 +0,0 @@ -#! Note: For now, the "block kernel" only computes the account root. Eventually, it will compute -#! the entire block header. -#! -#! Stack inputs: [num_accounts_updated, OLD_ACCOUNT_ROOT, NEW_ACCOUNT_HASH_0, account_id_0, ... , -#! NEW_ACCOUNT_HASH_n, account_id_n] - -use.std::collections::smt -use.std::collections::mmr -use.std::sys - -const.ACCOUNT_TREE_DEPTH=64 -const.BLOCK_NOTES_BATCH_TREE_DEPTH=6 -const.CHAIN_MMR_PTR=1000 - -#! Compute the account root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_accounts_updated, OLD_ACCOUNT_ROOT, [NEW_ACCOUNT_HASH_i, account_id_i]] -#! Outputs: -#! 
Operand stack: [NEW_ACCOUNT_ROOT] -proc.compute_account_root - # move the number of updated accounts and an old account root to the operand stack - adv_push.5 - # OS => [OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, OLD_ACCOUNT_ROOT, num_accounts_updated] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - while.true - # num_accounts_updated here serves as a counter, so rename it accordingly - # old account root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[NEW_ACCOUNT_HASH_i, account_id_i]] - - # move the account hash to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [account_id_i, [NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # move the account id to the operand stack, push the account tree depth - adv_push.1 push.ACCOUNT_TREE_DEPTH - # OS => [account_tree_depth, account_id_i, ROOT_i, NEW_ACCOUNT_HASH_i, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[NEW_ACCOUNT_HASH_{i+1}, account_id_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the note root. -#! -#! Each batch contains a tree of depth 10 for its created notes. The block's created notes tree is -#! created by aggregating up to 2^6 tree roots coming from the batches contained in the block. -#! -#! `SMT_EMPTY_ROOT` must be `E16`, the root of the empty tree of depth 16. If less than 2^6 batches -#! are contained in the block, `E10` is used as the padding value; this is derived from the fact -#! that `SMT_EMPTY_ROOT` is `E16`, and that our tree has depth 6. -#! -#! Inputs: -#! 
Operand stack: [] -#! Advice stack: [num_notes_updated, SMT_EMPTY_ROOT, [BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] -#! Outputs: -#! Operand stack: [NOTES_ROOT] -proc.compute_note_root - # move the number of updated notes and empty root to the operand stack - adv_push.5 - # OS => [SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # assess if we should loop - dup.4 neq.0 - # OS => [flag, SMT_EMPTY_ROOT, num_notes_updated] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - while.true - # num_notes_updated here serves as a counter, so rename it accordingly - # empty root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_i, batch_note_root_idx_i]] - - # move the batch note tree root to the operand stack and move it below the root - adv_push.4 swapw - # OS => [ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [batch_note_root_idx_i, [BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # move the batch note root index to the operand stack, push the block notes batch tree depth - adv_push.1 push.BLOCK_NOTES_BATCH_TREE_DEPTH - # OS => [batch_tree_depth, batch_note_root_idx_i, ROOT_i, BATCH_NOTE_TREE_ROOT_i, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # set new value in SMT - mtree_set dropw - # OS => [ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - - # loop counter - movup.4 sub.1 dup movdn.5 neq.0 - # OS => [flag, ROOT_{i+1}, counter] - # AS => [[BATCH_NOTE_TREE_ROOT_{i+1}, batch_note_root_idx_{i+1}]] - end - - # drop the counter - movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the nullifier root. -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [num_produced_nullifiers, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, [NULLIFIER_i]] -#! Outputs: -#! 
Operand stack: [NULLIFIER_ROOT] -proc.compute_nullifier_root - # move the number of produced nullifiers, old root and nullifier value to the operand stack; - # move nullifier value below the root - adv_push.9 swapw - # OS => [OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - # assess if we should loop - dup.8 neq.0 - # OS => [flag, OLD_NULLIFIER_ROOT, NULLIFIER_VALUE, num_produced_nullifiers] - # AS => [[NULLIFIER_i]] - - while.true - # num_produced_nullifiers here serves as a counter, so rename it accordingly - # old nullifier root will be updated in each iteration, so rename it to the ROOT_i - # OS => [ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_i]] - - # move the nullifier hash to the operand stack - adv_push.4 - # OS => [NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # dup the nullifier value - dupw.2 - # OS => [NULLIFIER_VALUE, NULLIFIER_i, ROOT_i, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - exec.smt::set - # OS => [OLD_VALUE, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # Check that OLD_VALUE == 0 (i.e. that nullifier was indeed not previously produced) - assertz assertz assertz assertz - # OS => [ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - - # loop counter - movup.8 sub.1 dup movdn.9 neq.0 - # OS => [flag, ROOT_{i+1}, NULLIFIER_VALUE, counter] - # AS => [[NULLIFIER_{i+1}]] - end - - # drop the counter and the nullifier value - swapw dropw movup.4 drop - # OS => [ROOT_{n-1}] - # AS => [] -end - -#! Compute the chain MMR root -#! -#! Inputs: -#! Operand stack: [] -#! Advice stack: [PREV_BLOCK_HASH_TO_INSERT, PREV_CHAIN_MMR_HASH] -#! Advice map: { -#! PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! 
Operand stack: [CHAIN_MMR_ROOT] -proc.compute_chain_mmr_root - # move the previous block hash and chain MMR hash to the operand stack - adv_push.8 - # OS => [PREV_CHAIN_MMR_HASH, PREV_BLOCK_HASH_TO_INSERT] - # AS => [] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_CHAIN_MMR_HASH, chain_mmr_ptr, PREV_BLOCK_HASH_TO_INSERT] - - # load the chain MMR (as of previous block) at memory location CHAIN_MMR_PTR - exec.mmr::unpack - # OS => [PREV_BLOCK_HASH_TO_INSERT] - - # push chain MMR pointer to the operand stack - push.CHAIN_MMR_PTR movdn.4 - # OS => [PREV_BLOCK_HASH_TO_INSERT, chain_mmr_ptr] - - # add PREV_BLOCK_HASH_TO_INSERT to chain MMR - exec.mmr::add - # OS => [] - - # Compute new MMR root - push.CHAIN_MMR_PTR exec.mmr::pack - # OS => [CHAIN_MMR_ROOT] -end - -#! Inputs: -#! Operand stack: [] -#! Advice stack: [, , , ] -#! Advice map: { -#! PREV_CHAIN_MMR_HASH: [NUM_LEAVES, [peak_i], ] -#! } -#! Outputs: -#! Operand stack: [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] -begin - exec.compute_account_root mem_storew.0 dropw - # => [, , ] - - exec.compute_note_root mem_storew.4 dropw - # => [, ] - - exec.compute_nullifier_root mem_storew.8 dropw - # => [] - - exec.compute_chain_mmr_root - # => [CHAIN_MMR_ROOT] - - # Load output on stack - padw mem_loadw.8 padw mem_loadw.4 padw mem_loadw.0 - # => [ACCOUNT_ROOT, NOTE_ROOT, NULLIFIER_ROOT, CHAIN_MMR_ROOT] - - # truncate the stack - exec.sys::truncate_stack -end diff --git a/crates/block-producer/src/block_builder/prover/block_witness.rs b/crates/block-producer/src/block_builder/prover/block_witness.rs deleted file mode 100644 index a0972e541..000000000 --- a/crates/block-producer/src/block_builder/prover/block_witness.rs +++ /dev/null @@ -1,322 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use miden_objects::{ - account::{delta::AccountUpdateDetails, AccountId}, - batch::{BatchAccountUpdate, ProvenBatch}, - block::{BlockAccountUpdate, BlockHeader}, - 
crypto::merkle::{EmptySubtreeRoots, MerklePath, MerkleStore, MmrPeaks, SmtProof}, - note::Nullifier, - transaction::TransactionId, - vm::{AdviceInputs, StackInputs}, - Digest, Felt, BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, ZERO, -}; - -use crate::{ - block::BlockInputs, - errors::{BlockProverError, BuildBlockError}, -}; - -// BLOCK WITNESS -// ================================================================================================= - -/// Provides inputs to the `BlockKernel` so that it can generate the new header. -#[derive(Debug, PartialEq)] -pub struct BlockWitness { - pub(super) updated_accounts: Vec<(AccountId, AccountUpdateWitness)>, - /// (`batch_index`, `created_notes_root`) for batches that contain notes - pub(super) batch_created_notes_roots: BTreeMap, - pub(super) produced_nullifiers: BTreeMap, - pub(super) chain_peaks: MmrPeaks, - pub(super) prev_header: BlockHeader, -} - -impl BlockWitness { - pub fn new( - mut block_inputs: BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(Self, Vec), BuildBlockError> { - // This limit should be enforced by the mempool. - assert!(batches.len() <= MAX_BATCHES_PER_BLOCK); - - Self::validate_nullifiers(&block_inputs, batches)?; - - let batch_created_notes_roots = batches - .iter() - .enumerate() - .filter(|(_, batch)| !batch.output_notes().is_empty()) - .map(|(batch_index, batch)| (batch_index, batch.output_notes_tree().root())) - .collect(); - - // Order account updates by account ID and each update's initial state hash. - // - // This let's us chronologically order the updates per account across batches. - let mut updated_accounts = - BTreeMap::>::new(); - for (account_id, update) in batches.iter().flat_map(ProvenBatch::account_updates) { - updated_accounts - .entry(*account_id) - .or_default() - .insert(update.initial_state_commitment(), update.clone()); - } - - // Build account witnesses. 
- let mut account_witnesses = Vec::with_capacity(updated_accounts.len()); - let mut block_updates = Vec::with_capacity(updated_accounts.len()); - - for (account_id, mut updates) in updated_accounts { - let (initial_state_hash, proof) = block_inputs - .accounts - .remove(&account_id) - .map(|witness| (witness.hash, witness.proof)) - .ok_or(BuildBlockError::MissingAccountInput(account_id))?; - - let mut details: Option = None; - - // Chronologically chain updates for this account together using the state hashes to - // link them. - let mut transactions = Vec::new(); - let mut current_hash = initial_state_hash; - while !updates.is_empty() { - let update = updates.remove(¤t_hash).ok_or_else(|| { - BuildBlockError::InconsistentAccountStateTransition( - account_id, - current_hash, - updates.keys().copied().collect(), - ) - })?; - - current_hash = update.final_state_commitment(); - let (update_transactions, update_details) = update.into_parts(); - transactions.extend(update_transactions); - - details = Some(match details { - None => update_details, - Some(details) => details.merge(update_details).map_err(|source| { - BuildBlockError::AccountUpdateError { account_id, source } - })?, - }); - } - - account_witnesses.push(( - account_id, - AccountUpdateWitness { - initial_state_hash, - final_state_hash: current_hash, - proof, - transactions: transactions.clone(), - }, - )); - - block_updates.push(BlockAccountUpdate::new( - account_id, - current_hash, - details.expect("Must be some by now"), - transactions, - )); - } - - if !block_inputs.accounts.is_empty() { - return Err(BuildBlockError::ExtraStoreData( - block_inputs.accounts.keys().copied().collect(), - )); - } - - Ok(( - Self { - updated_accounts: account_witnesses, - batch_created_notes_roots, - produced_nullifiers: block_inputs.nullifiers, - chain_peaks: block_inputs.chain_peaks, - prev_header: block_inputs.block_header, - }, - block_updates, - )) - } - - /// Converts [`BlockWitness`] into inputs to the block kernel 
program - pub(super) fn into_program_inputs( - self, - ) -> Result<(AdviceInputs, StackInputs), BlockProverError> { - let advice_inputs = self.build_advice_inputs()?; - - Ok((advice_inputs, StackInputs::default())) - } - - /// Returns an iterator over all transactions which affected accounts in the block with - /// corresponding account IDs. - pub(super) fn transactions(&self) -> impl Iterator + '_ { - self.updated_accounts.iter().flat_map(|(account_id, update)| { - update.transactions.iter().map(move |tx_id| (*tx_id, *account_id)) - }) - } - - // HELPERS - // --------------------------------------------------------------------------------------------- - - /// Validates that the nullifiers returned from the store are the same the produced nullifiers - /// in the batches. Note that validation that the value of the nullifiers is `0` will be - /// done in MASM. - fn validate_nullifiers( - block_inputs: &BlockInputs, - batches: &[ProvenBatch], - ) -> Result<(), BuildBlockError> { - let produced_nullifiers_from_store: BTreeSet = - block_inputs.nullifiers.keys().copied().collect(); - - let produced_nullifiers_from_batches: BTreeSet = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - if produced_nullifiers_from_store == produced_nullifiers_from_batches { - Ok(()) - } else { - let differing_nullifiers: Vec = produced_nullifiers_from_store - .symmetric_difference(&produced_nullifiers_from_batches) - .copied() - .collect(); - - Err(BuildBlockError::InconsistentNullifiers(differing_nullifiers)) - } - } - - /// Builds the advice inputs to the block kernel - fn build_advice_inputs(self) -> Result { - let advice_stack = { - let mut advice_stack = Vec::new(); - - // add account stack inputs to the advice stack - { - let mut account_data = Vec::new(); - let mut num_accounts_updated: u64 = 0; - for (idx, (account_id, account_update)) in self.updated_accounts.iter().enumerate() - { - account_data.extend(account_update.final_state_hash); - 
account_data.push(account_id.prefix().as_felt()); - - let idx = u64::try_from(idx).expect("can't be more than 2^64 - 1 accounts"); - num_accounts_updated = idx + 1; - } - - // append number of accounts updated - advice_stack.push(num_accounts_updated.try_into().expect( - "updated accounts number is greater than or equal to the field modulus", - )); - - // append initial account root - advice_stack.extend(self.prev_header.account_root()); - - // append the updated accounts data - advice_stack.extend(account_data); - } - - // add notes stack inputs to the advice stack - { - // append the number of updated notes - advice_stack - .push(Felt::try_from(self.batch_created_notes_roots.len() as u64).expect( - "notes roots number is greater than or equal to the field modulus", - )); - - // append the empty root - let empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - advice_stack.extend(*empty_root); - - for (batch_index, batch_created_notes_root) in &self.batch_created_notes_roots { - advice_stack.extend(batch_created_notes_root.iter()); - - let batch_index = Felt::try_from(*batch_index as u64) - .expect("batch index is greater than or equal to the field modulus"); - advice_stack.push(batch_index); - } - } - - // Nullifiers stack inputs - { - let num_produced_nullifiers: Felt = (self.produced_nullifiers.len() as u64) - .try_into() - .expect("nullifiers number is greater than or equal to the field modulus"); - - // append number of nullifiers - advice_stack.push(num_produced_nullifiers); - - // append initial nullifier root - advice_stack.extend(self.prev_header.nullifier_root()); - - // append nullifier value (`[block_num, 0, 0, 0]`) - let block_num = self.prev_header.block_num() + 1; - advice_stack.extend([block_num.into(), ZERO, ZERO, ZERO]); - - for nullifier in self.produced_nullifiers.keys() { - advice_stack.extend(nullifier.inner()); - } - } - - // Chain MMR stack inputs - { - advice_stack.extend(self.prev_header.hash()); - 
advice_stack.extend(self.chain_peaks.hash_peaks()); - } - - advice_stack - }; - - let merkle_store = { - let mut merkle_store = MerkleStore::default(); - - // add accounts merkle paths - merkle_store - .add_merkle_paths(self.updated_accounts.into_iter().map( - |(account_id, AccountUpdateWitness { initial_state_hash, proof, .. })| { - (account_id.prefix().into(), initial_state_hash, proof) - }, - )) - .map_err(BlockProverError::InvalidMerklePaths)?; - - // add nullifiers merkle paths - merkle_store - .add_merkle_paths(self.produced_nullifiers.iter().map(|(nullifier, proof)| { - // Note: the initial value for all nullifiers in the tree is `[0, 0, 0, 0]` - ( - u64::from(nullifier.most_significant_felt()), - Digest::default(), - proof.path().clone(), - ) - })) - .map_err(BlockProverError::InvalidMerklePaths)?; - - merkle_store - }; - - let advice_map: Vec<_> = self - .produced_nullifiers - .values() - .map(|proof| (proof.leaf().hash(), proof.leaf().to_elements())) - .chain(std::iter::once(mmr_peaks_advice_map_key_value(&self.chain_peaks))) - .collect(); - - let advice_inputs = AdviceInputs::default() - .with_merkle_store(merkle_store) - .with_map(advice_map) - .with_stack(advice_stack); - - Ok(advice_inputs) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(super) struct AccountUpdateWitness { - pub initial_state_hash: Digest, - pub final_state_hash: Digest, - pub proof: MerklePath, - pub transactions: Vec, -} - -// HELPERS -// ================================================================================================= - -// Generates the advice map key/value for Mmr peaks -fn mmr_peaks_advice_map_key_value(peaks: &MmrPeaks) -> (Digest, Vec) { - let mut elements = vec![Felt::new(peaks.num_leaves() as u64), ZERO, ZERO, ZERO]; - elements.extend(peaks.flatten_and_pad_peaks()); - - (peaks.hash_peaks(), elements) -} diff --git a/crates/block-producer/src/block_builder/prover/mod.rs b/crates/block-producer/src/block_builder/prover/mod.rs deleted file mode 100644 index 
ded93a7d4..000000000 --- a/crates/block-producer/src/block_builder/prover/mod.rs +++ /dev/null @@ -1,132 +0,0 @@ -use std::time::{SystemTime, UNIX_EPOCH}; - -use miden_lib::transaction::TransactionKernel; -use miden_objects::{ - assembly::Assembler, - block::{compute_tx_hash, BlockHeader}, - Digest, -}; -use miden_processor::{execute, DefaultHost, ExecutionOptions, MemAdviceProvider, Program}; -use miden_stdlib::StdLibrary; - -use self::block_witness::BlockWitness; -use crate::errors::{BlockProverError, BuildBlockError}; - -/// The index of the word at which the account root is stored on the output stack. -pub const ACCOUNT_ROOT_WORD_IDX: usize = 0; - -/// The index of the word at which the note root is stored on the output stack. -pub const NOTE_ROOT_WORD_IDX: usize = 4; - -/// The index of the word at which the nullifier root is stored on the output stack. -pub const NULLIFIER_ROOT_WORD_IDX: usize = 8; - -/// The index of the word at which the note root is stored on the output stack. -pub const CHAIN_MMR_ROOT_WORD_IDX: usize = 12; - -pub mod block_witness; - -#[cfg(test)] -mod tests; - -const BLOCK_KERNEL_MASM: &str = include_str!("asm/block_kernel.masm"); - -#[derive(Debug)] -pub(crate) struct BlockProver { - kernel: Program, -} - -impl BlockProver { - pub fn new() -> Self { - let account_program = { - let assembler = Assembler::default() - .with_library(StdLibrary::default()) - .expect("failed to load std-lib"); - - assembler - .assemble_program(BLOCK_KERNEL_MASM) - .expect("failed to load account update program") - }; - - Self { kernel: account_program } - } - - // Note: this will eventually all be done in the VM, and also return an `ExecutionProof` - pub fn prove(&self, witness: BlockWitness) -> Result { - let prev_hash = witness.prev_header.hash(); - let block_num = witness.prev_header.block_num() + 1; - let version = witness.prev_header.version(); - - let tx_hash = compute_tx_hash(witness.transactions()); - let (account_root, note_root, nullifier_root, 
chain_root) = self.compute_roots(witness)?; - - let proof_hash = Digest::default(); - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("today is expected to be after 1970") - .as_secs() - .try_into() - .expect("timestamp must fit in a `u32`"); - - Ok(BlockHeader::new( - version, - prev_hash, - block_num, - chain_root, - account_root, - nullifier_root, - note_root, - tx_hash, - TransactionKernel::kernel_root(), - proof_hash, - timestamp, - )) - } - - fn compute_roots( - &self, - witness: BlockWitness, - ) -> Result<(Digest, Digest, Digest, Digest), BlockProverError> { - let (advice_inputs, stack_inputs) = witness.into_program_inputs()?; - let mut host = { - let advice_provider = MemAdviceProvider::from(advice_inputs); - - let mut host = DefaultHost::new(advice_provider); - host.load_mast_forest(StdLibrary::default().mast_forest().clone()) - .expect("failed to load mast forest"); - - host - }; - - let execution_output = - execute(&self.kernel, stack_inputs, &mut host, ExecutionOptions::default()) - .map_err(BlockProverError::ProgramExecutionFailed)?; - - let new_account_root = execution_output - .stack_outputs() - .get_stack_word(ACCOUNT_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("account"))?; - - let new_note_root = execution_output - .stack_outputs() - .get_stack_word(NOTE_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("note"))?; - - let new_nullifier_root = execution_output - .stack_outputs() - .get_stack_word(NULLIFIER_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("nullifier"))?; - - let new_chain_mmr_root = execution_output - .stack_outputs() - .get_stack_word(CHAIN_MMR_ROOT_WORD_IDX) - .ok_or(BlockProverError::InvalidRootOutput("chain mmr"))?; - - Ok(( - new_account_root.into(), - new_note_root.into(), - new_nullifier_root.into(), - new_chain_mmr_root.into(), - )) - } -} diff --git a/crates/block-producer/src/block_builder/prover/tests.rs 
b/crates/block-producer/src/block_builder/prover/tests.rs deleted file mode 100644 index f020faf62..000000000 --- a/crates/block-producer/src/block_builder/prover/tests.rs +++ /dev/null @@ -1,951 +0,0 @@ -use std::{collections::BTreeMap, iter}; - -use assert_matches::assert_matches; -use miden_node_proto::domain::note::NoteAuthenticationInfo; -use miden_objects::{ - account::{ - delta::AccountUpdateDetails, AccountId, AccountIdVersion, AccountStorageMode, AccountType, - }, - batch::ProvenBatch, - block::{BlockAccountUpdate, BlockNoteIndex, BlockNoteTree, BlockNumber}, - crypto::merkle::{ - EmptySubtreeRoots, LeafIndex, MerklePath, Mmr, MmrPeaks, Smt, SmtLeaf, SmtProof, SMT_DEPTH, - }, - note::{NoteExecutionHint, NoteHeader, NoteMetadata, NoteTag, NoteType, Nullifier}, - testing::account_id::{ - ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, - }, - transaction::{OutputNote, ProvenTransaction}, - Felt, BATCH_NOTE_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, ONE, ZERO, -}; - -use self::block_witness::AccountUpdateWitness; -use super::*; -use crate::{ - block::{AccountWitness, BlockInputs}, - test_utils::{ - batch::TransactionBatchConstructor, - block::{build_actual_block_header, build_expected_block_header, MockBlockBuilder}, - MockProvenTxBuilder, MockStoreSuccessBuilder, - }, -}; - -// BLOCK WITNESS TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of account ids. -/// -/// The store will contain accounts 1 & 2, while the transaction batches will contain 2 & 3. 
-#[test] -fn block_witness_validation_inconsistent_account_ids() { - let account_id_1 = AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_2 = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - let account_id_3 = AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - (account_id_1, AccountWitness::default()), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account( - account_id_3, - Digest::default(), - Digest::default(), - ) - .build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert!(block_witness_result.is_err()); -} - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different at least 1 account who's state hash is different. 
-/// -/// Only account 1 will have a different state hash -#[test] -fn block_witness_validation_inconsistent_account_hashes() { - let account_id_1 = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let account_id_2 = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let account_1_hash_store = - Digest::new([Felt::new(1u64), Felt::new(2u64), Felt::new(3u64), Felt::new(4u64)]); - let account_1_hash_batches = - Digest::new([Felt::new(4u64), Felt::new(3u64), Felt::new(2u64), Felt::new(1u64)]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = BTreeMap::from_iter(vec![ - ( - account_id_1, - AccountWitness { - hash: account_1_hash_store, - proof: MerklePath::default(), - }, - ), - (account_id_2, AccountWitness::default()), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_1, - account_1_hash_batches, - Digest::default(), - ) - .build()]); - - let batch_2 = ProvenBatch::mocked_from_transactions([&MockProvenTxBuilder::with_account( - account_id_2, - Digest::default(), - Digest::default(), - ) - .build()]); - - vec![batch_1, batch_2] - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - Err(BuildBlockError::InconsistentAccountStateTransition( - account_id, - account_hash_store, - account_hash_batches - )) => { - assert_eq!(account_id, account_id_1); - assert_eq!(account_hash_store, account_1_hash_store); - assert_eq!(account_hash_batches, vec![account_1_hash_batches]); - } - ); -} - -/// Creates two batches which each update the same pair of 
accounts. -/// -/// The transactions are ordered such that the batches cannot be chronologically ordered -/// themselves: `[tx_x0, tx_y1], [tx_y0, tx_x1]`. This test ensures that the witness is -/// produced correctly as if for a single batch: `[tx_x0, tx_x1, tx_y0, tx_y1]`. -#[test] -fn block_witness_multiple_batches_per_account() { - let x_account_id = - AccountId::try_from(ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN).unwrap(); - let y_account_id = AccountId::try_from(ACCOUNT_ID_OFF_CHAIN_SENDER).unwrap(); - - let x_hashes = [ - Digest::new((0..4).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((4..8).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((8..12).map(Felt::new).collect::>().try_into().unwrap()), - ]; - let y_hashes = [ - Digest::new((12..16).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((16..20).map(Felt::new).collect::>().try_into().unwrap()), - Digest::new((20..24).map(Felt::new).collect::>().try_into().unwrap()), - ]; - - let x_txs = [ - MockProvenTxBuilder::with_account(x_account_id, x_hashes[0], x_hashes[1]).build(), - MockProvenTxBuilder::with_account(x_account_id, x_hashes[1], x_hashes[2]).build(), - ]; - let y_txs = [ - MockProvenTxBuilder::with_account(y_account_id, y_hashes[0], y_hashes[1]).build(), - MockProvenTxBuilder::with_account(y_account_id, y_hashes[1], y_hashes[2]).build(), - ]; - - let x_proof = MerklePath::new(vec![Digest::new( - (24..28).map(Felt::new).collect::>().try_into().unwrap(), - )]); - let y_proof = MerklePath::new(vec![Digest::new( - (28..32).map(Felt::new).collect::>().try_into().unwrap(), - )]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let x_witness = AccountWitness { - hash: x_hashes[0], - proof: x_proof.clone(), - }; - let y_witness = AccountWitness { - hash: y_hashes[0], - proof: y_proof.clone(), - }; - let 
accounts = BTreeMap::from_iter([(x_account_id, x_witness), (y_account_id, y_witness)]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers: BTreeMap::default(), - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let batches = { - let batch_1 = ProvenBatch::mocked_from_transactions([&x_txs[0], &y_txs[1]]); - let batch_2 = ProvenBatch::mocked_from_transactions([&y_txs[0], &x_txs[1]]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - let account_witnesses = block_witness.updated_accounts.into_iter().collect::>(); - - let x_expected = AccountUpdateWitness { - initial_state_hash: x_hashes[0], - final_state_hash: *x_hashes.last().unwrap(), - proof: x_proof, - transactions: x_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let y_expected = AccountUpdateWitness { - initial_state_hash: y_hashes[0], - final_state_hash: *y_hashes.last().unwrap(), - proof: y_proof, - transactions: y_txs.iter().map(ProvenTransaction::id).collect(), - }; - - let expected = [(x_account_id, x_expected), (y_account_id, y_expected)].into(); - - assert_eq!(account_witnesses, expected); -} - -// ACCOUNT ROOT TESTS -// ================================================================================================= - -/// Tests that the `BlockProver` computes the proper account root. -/// -/// We assume an initial store with 5 accounts, and all will be updated. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_success() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - let account_final_states = [ - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - ]; - - // Set up store's account SMT - // 
--------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = account_ids - .iter() - .enumerate() - .map(|(idx, &account_id)| { - MockProvenTxBuilder::with_account( - account_id, - account_initial_states[idx].into(), - account_final_states[idx].into(), - ) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Update SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let block = MockBlockBuilder::new(&store) - .await - .account_updates( - account_ids - .iter() - .zip(account_final_states.iter()) - .map(|(&account_id, &account_hash)| { - BlockAccountUpdate::new( - account_id, - account_hash.into(), - AccountUpdateDetails::Private, - vec![], - ) - }) - .collect(), - ) - .build(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), block.header().account_root()); -} - -/// Test that the current account root is 
returned if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_account_root_empty_batches() { - // Set up account states - // --------------------------------------------------------------------------------------------- - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [3; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - AccountId::dummy( - [4; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ), - ]; - - let account_initial_states = [ - [Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)], - [Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)], - [Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)], - [Felt::new(4u64), Felt::new(4u64), Felt::new(4u64), Felt::new(4u64)], - [Felt::new(5u64), Felt::new(5u64), Felt::new(5u64), Felt::new(5u64)], - ]; - - // Set up store's account SMT - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_accounts( - account_ids - .iter() - .zip(account_initial_states.iter()) - .map(|(&account_id, &account_hash)| (account_id, account_hash.into())), - ) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - 
.get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches = Vec::new(); - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.account_root(), store.account_root().await); -} - -// NOTE ROOT TESTS -// ================================================================================================= - -/// Tests that the block kernel returns the empty tree (depth 20) if no notes were created, and -/// contains no batches -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_batches_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = Vec::new(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the empty tree 
(depth 20) if no notes were created, but -/// which contains at least 1 batch. -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_empty_notes_success() { - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(std::iter::empty(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let batch = ProvenBatch::mocked_from_transactions(vec![]); - vec![batch] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - let created_notes_empty_root = EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0); - assert_eq!(block_header.note_root(), *created_notes_empty_root); -} - -/// Tests that the block kernel returns the expected tree when multiple notes were created across -/// many batches. 
-#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_note_root_success() { - let account_ids = [ - AccountId::dummy( - [0; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - AccountId::dummy( - [2; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - miden_objects::account::AccountStorageMode::Private, - ), - ]; - - let notes_created: Vec = [ - Digest::from([Felt::new(1u64), Felt::new(1u64), Felt::new(1u64), Felt::new(1u64)]), - Digest::from([Felt::new(2u64), Felt::new(2u64), Felt::new(2u64), Felt::new(2u64)]), - Digest::from([Felt::new(3u64), Felt::new(3u64), Felt::new(3u64), Felt::new(3u64)]), - ] - .into_iter() - .zip(account_ids.iter()) - .map(|(note_digest, &account_id)| { - NoteHeader::new( - note_digest.into(), - NoteMetadata::new( - account_id, - NoteType::Private, - NoteTag::for_local_use_case(0u16, 0u16).unwrap(), - NoteExecutionHint::none(), - ONE, - ) - .unwrap(), - ) - }) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let batches: Vec = { - let txs: Vec<_> = notes_created - .iter() - .zip(account_ids.iter()) - .map(|(note, &account_id)| { - let note = OutputNote::Header(*note); - MockProvenTxBuilder::with_account(account_id, 
Digest::default(), Digest::default()) - .output_notes(vec![note]) - .build() - }) - .collect(); - - let batch_1 = ProvenBatch::mocked_from_transactions(&txs[..2]); - let batch_2 = ProvenBatch::mocked_from_transactions(&txs[2..]); - - vec![batch_1, batch_2] - }; - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create block note tree to get new root - // --------------------------------------------------------------------------------------------- - - // The current logic is hardcoded to a depth of 6 - // Specifically, we assume the block has up to 2^6 batches, and each batch up to 2^10 created - // notes, where each note is stored at depth 10 in the batch tree. - #[allow(clippy::items_after_statements, reason = "assert belongs to this section")] - const _: () = assert!(BLOCK_NOTE_TREE_DEPTH - BATCH_NOTE_TREE_DEPTH == 6); - - // The first 2 txs were put in the first batch; the 3rd was put in the second - let note_tree = BlockNoteTree::with_entries([ - ( - BlockNoteIndex::new(0, 0).unwrap(), - notes_created[0].id(), - *notes_created[0].metadata(), - ), - ( - BlockNoteIndex::new(0, 1).unwrap(), - notes_created[1].id(), - *notes_created[1].metadata(), - ), - ( - BlockNoteIndex::new(1, 0).unwrap(), - notes_created[2].id(), - *notes_created[2].metadata(), - ), - ]) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.note_root(), note_tree.root()); -} - -// NULLIFIER ROOT TESTS -// ================================================================================================= - -/// Tests that `BlockWitness` constructor fails if the store and transaction batches contain a -/// different set of nullifiers. -/// -/// The transaction batches will contain nullifiers 1 & 2, while the store will contain 2 & 3. 
-#[test] -fn block_witness_validation_inconsistent_nullifiers() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let nullifier_1 = batches[0].produced_nullifiers().next().unwrap(); - let nullifier_2 = batches[1].produced_nullifiers().next().unwrap(); - let nullifier_3 = - Nullifier::from([101_u32.into(), 102_u32.into(), 103_u32.into(), 104_u32.into()]); - - let block_inputs_from_store: BlockInputs = { - let block_header = BlockHeader::mock(0, None, None, &[], Digest::default()); - let chain_peaks = MmrPeaks::new(0, Vec::new()).unwrap(); - - let accounts = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, hash)| { - (account_id, AccountWitness { hash, proof: MerklePath::default() }) - }) - .collect(); - - let nullifiers = BTreeMap::from_iter(vec![ - ( - nullifier_2, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_2.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ( - nullifier_3, - SmtProof::new( - MerklePath::new(vec![Digest::default(); SMT_DEPTH as usize]), - SmtLeaf::new_empty(LeafIndex::new_max_depth( - nullifier_3.most_significant_felt().into(), - )), - ) - .unwrap(), - ), - ]); - - BlockInputs { - block_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes: NoteAuthenticationInfo::default(), - } - }; - - let block_witness_result = BlockWitness::new(block_inputs_from_store, &batches); - - assert_matches!( - block_witness_result, - 
Err(BuildBlockError::InconsistentNullifiers(nullifiers)) => { - assert_eq!(nullifiers, vec![nullifier_1, nullifier_3]); - } - ); -} - -/// Tests that the block kernel returns the expected nullifier tree when no nullifiers are present -/// in the transaction -#[tokio::test] -async fn compute_nullifier_root_empty_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - // Set up store - // --------------------------------------------------------------------------------------------- - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()).build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), std::iter::empty(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - let nullifier_smt = Smt::new(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), 
nullifier_smt.root()); -} - -/// Tests that the block kernel returns the expected nullifier tree when multiple nullifiers are -/// present in the transaction -#[tokio::test] -async fn compute_nullifier_root_success() { - let batches: Vec = { - let batch_1 = { - let tx = MockProvenTxBuilder::with_account_index(0).nullifiers_range(0..1).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - let batch_2 = { - let tx = MockProvenTxBuilder::with_account_index(1).nullifiers_range(1..2).build(); - - ProvenBatch::mocked_from_transactions([&tx]) - }; - - vec![batch_1, batch_2] - }; - - let account_ids: Vec = batches - .iter() - .flat_map(|batch| { - batch - .account_updates() - .iter() - .map(|(account_id, update)| (*account_id, update.initial_state_commitment())) - }) - .map(|(account_id, _)| account_id) - .collect(); - - let nullifiers = [ - batches[0].produced_nullifiers().next().unwrap(), - batches[1].produced_nullifiers().next().unwrap(), - ]; - - // Set up store - // --------------------------------------------------------------------------------------------- - let initial_block_num = BlockNumber::from(42); - - let store = MockStoreSuccessBuilder::from_batches(batches.iter()) - .initial_block_num(initial_block_num) - .build(); - - // Block prover - // --------------------------------------------------------------------------------------------- - - // Block inputs is initialized with all the accounts and their initial state - let block_inputs_from_store: BlockInputs = store - .get_block_inputs(account_ids.into_iter(), nullifiers.iter(), std::iter::empty()) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - let block_prover = BlockProver::new(); - let block_header = block_prover.prove(block_witness).unwrap(); - - // Create SMT by hand to get new root - // --------------------------------------------------------------------------------------------- - - // Note that the block number in store is 
42; the nullifiers get added to the next block (i.e. - // block number 43) - let nullifier_smt = - Smt::with_entries(nullifiers.into_iter().map(|nullifier| { - (nullifier.inner(), [(initial_block_num + 1).into(), ZERO, ZERO, ZERO]) - })) - .unwrap(); - - // Compare roots - // --------------------------------------------------------------------------------------------- - assert_eq!(block_header.nullifier_root(), nullifier_smt.root()); -} - -// CHAIN MMR ROOT TESTS -// ================================================================================================= - -/// Test that the chain mmr root is as expected if the batches are empty -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_empty_mmr() { - let store = MockStoreSuccessBuilder::from_batches(iter::empty()).build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to non-empty MMR (1 peak), and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_1_peak() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - mmr.add(Digest::default()); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} - -/// add header to an MMR with 17 peaks, and check that we get the expected commitment -#[tokio::test] -#[miden_node_test_macro::enable_logging] -async fn compute_chain_mmr_root_mmr_17_peaks() { - let initial_chain_mmr = { - let mut mmr = Mmr::new(); - 
for _ in 0..(2_u32.pow(17) - 1) { - mmr.add(Digest::default()); - } - - assert_eq!(mmr.peaks().peaks().len(), 17); - - mmr - }; - - let store = MockStoreSuccessBuilder::from_batches(iter::empty()) - .initial_chain_mmr(initial_chain_mmr) - .build(); - - let expected_block_header = build_expected_block_header(&store, &[]).await; - let actual_block_header = build_actual_block_header(&store, Vec::new()).await; - - assert_eq!(actual_block_header.chain_root(), expected_block_header.chain_root()); -} diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 02bc01568..6c9de3e37 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -1,15 +1,13 @@ +use miden_block_prover::ProvenBlockError; use miden_node_proto::errors::ConversionError; use miden_node_utils::formatting::format_opt; use miden_objects::{ - account::AccountId, block::BlockNumber, - crypto::merkle::MerkleError, note::{NoteId, Nullifier}, transaction::TransactionId, - AccountDeltaError, BlockError, Digest, ProposedBatchError, + Digest, ProposedBatchError, ProposedBlockError, }; -use miden_processor::ExecutionError; -use miden_tx_batch_prover::errors::BatchProveError; +use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -143,20 +141,7 @@ pub enum BuildBatchError { ProposeBatchError(#[source] ProposedBatchError), #[error("failed to prove proposed transaction batch")] - ProveBatchError(#[source] BatchProveError), -} - -// Block prover errors -// ================================================================================================= - -#[derive(Debug, Error)] -pub enum BlockProverError { - #[error("received invalid merkle path")] - InvalidMerklePaths(#[source] MerkleError), - #[error("program execution failed")] - ProgramExecutionFailed(#[source] ExecutionError), - #[error("failed to retrieve {0} root from stack outputs")] - InvalidRootOutput(&'static str), + 
ProveBatchError(#[source] ProvenBatchError), } // Block building errors @@ -164,31 +149,14 @@ pub enum BlockProverError { #[derive(Debug, Error)] pub enum BuildBlockError { - #[error("failed to compute new block")] - BlockProverFailed(#[from] BlockProverError), #[error("failed to apply block to store")] StoreApplyBlockFailed(#[source] StoreError), #[error("failed to get block inputs from store")] GetBlockInputsFailed(#[source] StoreError), - #[error("block inputs from store did not contain data for account {0}")] - MissingAccountInput(AccountId), - #[error("block inputs from store contained extra data for accounts {0:?}")] - ExtraStoreData(Vec), - #[error("account {0} with state {1} cannot transaction to remaining states {2:?}")] - InconsistentAccountStateTransition(AccountId, Digest, Vec), - #[error( - "block inputs from store and transaction batches produced different nullifiers: {0:?}" - )] - InconsistentNullifiers(Vec), - #[error("unauthenticated transaction notes not found in the store or in outputs of other transactions in the block: {0:?}")] - UnauthenticatedNotesNotFound(Vec), - #[error("failed to merge transaction delta into account {account_id}")] - AccountUpdateError { - account_id: AccountId, - source: AccountDeltaError, - }, - #[error("block construction failed")] - BlockConstructionError(#[from] BlockError), + #[error("failed to propose block")] + ProposeBlockFailed(#[source] ProposedBlockError), + #[error("failed to prove block")] + ProveBlockFailed(#[source] ProvenBlockError), /// We sometimes randomly inject errors into the batch building process to test our failure /// responses. 
#[error("nothing actually went wrong, failure was injected on purpose")] diff --git a/crates/block-producer/src/lib.rs b/crates/block-producer/src/lib.rs index 1cb3b62c9..8aa594232 100644 --- a/crates/block-producer/src/lib.rs +++ b/crates/block-producer/src/lib.rs @@ -10,7 +10,6 @@ mod errors; mod mempool; mod store; -pub mod block; pub mod config; pub mod server; diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 3d8009cda..9d4ad4d40 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -22,7 +22,7 @@ use miden_node_proto::{ use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ account::AccountId, - block::{Block, BlockHeader, BlockNumber}, + block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, @@ -32,7 +32,7 @@ use miden_processor::crypto::RpoDigest; use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; -use crate::{block::BlockInputs, errors::StoreError, COMPONENT}; +use crate::{errors::StoreError, COMPONENT}; // TRANSACTION INPUTS // ================================================================================================ @@ -197,13 +197,15 @@ impl StoreClient { pub async fn get_block_inputs( &self, updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, + created_nullifiers: impl Iterator + Send, + unauthenticated_notes: impl Iterator + Send, + reference_blocks: impl Iterator + Send, ) -> Result { let request = tonic::Request::new(GetBlockInputsRequest { account_ids: updated_accounts.map(Into::into).collect(), - nullifiers: produced_nullifiers.map(digest::Digest::from).collect(), - unauthenticated_notes: notes.map(digest::Digest::from).collect(), + nullifiers: 
created_nullifiers.map(digest::Digest::from).collect(), + unauthenticated_notes: unauthenticated_notes.map(digest::Digest::from).collect(), + reference_blocks: reference_blocks.map(|block_num| block_num.as_u32()).collect(), }); let store_response = self.inner.clone().get_block_inputs(request).await?.into_inner(); @@ -228,7 +230,7 @@ impl StoreClient { } #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { let request = tonic::Request::new(ApplyBlockRequest { block: block.to_bytes() }); self.inner.clone().apply_block(request).await.map(|_| ()).map_err(Into::into) diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 37c2041fb..8e4371a3f 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,7 +1,7 @@ use std::collections::BTreeMap; use miden_objects::{ - batch::{BatchAccountUpdate, BatchId, BatchNoteTree, ProvenBatch}, + batch::{BatchAccountUpdate, BatchId, ProvenBatch}, block::BlockNumber, transaction::{InputNotes, ProvenTransaction}, Digest, @@ -56,16 +56,12 @@ impl TransactionBatchConstructor for ProvenBatch { output_notes.extend(tx.output_notes().iter().cloned()); } - ProvenBatch::new( + ProvenBatch::new_unchecked( BatchId::from_transactions(txs.into_iter()), Digest::default(), BlockNumber::GENESIS, account_updates, InputNotes::new_unchecked(input_notes), - BatchNoteTree::with_contiguous_leaves( - output_notes.iter().map(|x| (x.id(), x.metadata())), - ) - .unwrap(), output_notes, BlockNumber::from(u32::MAX), ) diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 03ea004f3..7fcf0bd4c 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ 
-1,8 +1,9 @@ -use std::iter; - use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, NoteBatch}, + block::{ + BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, OutputNoteBatch, + ProvenBlock, + }, crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, transaction::OutputNote, @@ -10,10 +11,6 @@ use miden_objects::{ }; use super::MockStoreSuccess; -use crate::{ - block::BlockInputs, - block_builder::prover::{block_witness::BlockWitness, BlockProver}, -}; /// Constructs the block we expect to be built given the store state, and a set of transaction /// batches to be applied @@ -71,31 +68,6 @@ pub async fn build_expected_block_header( ) } -/// Builds the "actual" block header; i.e. the block header built using the Miden VM, used in the -/// node -pub async fn build_actual_block_header( - store: &MockStoreSuccess, - batches: Vec, -) -> BlockHeader { - let updated_accounts: Vec<_> = - batches.iter().flat_map(|batch| batch.account_updates().iter()).collect(); - let produced_nullifiers: Vec = - batches.iter().flat_map(ProvenBatch::produced_nullifiers).collect(); - - let block_inputs_from_store: BlockInputs = store - .get_block_inputs( - updated_accounts.iter().map(|(&account_id, _)| account_id), - produced_nullifiers.iter(), - iter::empty(), - ) - .await - .unwrap(); - - let (block_witness, _) = BlockWitness::new(block_inputs_from_store, &batches).unwrap(); - - BlockProver::new().prove(block_witness).unwrap() -} - #[derive(Debug)] pub struct MockBlockBuilder { store_accounts: SimpleSmt, @@ -103,7 +75,7 @@ pub struct MockBlockBuilder { last_block_header: BlockHeader, updated_accounts: Option>, - created_notes: Option>, + created_notes: Option>, produced_nullifiers: Option>, } @@ -140,7 +112,7 @@ impl MockBlockBuilder { } #[must_use] - pub fn created_notes(mut self, created_notes: Vec) -> Self { + pub fn created_notes(mut self, created_notes: Vec) -> Self { self.created_notes = 
Some(created_notes); self @@ -153,7 +125,7 @@ impl MockBlockBuilder { self } - pub fn build(self) -> Block { + pub fn build(self) -> ProvenBlock { let created_notes = self.created_notes.unwrap_or_default(); let header = BlockHeader::new( @@ -170,28 +142,27 @@ impl MockBlockBuilder { 1, ); - Block::new( + ProvenBlock::new_unchecked( header, self.updated_accounts.unwrap_or_default(), created_notes, self.produced_nullifiers.unwrap_or_default(), ) - .unwrap() } } pub(crate) fn flatten_output_notes<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> impl Iterator { batches.enumerate().flat_map(|(batch_idx, batch)| { - batch.iter().enumerate().map(move |(note_idx_in_batch, note)| { - (BlockNoteIndex::new(batch_idx, note_idx_in_batch).unwrap(), note) + batch.iter().map(move |(note_idx_in_batch, note)| { + (BlockNoteIndex::new(batch_idx, *note_idx_in_batch).unwrap(), note) }) }) } pub(crate) fn note_created_smt_from_note_batches<'a>( - batches: impl Iterator, + batches: impl Iterator, ) -> BlockNoteTree { let note_leaf_iterator = flatten_output_notes(batches).map(|(index, note)| (index, note.id(), *note.metadata())); @@ -201,6 +172,8 @@ pub(crate) fn note_created_smt_from_note_batches<'a>( pub(crate) fn block_output_notes<'a>( batches: impl Iterator + Clone, -) -> Vec> { - batches.map(|batch| batch.output_notes().to_vec()).collect() +) -> Vec { + batches + .map(|batch| batch.output_notes().iter().cloned().enumerate().collect()) + .collect() } diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index af404acd9..4d0507e33 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -3,12 +3,11 @@ use std::{ num::NonZeroU32, }; -use miden_node_proto::domain::{block::BlockInclusionProof, note::NoteAuthenticationInfo}; use miden_objects::{ batch::ProvenBatch, - block::{Block, BlockHeader, BlockNumber, NoteBatch}, - crypto::merkle::{Mmr, SimpleSmt, Smt, 
ValuePath}, - note::{NoteId, NoteInclusionProof, Nullifier}, + block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, + crypto::merkle::{Mmr, SimpleSmt, Smt}, + note::{NoteId, NoteInclusionProof}, transaction::ProvenTransaction, ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; @@ -16,7 +15,6 @@ use tokio::sync::RwLock; use super::*; use crate::{ - block::{AccountWitness, BlockInputs}, errors::StoreError, store::TransactionInputs, test_utils::block::{ @@ -28,7 +26,7 @@ use crate::{ #[derive(Debug)] pub struct MockStoreSuccessBuilder { accounts: Option>, - notes: Option>, + notes: Option>, produced_nullifiers: Option>, chain_mmr: Option, block_num: Option, @@ -76,7 +74,10 @@ impl MockStoreSuccessBuilder { } #[must_use] - pub fn initial_notes<'a>(mut self, notes: impl Iterator + Clone) -> Self { + pub fn initial_notes<'a>( + mut self, + notes: impl Iterator + Clone, + ) -> Self { self.notes = Some(notes.cloned().collect()); self @@ -191,7 +192,7 @@ impl MockStoreSuccess { locked_accounts.root() } - pub async fn apply_block(&self, block: &Block) -> Result<(), StoreError> { + pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { // Intentionally, we take and hold both locks, to prevent calls to `get_tx_inputs()` from // going through while we're updating the store's data structure let mut locked_accounts = self.accounts.write().await; @@ -206,7 +207,7 @@ impl MockStoreSuccess { debug_assert_eq!(locked_accounts.root(), header.account_root()); // update nullifiers - for nullifier in block.nullifiers() { + for nullifier in block.created_nullifiers() { locked_produced_nullifiers .insert(nullifier.inner(), [header.block_num().into(), ZERO, ZERO, ZERO]); } @@ -219,11 +220,11 @@ impl MockStoreSuccess { } // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); // update notes let mut locked_notes = self.notes.write().await; - for (note_index, note) in block.notes() { + for (note_index, note) in 
block.output_notes() { locked_notes.insert( note.id(), NoteInclusionProof::new( @@ -289,66 +290,4 @@ impl MockStoreSuccess { current_block_height: 0.into(), }) } - - pub async fn get_block_inputs( - &self, - updated_accounts: impl Iterator + Send, - produced_nullifiers: impl Iterator + Send, - notes: impl Iterator + Send, - ) -> Result { - let locked_accounts = self.accounts.read().await; - let locked_produced_nullifiers = self.produced_nullifiers.read().await; - - let chain_peaks = { - let locked_chain_mmr = self.chain_mmr.read().await; - locked_chain_mmr.peaks() - }; - - let accounts = { - updated_accounts - .map(|account_id| { - let ValuePath { value: hash, path: proof } = - locked_accounts.open(&account_id.into()); - - (account_id, AccountWitness { hash, proof }) - }) - .collect() - }; - - let nullifiers = produced_nullifiers - .map(|nullifier| (*nullifier, locked_produced_nullifiers.open(&nullifier.inner()))) - .collect(); - - let locked_notes = self.notes.read().await; - let note_proofs = notes - .filter_map(|id| locked_notes.get(id).map(|proof| (*id, proof.clone()))) - .collect::>(); - - let locked_headers = self.block_headers.read().await; - let latest_header = - *locked_headers.iter().max_by_key(|(block_num, _)| *block_num).unwrap().1; - - let locked_chain_mmr = self.chain_mmr.read().await; - let chain_length = latest_header.block_num(); - let block_proofs = note_proofs - .values() - .map(|note_proof| { - let block_num = note_proof.location().block_num(); - let block_header = *locked_headers.get(&block_num).unwrap(); - let mmr_path = locked_chain_mmr.open(block_num.as_usize()).unwrap().merkle_path; - - BlockInclusionProof { block_header, mmr_path, chain_length } - }) - .collect(); - - let found_unauthenticated_notes = NoteAuthenticationInfo { block_proofs, note_proofs }; - - Ok(BlockInputs { - block_header: latest_header, - chain_peaks, - accounts, - nullifiers, - found_unauthenticated_notes, - }) - } } diff --git a/crates/proto/src/domain/account.rs 
b/crates/proto/src/domain/account.rs index 3d6680065..36fadc4a0 100644 --- a/crates/proto/src/domain/account.rs +++ b/crates/proto/src/domain/account.rs @@ -149,49 +149,45 @@ impl TryInto for proto::requests::get_account_proofs_reques } } -// ACCOUNT INPUT RECORD +// ACCOUNT WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct AccountInputRecord { +pub struct AccountWitnessRecord { pub account_id: AccountId, - pub account_hash: Digest, + pub initial_state_commitment: Digest, pub proof: MerklePath, } -impl From for proto::responses::AccountBlockInputRecord { - fn from(from: AccountInputRecord) -> Self { +impl From for proto::responses::AccountWitness { + fn from(from: AccountWitnessRecord) -> Self { Self { account_id: Some(from.account_id.into()), - account_hash: Some(from.account_hash.into()), + initial_state_commitment: Some(from.initial_state_commitment.into()), proof: Some(Into::into(&from.proof)), } } } -impl TryFrom for AccountInputRecord { +impl TryFrom for AccountWitnessRecord { type Error = ConversionError; fn try_from( - account_input_record: proto::responses::AccountBlockInputRecord, + account_witness_record: proto::responses::AccountWitness, ) -> Result { Ok(Self { - account_id: account_input_record + account_id: account_witness_record .account_id - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_id - )))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_id)))? .try_into()?, - account_hash: account_input_record - .account_hash - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!( - account_hash - )))? + initial_state_commitment: account_witness_record + .initial_state_commitment + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(account_hash)))? 
.try_into()?, - proof: account_input_record + proof: account_witness_record .proof .as_ref() - .ok_or(proto::responses::AccountBlockInputRecord::missing_field(stringify!(proof)))? + .ok_or(proto::responses::AccountWitness::missing_field(stringify!(proof)))? .try_into()?, }) } diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index fa7e4bcfb..ff19518bb 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -1,11 +1,18 @@ +use std::collections::BTreeMap; + use miden_objects::{ - block::{BlockHeader, BlockNumber}, - crypto::merkle::MerklePath, + block::{AccountWitness, BlockHeader, BlockInputs, NullifierWitness}, + note::{NoteId, NoteInclusionProof}, + transaction::ChainMmr, + utils::{Deserializable, Serializable}, }; use crate::{ errors::{ConversionError, MissingFieldHelper}, - generated::block as proto, + generated::{ + block as proto, note::NoteInclusionInBlockProof, responses::GetBlockInputsResponse, + }, + AccountWitnessRecord, NullifierWitnessRecord, }; // BLOCK HEADER @@ -87,40 +94,97 @@ impl TryFrom for BlockHeader { } } -/// Data required to verify a block's inclusion proof. 
-#[derive(Clone, Debug)] -pub struct BlockInclusionProof { - pub block_header: BlockHeader, - pub mmr_path: MerklePath, - pub chain_length: BlockNumber, -} +// BLOCK INPUTS +// ================================================================================================ -impl From for proto::BlockInclusionProof { - fn from(value: BlockInclusionProof) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_path: Some((&value.mmr_path).into()), - chain_length: value.chain_length.as_u32(), +impl From for GetBlockInputsResponse { + fn from(inputs: BlockInputs) -> Self { + let ( + prev_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + ) = inputs.into_parts(); + + GetBlockInputsResponse { + latest_block_header: Some(prev_block_header.into()), + account_witnesses: account_witnesses + .into_iter() + .map(|(id, witness)| { + let (initial_state_commitment, proof) = witness.into_parts(); + AccountWitnessRecord { + account_id: id, + initial_state_commitment, + proof, + } + .into() + }) + .collect(), + nullifier_witnesses: nullifier_witnesses + .into_iter() + .map(|(nullifier, witness)| { + let proof = witness.into_proof(); + NullifierWitnessRecord { nullifier, proof }.into() + }) + .collect(), + chain_mmr: chain_mmr.to_bytes(), + unauthenticated_note_proofs: unauthenticated_note_proofs + .iter() + .map(NoteInclusionInBlockProof::from) + .collect(), } } } -impl TryFrom for BlockInclusionProof { +impl TryFrom for BlockInputs { type Error = ConversionError; - fn try_from(value: proto::BlockInclusionProof) -> Result { - let result = Self { - block_header: value - .block_header - .ok_or(proto::BlockInclusionProof::missing_field("block_header"))? - .try_into()?, - mmr_path: (&value - .mmr_path - .ok_or(proto::BlockInclusionProof::missing_field("mmr_path"))?) 
- .try_into()?, - chain_length: value.chain_length.into(), - }; + fn try_from(response: GetBlockInputsResponse) -> Result { + let latest_block_header: BlockHeader = response + .latest_block_header + .ok_or(proto::BlockHeader::missing_field("block_header"))? + .try_into()?; + + let account_witnesses = response + .account_witnesses + .into_iter() + .map(|entry| { + let witness_record: AccountWitnessRecord = entry.try_into()?; + Ok(( + witness_record.account_id, + AccountWitness::new( + witness_record.initial_state_commitment, + witness_record.proof, + ), + )) + }) + .collect::, ConversionError>>()?; + + let nullifier_witnesses = response + .nullifier_witnesses + .into_iter() + .map(|entry| { + let witness: NullifierWitnessRecord = entry.try_into()?; + Ok((witness.nullifier, NullifierWitness::new(witness.proof))) + }) + .collect::, ConversionError>>()?; - Ok(result) + let unauthenticated_note_proofs = response + .unauthenticated_note_proofs + .iter() + .map(<(NoteId, NoteInclusionProof)>::try_from) + .collect::>()?; + + let chain_mmr = ChainMmr::read_from_bytes(&response.chain_mmr) + .map_err(|source| ConversionError::deserialization_error("ChainMmr", source))?; + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) } } diff --git a/crates/proto/src/domain/note.rs b/crates/proto/src/domain/note.rs index 14131bc5b..6c3b7f165 100644 --- a/crates/proto/src/domain/note.rs +++ b/crates/proto/src/domain/note.rs @@ -1,16 +1,11 @@ -use std::collections::{BTreeMap, BTreeSet}; - use miden_objects::{ note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, Digest, Felt, }; use crate::{ - convert, - domain::block::BlockInclusionProof, errors::{ConversionError, MissingFieldHelper}, generated::note as proto, - try_convert, }; impl TryFrom for NoteMetadata { @@ -89,41 +84,3 @@ impl TryFrom<&proto::NoteInclusionInBlockProof> for (NoteId, NoteInclusionProof) )) } } - 
-#[derive(Clone, Default, Debug)] -pub struct NoteAuthenticationInfo { - pub block_proofs: Vec, - pub note_proofs: BTreeMap, -} - -impl NoteAuthenticationInfo { - pub fn contains_note(&self, note: &NoteId) -> bool { - self.note_proofs.contains_key(note) - } - - pub fn note_ids(&self) -> BTreeSet { - self.note_proofs.keys().copied().collect() - } -} - -impl From for proto::NoteAuthenticationInfo { - fn from(value: NoteAuthenticationInfo) -> Self { - Self { - note_proofs: convert(&value.note_proofs), - block_proofs: convert(value.block_proofs), - } - } -} - -impl TryFrom for NoteAuthenticationInfo { - type Error = ConversionError; - - fn try_from(value: proto::NoteAuthenticationInfo) -> Result { - let result = Self { - block_proofs: try_convert(value.block_proofs)?, - note_proofs: try_convert(&value.note_proofs)?, - }; - - Ok(result) - } -} diff --git a/crates/proto/src/domain/nullifier.rs b/crates/proto/src/domain/nullifier.rs index 482183a0f..ef19397ca 100644 --- a/crates/proto/src/domain/nullifier.rs +++ b/crates/proto/src/domain/nullifier.rs @@ -35,40 +35,36 @@ impl TryFrom for Nullifier { } } -// NULLIFIER INPUT RECORD +// NULLIFIER WITNESS RECORD // ================================================================================================ #[derive(Clone, Debug)] -pub struct NullifierWitness { +pub struct NullifierWitnessRecord { pub nullifier: Nullifier, pub proof: SmtProof, } -impl TryFrom for NullifierWitness { +impl TryFrom for NullifierWitnessRecord { type Error = ConversionError; fn try_from( - nullifier_input_record: proto::responses::NullifierBlockInputRecord, + nullifier_witness_record: proto::responses::NullifierWitness, ) -> Result { Ok(Self { - nullifier: nullifier_input_record + nullifier: nullifier_witness_record .nullifier - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - nullifier - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(nullifier)))? 
.try_into()?, - proof: nullifier_input_record + proof: nullifier_witness_record .opening - .ok_or(proto::responses::NullifierBlockInputRecord::missing_field(stringify!( - opening - )))? + .ok_or(proto::responses::NullifierWitness::missing_field(stringify!(opening)))? .try_into()?, }) } } -impl From for proto::responses::NullifierBlockInputRecord { - fn from(value: NullifierWitness) -> Self { +impl From for proto::responses::NullifierWitness { + fn from(value: NullifierWitnessRecord) -> Self { Self { nullifier: Some(value.nullifier.into()), opening: Some(value.proof.into()), diff --git a/crates/proto/src/generated/block.rs b/crates/proto/src/generated/block.rs index 915dce7ec..1dd74f21c 100644 --- a/crates/proto/src/generated/block.rs +++ b/crates/proto/src/generated/block.rs @@ -36,16 +36,3 @@ pub struct BlockHeader { #[prost(fixed32, tag = "11")] pub timestamp: u32, } -/// Represents a block inclusion proof. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct BlockInclusionProof { - /// Block header associated with the inclusion proof. - #[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Merkle path associated with the inclusion proof. - #[prost(message, optional, tag = "2")] - pub mmr_path: ::core::option::Option, - /// The chain length associated with `mmr_path`. - #[prost(fixed32, tag = "3")] - pub chain_length: u32, -} diff --git a/crates/proto/src/generated/note.rs b/crates/proto/src/generated/note.rs index 77a9bae41..293eaddab 100644 --- a/crates/proto/src/generated/note.rs +++ b/crates/proto/src/generated/note.rs @@ -78,13 +78,3 @@ pub struct NoteSyncRecord { #[prost(message, optional, tag = "4")] pub merkle_path: ::core::option::Option, } -/// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct NoteAuthenticationInfo { - /// Proof of each note's inclusion in a block. 
- #[prost(message, repeated, tag = "1")] - pub note_proofs: ::prost::alloc::vec::Vec, - /// Proof of each block's inclusion in the chain. - #[prost(message, repeated, tag = "2")] - pub block_proofs: ::prost::alloc::vec::Vec, -} diff --git a/crates/proto/src/generated/requests.rs b/crates/proto/src/generated/requests.rs index 9194e084f..e4dc3e131 100644 --- a/crates/proto/src/generated/requests.rs +++ b/crates/proto/src/generated/requests.rs @@ -81,15 +81,26 @@ pub struct SyncNoteRequest { /// Returns data required to prove the next block. #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockInputsRequest { - /// ID of the account against which a transaction is executed. + /// IDs of all accounts updated in the proposed block for which to retrieve account witnesses. #[prost(message, repeated, tag = "1")] pub account_ids: ::prost::alloc::vec::Vec, - /// Set of nullifiers consumed by this transaction. + /// Nullifiers of all notes consumed by the block for which to retrieve witnesses. + /// + /// Due to note erasure it will generally not be possible to know the exact set of nullifiers + /// a block will create, unless we pre-execute note erasure. So in practice, this set of + /// nullifiers will be the set of nullifiers of all proven batches in the block, which is a + /// superset of the nullifiers the block may create. + /// + /// However, if it is known that a certain note will be erased, it would not be necessary to + /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs to be checked for existence in the database. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, + /// Array of block numbers referenced by all batches in the block. 
+ #[prost(fixed32, repeated, tag = "4")] + pub reference_blocks: ::prost::alloc::vec::Vec, } /// Returns the inputs for a transaction batch. #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/generated/responses.rs b/crates/proto/src/generated/responses.rs index ac764735d..f0fd0d8ac 100644 --- a/crates/proto/src/generated/responses.rs +++ b/crates/proto/src/generated/responses.rs @@ -83,24 +83,25 @@ pub struct SyncNoteResponse { } /// An account returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct AccountBlockInputRecord { +pub struct AccountWitness { /// The account ID. #[prost(message, optional, tag = "1")] pub account_id: ::core::option::Option, - /// The latest account hash, zero hash if the account doesn't exist. + /// The latest account state commitment used as the initial state of the requested block. + /// This will be the zero digest if the account doesn't exist. #[prost(message, optional, tag = "2")] - pub account_hash: ::core::option::Option, - /// Merkle path to verify the account's inclusion in the MMR. + pub initial_state_commitment: ::core::option::Option, + /// Merkle path to verify the account's inclusion in the account tree. #[prost(message, optional, tag = "3")] pub proof: ::core::option::Option, } /// A nullifier returned as a response to the `GetBlockInputs`. #[derive(Clone, PartialEq, ::prost::Message)] -pub struct NullifierBlockInputRecord { - /// The nullifier ID. +pub struct NullifierWitness { + /// The nullifier. #[prost(message, optional, tag = "1")] pub nullifier: ::core::option::Option, - /// Merkle path to verify the nullifier's inclusion in the MMR. + /// The SMT proof to verify the nullifier's inclusion in the nullifier tree. #[prost(message, optional, tag = "2")] pub opening: ::core::option::Option, } @@ -109,21 +110,24 @@ pub struct NullifierBlockInputRecord { pub struct GetBlockInputsResponse { /// The latest block header. 
#[prost(message, optional, tag = "1")] - pub block_header: ::core::option::Option, - /// Peaks of the above block's mmr, The `forest` value is equal to the block number. + pub latest_block_header: ::core::option::Option, + /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + /// the store**. #[prost(message, repeated, tag = "2")] - pub mmr_peaks: ::prost::alloc::vec::Vec, - /// The hashes of the requested accounts and their authentication paths. - #[prost(message, repeated, tag = "3")] - pub account_states: ::prost::alloc::vec::Vec, - /// The requested nullifiers and their authentication paths. - #[prost(message, repeated, tag = "4")] - pub nullifiers: ::prost::alloc::vec::Vec, - /// The list of requested notes which were found in the database. - #[prost(message, optional, tag = "5")] - pub found_unauthenticated_notes: ::core::option::Option< - super::note::NoteAuthenticationInfo, + pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< + super::note::NoteInclusionInBlockProof, >, + /// The serialized chain MMR which includes proofs for all blocks referenced by the + /// above note inclusion proofs as well as proofs for inclusion of the requested blocks + /// referenced by the batches in the block. + #[prost(bytes = "vec", tag = "3")] + pub chain_mmr: ::prost::alloc::vec::Vec, + /// The state commitments of the requested accounts and their authentication paths. + #[prost(message, repeated, tag = "4")] + pub account_witnesses: ::prost::alloc::vec::Vec, + /// The requested nullifiers and their authentication paths. + #[prost(message, repeated, tag = "5")] + pub nullifier_witnesses: ::prost::alloc::vec::Vec, } /// Represents the result of getting batch inputs. 
#[derive(Clone, PartialEq, ::prost::Message)] diff --git a/crates/proto/src/lib.rs b/crates/proto/src/lib.rs index 9290fc739..ca950f020 100644 --- a/crates/proto/src/lib.rs +++ b/crates/proto/src/lib.rs @@ -8,8 +8,8 @@ pub mod generated; // ================================================================================================ pub use domain::{ - account::{AccountInputRecord, AccountState}, + account::{AccountState, AccountWitnessRecord}, convert, - nullifier::NullifierWitness, + nullifier::NullifierWitnessRecord, try_convert, }; diff --git a/crates/rpc-proto/proto/block.proto b/crates/rpc-proto/proto/block.proto index f9a41a99c..74229793c 100644 --- a/crates/rpc-proto/proto/block.proto +++ b/crates/rpc-proto/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. - fixed32 chain_length = 3; -} diff --git a/crates/rpc-proto/proto/note.proto b/crates/rpc-proto/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/crates/rpc-proto/proto/note.proto +++ b/crates/rpc-proto/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block. - repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. 
- repeated block.BlockInclusionProof block_proofs = 2; -} diff --git a/crates/rpc-proto/proto/requests.proto b/crates/rpc-proto/proto/requests.proto index bf9fd557a..13d4568b1 100644 --- a/crates/rpc-proto/proto/requests.proto +++ b/crates/rpc-proto/proto/requests.proto @@ -76,12 +76,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/crates/rpc-proto/proto/responses.proto b/crates/rpc-proto/proto/responses.proto index ad1f353a5..3163f993c 100644 --- a/crates/rpc-proto/proto/responses.proto +++ b/crates/rpc-proto/proto/responses.proto @@ -87,42 +87,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. 
-message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. + repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. 
- repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 6df767f9e..3b2027455 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -11,7 +11,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::{AccountDelta, AccountId}, - block::{Block, BlockHeader, BlockNoteIndex, BlockNumber}, + block::{BlockHeader, BlockNoteIndex, BlockNumber, ProvenBlock}, crypto::{hash::rpo::RpoDigest, merkle::MerklePath, utils::Deserializable}, note::{NoteId, NoteInclusionProof, NoteMetadata, Nullifier}, transaction::TransactionId, @@ -87,7 +87,12 @@ impl NoteRecord { /// ensure ordering is correct. fn from_row(row: &rusqlite::Row<'_>) -> Result { let block_num = read_block_number(row, 0)?; - let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. 
+ let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = row.get_ref(3)?.as_blob()?; let note_id = RpoDigest::read_from_bytes(note_id)?; let note_type = row.get::<_, u8>(4)?.try_into()?; @@ -454,7 +459,7 @@ impl Db { &self, allow_acquire: oneshot::Sender<()>, acquire_done: oneshot::Receiver<()>, - block: Block, + block: ProvenBlock, notes: Vec<(NoteRecord, Option)>, ) -> Result<()> { self.pool @@ -469,7 +474,7 @@ impl Db { &transaction, &block.header(), ¬es, - block.nullifiers(), + block.created_nullifiers(), block.updated_accounts(), )?; diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index 25cb00d8c..c4f1894ba 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -783,7 +783,12 @@ pub fn select_notes_since_block_by_tag_and_sender( let mut res = Vec::new(); while let Some(row) = rows.next()? { let block_num = read_block_number(row, 0)?; - let note_index = BlockNoteIndex::new(row.get(1)?, row.get(2)?)?; + let batch_idx = row.get(1)?; + let note_idx_in_batch = row.get(2)?; + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. + let note_index = BlockNoteIndex::new(batch_idx, note_idx_in_batch) + .expect("batch and note index from DB should be valid"); let note_id = read_from_blob_column(row, 3)?; let note_type = row.get::<_, u8>(4)?; let sender = read_from_blob_column(row, 5)?; @@ -875,7 +880,11 @@ pub fn select_note_inclusion_proofs( let batch_index = row.get(2)?; let note_index = row.get(3)?; - let node_index_in_block = BlockNoteIndex::new(batch_index, note_index)?.leaf_index_value(); + // SAFETY: We can assume the batch and note indices stored in the DB are valid so this + // should never panic. 
+ let node_index_in_block = BlockNoteIndex::new(batch_index, note_index) + .expect("batch and note index from DB should be valid") + .leaf_index_value(); let merkle_path_data = row.get_ref(4)?.as_blob()?; let merkle_path = MerklePath::read_from_bytes(merkle_path_data)?; diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index 2c6fa9e36..ebca9f753 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -11,7 +11,7 @@ use miden_objects::{ }, note::Nullifier, transaction::OutputNote, - AccountDeltaError, AccountError, BlockError, NoteError, + AccountDeltaError, AccountError, NoteError, }; use rusqlite::types::FromSqlError; use thiserror::Error; @@ -41,8 +41,6 @@ pub enum DatabaseError { AccountError(#[from] AccountError), #[error("account delta error")] AccountDeltaError(#[from] AccountDeltaError), - #[error("block error")] - BlockError(#[from] BlockError), #[error("closed channel")] ClosedChannel(#[from] RecvError), #[error("deserialization failed")] @@ -75,8 +73,6 @@ pub enum DatabaseError { AccountsNotFoundInDb(Vec), #[error("account {0} is not on the chain")] AccountNotPublic(AccountId), - #[error("block {0} not found")] - BlockNotFoundInDb(BlockNumber), #[error("data corrupted: {0}")] DataCorrupted(String), #[error("SQLite pool interaction failed: {0}")] @@ -95,8 +91,7 @@ impl From for Status { match err { DatabaseError::AccountNotFoundInDb(_) | DatabaseError::AccountsNotFoundInDb(_) - | DatabaseError::AccountNotPublic(_) - | DatabaseError::BlockNotFoundInDb(_) => Status::not_found(err.to_string()), + | DatabaseError::AccountNotPublic(_) => Status::not_found(err.to_string()), _ => Status::internal(err.to_string()), } @@ -136,8 +131,9 @@ pub enum GenesisError { // --------------------------------------------------------------------------------------------- #[error("database error")] DatabaseError(#[from] DatabaseError), + // TODO: Check if needed. 
#[error("block error")] - BlockError(#[from] BlockError), + BlockError, #[error("merkle error")] MerkleError(#[from] MerkleError), #[error("failed to deserialize genesis file")] @@ -230,27 +226,15 @@ pub enum GetBlockHeaderError { #[derive(Error, Debug)] pub enum GetBlockInputsError { - #[error("account error")] - AccountError(#[from] AccountError), - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("database doesn't have any block header data")] - DbBlockHeaderEmpty, - #[error("failed to get MMR peaks for forest ({forest}): {error}")] - FailedToGetMmrPeaksForForest { forest: usize, error: MmrError }, - #[error("chain MMR forest expected to be 1 less than latest header's block num. Chain MMR forest: {forest}, block num: {block_num}")] - IncorrectChainMmrForestNumber { forest: usize, block_num: BlockNumber }, - #[error("note inclusion proof MMR error")] - NoteInclusionMmr(#[from] MmrError), -} - -impl From for GetBlockInputsError { - fn from(value: GetNoteAuthenticationInfoError) -> Self { - match value { - GetNoteAuthenticationInfoError::DatabaseError(db_err) => db_err.into(), - GetNoteAuthenticationInfoError::MmrError(mmr_err) => Self::NoteInclusionMmr(mmr_err), - } - } + #[error("failed to select note inclusion proofs")] + SelectNoteInclusionProofError(#[source] DatabaseError), + #[error("failed to select block headers")] + SelectBlockHeaderError(#[source] DatabaseError), + #[error("highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}")] + UnknownBatchBlockReference { + highest_block_number: BlockNumber, + latest_block_number: BlockNumber, + }, } #[derive(Error, Debug)] @@ -273,14 +257,6 @@ pub enum NoteSyncError { MmrError(#[from] MmrError), } -#[derive(Error, Debug)] -pub enum GetNoteAuthenticationInfoError { - #[error("database error")] - DatabaseError(#[from] DatabaseError), - #[error("Mmr error")] - MmrError(#[from] MmrError), -} - #[derive(Error, Debug)] pub enum 
GetBatchInputsError { #[error("failed to select note inclusion proofs")] @@ -290,7 +266,7 @@ pub enum GetBatchInputsError { #[error("set of blocks refernced by transactions is empty")] TransactionBlockReferencesEmpty, #[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] - TransactionBlockReferenceNewerThanLatestBlock { + UnknownTransactionBlockReference { highest_block_num: BlockNumber, latest_block_num: BlockNumber, }, diff --git a/crates/store/src/genesis.rs b/crates/store/src/genesis.rs index de128879c..d78990734 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -1,10 +1,11 @@ use miden_lib::transaction::TransactionKernel; use miden_objects::{ account::{delta::AccountUpdateDetails, Account}, - block::{Block, BlockAccountUpdate, BlockHeader, BlockNumber}, - crypto::merkle::{EmptySubtreeRoots, MmrPeaks, SimpleSmt, Smt}, + block::{BlockAccountUpdate, BlockHeader, BlockNoteTree, BlockNumber, ProvenBlock}, + crypto::merkle::{MmrPeaks, SimpleSmt, Smt}, + note::Nullifier, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, ACCOUNT_TREE_DEPTH, BLOCK_NOTE_TREE_DEPTH, + Digest, ACCOUNT_TREE_DEPTH, }; use crate::errors::GenesisError; @@ -26,7 +27,7 @@ impl GenesisState { } /// Returns the block header and the account SMT - pub fn into_block(self) -> Result { + pub fn into_block(self) -> Result { let accounts: Vec = self .accounts .iter() @@ -51,21 +52,35 @@ impl GenesisState { (update.account_id().prefix().into(), update.final_state_commitment().into()) }))?; + let empty_nullifiers: Vec = Vec::new(); + let empty_nullifier_tree = Smt::new(); + + let empty_output_notes = Vec::new(); + let empty_block_note_tree = BlockNoteTree::empty(); + let header = BlockHeader::new( self.version, Digest::default(), BlockNumber::GENESIS, MmrPeaks::new(0, Vec::new()).unwrap().hash_peaks(), account_smt.root(), - Smt::default().root(), - 
*EmptySubtreeRoots::entry(BLOCK_NOTE_TREE_DEPTH, 0), + empty_nullifier_tree.root(), + empty_block_note_tree.root(), Digest::default(), TransactionKernel::kernel_root(), Digest::default(), self.timestamp, ); - Block::new(header, accounts, vec![], vec![]).map_err(Into::into) + // SAFETY: Header and accounts should be valid by construction. + // No notes or nullifiers are created at genesis, which is consistent with the above empty + // block note tree root and empty nullifier tree root. + Ok(ProvenBlock::new_unchecked( + header, + accounts, + empty_output_notes, + empty_nullifiers, + )) } } diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index be0ea560d..f69c8779a 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -29,7 +29,7 @@ use miden_node_proto::{ }; use miden_objects::{ account::AccountId, - block::{Block, BlockNumber}, + block::{BlockNumber, ProvenBlock}, crypto::hash::rpo::RpoDigest, note::{NoteId, Nullifier}, utils::{Deserializable, Serializable}, @@ -304,7 +304,7 @@ impl api_server::Api for StoreApi { debug!(target: COMPONENT, ?request); - let block = Block::read_from_bytes(&request.block).map_err(|err| { + let block = ProvenBlock::read_from_bytes(&request.block).map_err(|err| { Status::invalid_argument(format!("Block deserialization error: {err}")) })?; @@ -315,8 +315,8 @@ impl api_server::Api for StoreApi { block_num, block_hash = %block.hash(), account_count = block.updated_accounts().len(), - note_count = block.notes().count(), - nullifier_count = block.nullifiers().len(), + note_count = block.output_notes().count(), + nullifier_count = block.created_nullifiers().len(), ); self.state.apply_block(block).await?; @@ -338,15 +338,16 @@ impl api_server::Api for StoreApi { ) -> Result, Status> { let request = request.into_inner(); - let nullifiers = validate_nullifiers(&request.nullifiers)?; let account_ids = read_account_ids(&request.account_ids)?; + let nullifiers = 
validate_nullifiers(&request.nullifiers)?; let unauthenticated_notes = validate_notes(&request.unauthenticated_notes)?; + let reference_blocks = read_block_numbers(&request.reference_blocks); let unauthenticated_notes = unauthenticated_notes.into_iter().collect(); self.state - .get_block_inputs(&account_ids, &nullifiers, unauthenticated_notes) + .get_block_inputs(account_ids, nullifiers, unauthenticated_notes, reference_blocks) .await - .map(Into::into) + .map(GetBlockInputsResponse::from) .map(Response::new) .map_err(internal_error) } @@ -569,3 +570,8 @@ fn validate_notes(notes: &[generated::digest::Digest]) -> Result, St .collect::>() .map_err(|_| invalid_argument("Digest field is not in the modulus range")) } + +#[instrument(target = COMPONENT, skip_all)] +fn read_block_numbers(block_numbers: &[u32]) -> BTreeSet { + block_numbers.iter().map(|raw_number| BlockNumber::from(*raw_number)).collect() +} diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index c6b9e1ac9..d1ec2b48f 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -10,22 +10,16 @@ use std::{ }; use miden_node_proto::{ - convert, domain::{ account::{AccountInfo, AccountProofRequest, StorageMapKeysProof}, batch::BatchInputs, - block::BlockInclusionProof, - note::NoteAuthenticationInfo, }, - generated::responses::{ - AccountProofsResponse, AccountStateHeader, GetBlockInputsResponse, StorageSlotMapProof, - }, - AccountInputRecord, NullifierWitness, + generated::responses::{AccountProofsResponse, AccountStateHeader, StorageSlotMapProof}, }; use miden_node_utils::formatting::format_array; use miden_objects::{ account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, - block::{Block, BlockHeader, BlockNumber}, + block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness, ProvenBlock}, crypto::{ hash::rpo::RpoDigest, merkle::{ @@ -49,8 +43,8 @@ use crate::{ db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ 
ApplyBlockError, DatabaseError, GetBatchInputsError, GetBlockHeaderError, - GetBlockInputsError, GetNoteAuthenticationInfoError, InvalidBlockError, NoteSyncError, - StateInitializationError, StateSyncError, + GetBlockInputsError, InvalidBlockError, NoteSyncError, StateInitializationError, + StateSyncError, }, nullifier_tree::NullifierTree, COMPONENT, @@ -58,37 +52,6 @@ use crate::{ // STRUCTURES // ================================================================================================ -/// Information needed from the store to validate and build a block -#[derive(Debug)] -pub struct BlockInputs { - /// Previous block header - pub block_header: BlockHeader, - - /// MMR peaks for the current chain state - pub chain_peaks: MmrPeaks, - - /// The hashes of the requested accounts and their authentication paths - pub account_states: Vec, - - /// The requested nullifiers and their authentication paths - pub nullifiers: Vec, - - /// List of notes found in the store - pub found_unauthenticated_notes: NoteAuthenticationInfo, -} - -impl From for GetBlockInputsResponse { - fn from(value: BlockInputs) -> Self { - Self { - block_header: Some(value.block_header.into()), - mmr_peaks: convert(value.chain_peaks.peaks()), - account_states: convert(value.account_states), - nullifiers: convert(value.nullifiers), - found_unauthenticated_notes: Some(value.found_unauthenticated_notes.into()), - } - } -} - #[derive(Debug)] pub struct TransactionInputs { pub account_hash: RpoDigest, @@ -150,12 +113,14 @@ impl Blockchain { &self.0 } - /// Returns the latest block number and partial mmr. + /// Creates a [`PartialMmr`] at the state of the latest block (i.e. the block's chain root will + /// match the hashed peaks of the returned partial MMR). This MMR will include authentication + /// paths for all blocks in the provided set. 
pub fn partial_mmr_from_blocks( &self, blocks: &BTreeSet, latest_block_number: BlockNumber, - ) -> Result { + ) -> PartialMmr { // Using latest block as the target forest means we take the state of the MMR one before // the latest block. This is because the latest block will be used as the reference // block of the batch and will be added to the MMR by the batch kernel. @@ -183,7 +148,8 @@ impl Blockchain { .track(block_num, leaf, &path) .expect("filling partial mmr with data from mmr should succeed"); } - Ok(partial_mmr) + + partial_mmr } } @@ -268,12 +234,12 @@ impl State { /// released. // TODO: This span is logged in a root span, we should connect it to the parent span. #[instrument(target = COMPONENT, skip_all, err)] - pub async fn apply_block(&self, block: Block) -> Result<(), ApplyBlockError> { + pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; let header = block.header(); - let tx_hash = block.compute_tx_hash(); + let tx_hash = BlockHeader::compute_tx_commitment(block.transactions()); if header.tx_hash() != tx_hash { return Err(InvalidBlockError::InvalidBlockTxHash { expected: tx_hash, @@ -324,7 +290,7 @@ impl State { // nullifiers can be produced only once let duplicate_nullifiers: Vec<_> = block - .nullifiers() + .created_nullifiers() .iter() .filter(|&n| inner.nullifier_tree.get_block_num(n).is_some()) .copied() @@ -343,7 +309,7 @@ impl State { // compute update for nullifier tree let nullifier_tree_update = inner.nullifier_tree.compute_mutations( - block.nullifiers().iter().map(|nullifier| (*nullifier, block_num)), + block.created_nullifiers().iter().map(|nullifier| (*nullifier, block_num)), ); if nullifier_tree_update.root() != header.nullifier_root() { @@ -373,13 +339,13 @@ impl State { }; // build note tree - let note_tree = block.build_note_tree(); + let note_tree = block.build_output_note_tree(); if note_tree.root() != 
header.note_root() { return Err(InvalidBlockError::NewBlockInvalidNoteRoot.into()); } let notes = block - .notes() + .output_notes() .map(|(note_index, note)| { let (details, nullifier) = match note { OutputNote::Full(note) => (Some(note.to_bytes()), Some(note.nullifier())), @@ -532,65 +498,6 @@ impl State { self.db.select_notes_by_id(note_ids).await } - /// Queries all the note inclusion proofs matching a certain Note IDs from the database. - pub async fn get_note_authentication_info( - &self, - note_ids: BTreeSet, - ) -> Result { - // First we grab note inclusion proofs for the known notes. These proofs only - // prove that the note was included in a given block. We then also need to prove that - // each of those blocks is included in the chain. - let note_proofs = self.db.select_note_inclusion_proofs(note_ids).await?; - - // The set of blocks that the notes are included in. - let blocks = note_proofs - .values() - .map(|proof| proof.location().block_num()) - .collect::>(); - - // Grab the block merkle paths from the inner state. - // - // NOTE: Scoped block to automatically drop the mutex guard asap. - // - // We also avoid accessing the db in the block as this would delay - // dropping the guard. 
- let (chain_length, merkle_paths) = { - let state = self.inner.read().await; - let chain_length = state.blockchain.chain_length().as_usize(); - - let paths = blocks - .iter() - .map(|&block_num| { - let proof = state.blockchain.open(block_num.as_usize())?.merkle_path; - - Ok::<_, MmrError>((block_num, proof)) - }) - .collect::, MmrError>>()?; - - let chain_length = u32::try_from(chain_length) - .expect("Forest is a chain length so should fit into a u32"); - - (chain_length.into(), paths) - }; - - let headers = self.db.select_block_headers(blocks.into_iter()).await?; - - let headers = headers - .into_iter() - .map(|header| (header.block_num(), header)) - .collect::>(); - - let mut block_proofs = Vec::with_capacity(merkle_paths.len()); - for (block_num, mmr_path) in merkle_paths { - let block_header = - *headers.get(&block_num).ok_or(DatabaseError::BlockNotFoundInDb(block_num))?; - - block_proofs.push(BlockInclusionProof { block_header, mmr_path, chain_length }); - } - - Ok(NoteAuthenticationInfo { block_proofs, note_proofs }) - } - /// Fetches the inputs for a transaction batch from the database. /// /// ## Inputs @@ -633,7 +540,7 @@ impl State { // Collect all blocks we need to query without duplicates, which is: // - all blocks for which we need to prove note inclusion. // - all blocks referenced by transactions in the batch. - let mut blocks = tx_reference_blocks; + let mut blocks: BTreeSet = tx_reference_blocks; blocks.extend(note_blocks); // Scoped block to automatically drop the read lock guard as soon as we're done. 
@@ -646,7 +553,7 @@ impl State { let highest_block_num = *blocks.last().expect("we should have checked for empty block references"); if highest_block_num > latest_block_num { - return Err(GetBatchInputsError::TransactionBlockReferenceNewerThanLatestBlock { + return Err(GetBatchInputsError::UnknownTransactionBlockReference { highest_block_num, latest_block_num, }); @@ -659,7 +566,7 @@ impl State { ( latest_block_num, - inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num)?, + inner_state.blockchain.partial_mmr_from_blocks(&blocks, latest_block_num), ) }; @@ -779,64 +686,135 @@ impl State { /// Returns data needed by the block producer to construct and prove the next block. pub async fn get_block_inputs( &self, - account_ids: &[AccountId], - nullifiers: &[Nullifier], + account_ids: Vec, + nullifiers: Vec, unauthenticated_notes: BTreeSet, + reference_blocks: BTreeSet, ) -> Result { - let inner = self.inner.read().await; + // Get the note inclusion proofs from the DB. + // We do this first so we have to acquire the lock to the state just once. There we need the + // reference blocks of the note proofs to get their authentication paths in the chain MMR. + let unauthenticated_note_proofs = self + .db + .select_note_inclusion_proofs(unauthenticated_notes) + .await + .map_err(GetBlockInputsError::SelectNoteInclusionProofError)?; - let latest = self + // The set of blocks that the notes are included in. + let note_proof_reference_blocks = + unauthenticated_note_proofs.values().map(|proof| proof.location().block_num()); + + // Collect all blocks we need to prove inclusion for, without duplicates. 
+ let mut blocks = reference_blocks; + blocks.extend(note_proof_reference_blocks); + + let (latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr) = + self.get_block_inputs_witnesses(&mut blocks, account_ids, nullifiers).await?; + + // Fetch the block headers for all blocks in the partial MMR plus the latest one which will + // be used as the previous block header of the block being built. + let mut headers = self .db - .select_block_header_by_block_num(None) - .await? - .ok_or(GetBlockInputsError::DbBlockHeaderEmpty)?; + .select_block_headers(blocks.into_iter().chain(std::iter::once(latest_block_number))) + .await + .map_err(GetBlockInputsError::SelectBlockHeaderError)?; + + // Find and remove the latest block as we must not add it to the chain MMR, since it is + // not yet in the chain. + let latest_block_header_index = headers + .iter() + .enumerate() + .find_map(|(index, header)| { + (header.block_num() == latest_block_number).then_some(index) + }) + .expect("DB should have returned the header of the latest block header"); + + // The order doesn't matter for ChainMmr::new, so swap remove is fine. + let latest_block_header = headers.swap_remove(latest_block_header_index); + + // SAFETY: This should not error because: + // - we're passing exactly the block headers that we've added to the partial MMR, + // - so none of the block header's block numbers should exceed the chain length of the + // partial MMR, + // - and we've added blocks to a BTreeSet, so there can be no duplicates. + let chain_mmr = ChainMmr::new(partial_mmr, headers) + .expect("partial mmr and block headers should be consistent"); + + Ok(BlockInputs::new( + latest_block_header, + chain_mmr, + account_witnesses, + nullifier_witnesses, + unauthenticated_note_proofs, + )) + } + + /// Get account and nullifier witnesses for the requested account IDs and nullifier as well as + /// the [`PartialMmr`] for the given blocks. 
The MMR won't contain the latest block and its + /// number is removed from `blocks` and returned separately. + /// + /// This method acquires the lock to the inner state and does not access the DB so we release + /// the lock asap. + async fn get_block_inputs_witnesses( + &self, + blocks: &mut BTreeSet, + account_ids: Vec, + nullifiers: Vec, + ) -> Result< + ( + BlockNumber, + BTreeMap, + BTreeMap, + PartialMmr, + ), + GetBlockInputsError, + > { + let inner = self.inner.read().await; + + let latest_block_number = inner.latest_block_num(); - // sanity check - if inner.blockchain.chain_tip() != latest.block_num() { - return Err(GetBlockInputsError::IncorrectChainMmrForestNumber { - forest: inner.blockchain.chain_tip().as_usize(), - block_num: latest.block_num(), + // If `blocks` is empty, use the latest block number which will never trigger the error. + let highest_block_number = blocks.last().copied().unwrap_or(latest_block_number); + if highest_block_number > latest_block_number { + return Err(GetBlockInputsError::UnknownBatchBlockReference { + highest_block_number, + latest_block_number, }); } - // using current block number gets us the peaks of the chain MMR as of one block ago; - // this is done so that latest.chain_root matches the returned peaks - let chain_peaks = - inner.blockchain.peaks_at(latest.block_num().as_usize()).map_err(|error| { - GetBlockInputsError::FailedToGetMmrPeaksForForest { - forest: latest.block_num().as_usize(), - error, - } - })?; - let account_states = account_ids + // The latest block is not yet in the chain MMR, so we can't (and don't need to) prove its + // inclusion in the chain. + blocks.remove(&latest_block_number); + + // Fetch the partial MMR at the state of the latest block with authentication paths for the + // provided set of blocks. + let partial_mmr = inner.blockchain.partial_mmr_from_blocks(blocks, latest_block_number); + + // Fetch witnesses for all acounts. 
+ let account_witnesses = account_ids .iter() .copied() .map(|account_id| { - let ValuePath { value: account_hash, path: proof } = - inner.account_tree.open(&LeafIndex::new_max_depth(account_id.prefix().into())); - Ok(AccountInputRecord { account_id, account_hash, proof }) + let ValuePath { + value: latest_state_commitment, + path: proof, + } = inner.account_tree.open(&account_id.into()); + (account_id, AccountWitness::new(latest_state_commitment, proof)) }) - .collect::>()?; + .collect::>(); - let nullifiers: Vec = nullifiers + // Fetch witnesses for all nullifiers. We don't check whether the nullifiers are spent or + // not as this is done as part of proposing the block. + let nullifier_witnesses: BTreeMap = nullifiers .iter() + .copied() .map(|nullifier| { - let proof = inner.nullifier_tree.open(nullifier); - - NullifierWitness { nullifier: *nullifier, proof } + let proof = inner.nullifier_tree.open(&nullifier); + (nullifier, NullifierWitness::new(proof)) }) .collect(); - let found_unauthenticated_notes = - self.get_note_authentication_info(unauthenticated_notes).await?; - - Ok(BlockInputs { - block_header: latest, - chain_peaks, - account_states, - nullifiers, - found_unauthenticated_notes, - }) + Ok((latest_block_number, account_witnesses, nullifier_witnesses, partial_mmr)) } /// Returns data needed by the block producer to verify transactions validity. diff --git a/proto/block.proto b/proto/block.proto index f9a41a99c..74229793c 100644 --- a/proto/block.proto +++ b/proto/block.proto @@ -39,15 +39,3 @@ message BlockHeader { // The time when the block was created. fixed32 timestamp = 11; } - -// Represents a block inclusion proof. -message BlockInclusionProof { - // Block header associated with the inclusion proof. - BlockHeader block_header = 1; - - // Merkle path associated with the inclusion proof. - merkle.MerklePath mmr_path = 2; - - // The chain length associated with `mmr_path`. 
- fixed32 chain_length = 3; -} diff --git a/proto/note.proto b/proto/note.proto index 9acfbd847..4a7a69703 100644 --- a/proto/note.proto +++ b/proto/note.proto @@ -80,12 +80,3 @@ message NoteSyncRecord { // The note's inclusion proof in the block. merkle.MerklePath merkle_path = 4; } - -// Represents proof of notes inclusion in the block(s) and block(s) inclusion in the chain. -message NoteAuthenticationInfo { - // Proof of each note's inclusion in a block. - repeated note.NoteInclusionInBlockProof note_proofs = 1; - - // Proof of each block's inclusion in the chain. - repeated block.BlockInclusionProof block_proofs = 2; -} diff --git a/proto/requests.proto b/proto/requests.proto index bf9fd557a..13d4568b1 100644 --- a/proto/requests.proto +++ b/proto/requests.proto @@ -76,12 +76,25 @@ message SyncNoteRequest { // Returns data required to prove the next block. message GetBlockInputsRequest { - // ID of the account against which a transaction is executed. + // IDs of all accounts updated in the proposed block for which to retrieve account witnesses. repeated account.AccountId account_ids = 1; - // Set of nullifiers consumed by this transaction. + + // Nullifiers of all notes consumed by the block for which to retrieve witnesses. + // + // Due to note erasure it will generally not be possible to know the exact set of nullifiers + // a block will create, unless we pre-execute note erasure. So in practice, this set of + // nullifiers will be the set of nullifiers of all proven batches in the block, which is a + // superset of the nullifiers the block may create. + // + // However, if it is known that a certain note will be erased, it would not be necessary to + // provide a nullifier witness for it. repeated digest.Digest nullifiers = 2; - // Array of note IDs to be checked for existence in the database. + + // Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. 
repeated digest.Digest unauthenticated_notes = 3; + + // Array of block numbers referenced by all batches in the block. + repeated fixed32 reference_blocks = 4; } // Returns the inputs for a transaction batch. diff --git a/proto/responses.proto b/proto/responses.proto index ad1f353a5..3163f993c 100644 --- a/proto/responses.proto +++ b/proto/responses.proto @@ -87,42 +87,46 @@ message SyncNoteResponse { } // An account returned as a response to the `GetBlockInputs`. -message AccountBlockInputRecord { +message AccountWitness { // The account ID. account.AccountId account_id = 1; - // The latest account hash, zero hash if the account doesn't exist. - digest.Digest account_hash = 2; + // The latest account state commitment used as the initial state of the requested block. + // This will be the zero digest if the account doesn't exist. + digest.Digest initial_state_commitment = 2; - // Merkle path to verify the account's inclusion in the MMR. + // Merkle path to verify the account's inclusion in the account tree. merkle.MerklePath proof = 3; } // A nullifier returned as a response to the `GetBlockInputs`. -message NullifierBlockInputRecord { - // The nullifier ID. +message NullifierWitness { + // The nullifier. digest.Digest nullifier = 1; - // Merkle path to verify the nullifier's inclusion in the MMR. + // The SMT proof to verify the nullifier's inclusion in the nullifier tree. smt.SmtOpening opening = 2; } // Represents the result of getting block inputs. message GetBlockInputsResponse { // The latest block header. - block.BlockHeader block_header = 1; + block.BlockHeader latest_block_header = 1; - // Peaks of the above block's mmr, The `forest` value is equal to the block number. - repeated digest.Digest mmr_peaks = 2; + // Proof of each requested unauthenticated note's inclusion in a block, **if it existed in + // the store**. 
+ repeated note.NoteInclusionInBlockProof unauthenticated_note_proofs = 2; - // The hashes of the requested accounts and their authentication paths. - repeated AccountBlockInputRecord account_states = 3; + // The serialized chain MMR which includes proofs for all blocks referenced by the + // above note inclusion proofs as well as proofs for inclusion of the requested blocks + // referenced by the batches in the block. + bytes chain_mmr = 3; - // The requested nullifiers and their authentication paths. - repeated NullifierBlockInputRecord nullifiers = 4; + // The state commitments of the requested accounts and their authentication paths. + repeated AccountWitness account_witnesses = 4; - // The list of requested notes which were found in the database. - note.NoteAuthenticationInfo found_unauthenticated_notes = 5; + // The requested nullifiers and their authentication paths. + repeated NullifierWitness nullifier_witnesses = 5; } // Represents the result of getting batch inputs. From 540e097521a5f31623d818474f00bc360fde81a9 Mon Sep 17 00:00:00 2001 From: Philipp Gackstatter Date: Tue, 25 Feb 2025 08:25:14 +0100 Subject: [PATCH 22/27] fix: cargo machete pinned to specific version (#720) --- .github/workflows/lint.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index e4ac4540a..8425d4740 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -94,8 +94,15 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@main - - name: machete - uses: bnjbvr/cargo-machete@main + - name: Install cargo-machete + uses: clechasseur/rs-cargo@v2 + with: + command: install + args: cargo-machete@0.7.0 + - name: Machete + uses: clechasseur/rs-cargo@v2 + with: + command: machete proto: name: proto check From d657c7e00db163492cf8eb00690122a40273918e Mon Sep 17 00:00:00 2001 From: Tomas Date: Fri, 28 Feb 2025 17:06:13 -0300 Subject: [PATCH 23/27] feat: test faucet website 
(#702) * feat: add faucet website test * review: refactor test setup and comment * review: update js script to check failed requests * review: remove unreadable literals and comment genesis block * review: improve socket addr error handling * review: document test * review: remove sleep and move code * fix: add stdout to wait for chromedriver * chore: format * review: generate faucet account in the test * review: remove unnecessary PathBuf --- Cargo.lock | 252 ++++++++++++++++++++++++++++++--- bin/faucet/Cargo.toml | 6 + bin/faucet/src/main.rs | 135 +++++++++++++++++- bin/faucet/src/stub_rpc_api.rs | 166 ++++++++++++++++++++++ 4 files changed, 535 insertions(+), 24 deletions(-) create mode 100644 bin/faucet/src/stub_rpc_api.rs diff --git a/Cargo.lock b/Cargo.lock index fe8458343..07858d22f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -200,7 +200,7 @@ dependencies = [ "axum-core 0.4.5", "bytes", "futures-util", - "http", + "http 1.2.0", "http-body", "http-body-util", "itoa", @@ -227,7 +227,7 @@ dependencies = [ "bytes", "form_urlencoded", "futures-util", - "http", + "http 1.2.0", "http-body", "http-body-util", "hyper", @@ -260,7 +260,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", + "http 1.2.0", "http-body", "http-body-util", "mime", @@ -279,7 +279,7 @@ checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" dependencies = [ "bytes", "futures-util", - "http", + "http 1.2.0", "http-body", "http-body-util", "mime", @@ -315,6 +315,12 @@ dependencies = [ "backtrace", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "base64" version = "0.22.1" @@ -574,6 +580,37 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +[[package]] +name = "cookie" +version 
= "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "time", + "version_check", +] + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation" version = "0.10.0" @@ -852,6 +889,31 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" +[[package]] +name = "fantoccini" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7722aeee9c2be6fa131166990295089d73d973012b758a2208b9ba51af5dd024" +dependencies = [ + "base64 0.22.1", + "cookie 0.18.1", + "futures-core", + "futures-util", + "http 1.2.0", + "http-body-util", + "hyper", + "hyper-tls", + "hyper-util", + "mime", + "openssl", + "serde", + "serde_json", + "time", + "tokio", + "url", + "webdriver", +] + [[package]] name = "fastrand" version = "2.3.0" @@ -892,6 +954,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1061,7 +1138,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http", + "http 1.2.0", "indexmap 2.7.1", "slab", "tokio", @@ -1117,6 +1194,17 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.2.0" @@ -1135,7 +1223,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.2.0", ] [[package]] @@ -1146,7 +1234,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http", + "http 1.2.0", "http-body", "pin-project-lite", ] @@ -1173,7 +1261,7 @@ dependencies = [ "futures-channel", "futures-util", "h2", - "http", + "http 1.2.0", "http-body", "httparse", "httpdate", @@ -1197,6 +1285,22 @@ dependencies = [ "tower-service", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.10" @@ -1206,7 +1310,7 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http", + "http 1.2.0", "http-body", "hyper", "pin-project-lite", @@ -1761,7 
+1865,8 @@ dependencies = [ "anyhow", "axum 0.8.1", "clap", - "http", + "fantoccini", + "http 1.2.0", "http-body-util", "miden-lib", "miden-node-proto", @@ -1772,11 +1877,14 @@ dependencies = [ "rand", "rand_chacha", "serde", + "serde_json", "static-files", "thiserror 2.0.11", "tokio", + "tokio-stream", "toml", "tonic", + "tonic-web", "tower 0.5.2", "tower-http 0.6.2", "tracing", @@ -1970,7 +2078,7 @@ version = "0.8.0" dependencies = [ "anyhow", "figment", - "http", + "http 1.2.0", "itertools 0.14.0", "miden-objects", "opentelemetry", @@ -2161,6 +2269,23 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +[[package]] +name = "native-tls" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dab59f8e050d5df8e4dd87d9206fb6f65a483e20ac9fda365ade4fab353196c" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework 2.11.1", + "security-framework-sys", + "tempfile", +] + [[package]] name = "new_debug_unreachable" version = "1.0.6" @@ -2312,12 +2437,50 @@ version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" +[[package]] +name = "openssl" +version = "0.10.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e14130c6a98cd258fdcb0fb6d744152343ff729cbfcb28c656a9d12b999fbcd" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "openssl-probe" version = "0.1.6" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-sys" +version = "0.9.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bb61ea9811cc39e3c2069f40b8b8e2e70d8569b361f879786cc7ed48b777cdd" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "opentelemetry" version = "0.28.0" @@ -2340,7 +2503,7 @@ checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" dependencies = [ "async-trait", "futures-core", - "http", + "http 1.2.0", "opentelemetry", "opentelemetry-proto", "opentelemetry_sdk", @@ -2956,7 +3119,7 @@ dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework", + "security-framework 3.2.0", ] [[package]] @@ -3039,6 +3202,19 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework" version = "3.2.0" @@ -3046,7 +3222,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271720403f46ca04f7ba6f55d438f8bd878d6b8ca0a1046e8228c4145bcbb316" dependencies = [ "bitflags", - "core-foundation", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -3506,6 +3682,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + 
"native-tls", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.1" @@ -3583,10 +3769,10 @@ dependencies = [ "async-stream", "async-trait", "axum 0.7.9", - "base64", + "base64 0.22.1", "bytes", "h2", - "http", + "http 1.2.0", "http-body", "http-body-util", "hyper", @@ -3627,9 +3813,9 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5299dd20801ad736dccb4a5ea0da7376e59cd98f213bf1c3d478cf53f4834b58" dependencies = [ - "base64", + "base64 0.22.1", "bytes", - "http", + "http 1.2.0", "http-body", "http-body-util", "pin-project", @@ -3685,7 +3871,7 @@ checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" dependencies = [ "bitflags", "bytes", - "http", + "http 1.2.0", "http-body", "http-body-util", "pin-project-lite", @@ -3701,7 +3887,7 @@ checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "bitflags", "bytes", - "http", + "http 1.2.0", "http-body", "pin-project-lite", "tower-layer", @@ -3888,6 +4074,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "unicode-width" version = "0.1.14" @@ -4120,6 +4312,26 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webdriver" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "144ab979b12d36d65065635e646549925de229954de2eb3b47459b432a42db71" +dependencies = [ + "base64 0.21.7", + "bytes", + "cookie 0.16.2", + "http 0.2.12", + "log", + "serde", + "serde_derive", + "serde_json", + "thiserror 1.0.69", + "time", + "unicode-segmentation", + "url", +] + [[package]] name = "winapi" version = "0.3.9" diff --git 
a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 83194cc0f..a1b66b565 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -43,3 +43,9 @@ url = { workspace = true } # Required to inject build metadata. miden-node-utils = { workspace = true, features = ["vergen"] } static-files = "0.2" + +[dev-dependencies] +fantoccini = { version = "0.21" } +serde_json = { version = "1.0" } +tokio-stream = { workspace = true, features = ["net"] } +tonic-web = { version = "0.12" } diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index 64e223d0b..c6dce5eed 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -5,6 +5,9 @@ mod handlers; mod state; mod store; +#[cfg(test)] +mod stub_rpc_api; + use std::path::PathBuf; use anyhow::Context; @@ -96,11 +99,14 @@ async fn main() -> anyhow::Result<()> { let cli = Cli::parse(); + run_faucet_command(cli).await +} + +async fn run_faucet_command(cli: Cli) -> anyhow::Result<()> { match &cli.command { Command::Start { config } => { let config: FaucetConfig = load_config(config).context("failed to load configuration file")?; - let faucet_state = FaucetState::new(config.clone()).await?; info!(target: COMPONENT, %config, "Initializing server"); @@ -128,9 +134,12 @@ async fn main() -> anyhow::Result<()> { ) .with_state(faucet_state); - let socket_addr = config.endpoint.socket_addrs(|| None)?.into_iter().next().ok_or( - anyhow::anyhow!("Couldn't get any socket addrs for endpoint: {}", config.endpoint), - )?; + let socket_addr = config + .endpoint + .socket_addrs(|| None)? 
+ .into_iter() + .next() + .with_context(|| format!("no sockets available on {}", config.endpoint))?; let listener = TcpListener::bind(socket_addr).await.context("failed to bind TCP listener")?; @@ -229,3 +238,121 @@ fn long_version() -> LongVersion { debug: option_env!("VERGEN_CARGO_DEBUG").unwrap_or_default(), } } + +#[cfg(test)] +mod test { + use std::{ + env::temp_dir, + io::{BufRead, BufReader}, + process::{Command, Stdio}, + str::FromStr, + }; + + use fantoccini::ClientBuilder; + use serde_json::{json, Map}; + use url::Url; + + use crate::{config::FaucetConfig, run_faucet_command, stub_rpc_api::serve_stub, Cli}; + + /// This test starts a stub node, a faucet connected to the stub node, and a chromedriver + /// to test the faucet website. It then loads the website and checks that all the requests + /// made return status 200. + #[tokio::test] + async fn test_website() { + let stub_node_url = Url::from_str("http://localhost:50051").unwrap(); + + // Start the stub node + tokio::spawn({ + let stub_node_url = stub_node_url.clone(); + async move { serve_stub(&stub_node_url).await.unwrap() } + }); + + let config_path = temp_dir().join("faucet.toml"); + let faucet_account_path = temp_dir().join("account.mac"); + + // Create config + let config = FaucetConfig { + node_url: stub_node_url, + faucet_account_path: faucet_account_path.clone(), + ..FaucetConfig::default() + }; + let config_as_toml_string = toml::to_string(&config).unwrap(); + std::fs::write(&config_path, config_as_toml_string).unwrap(); + + // Create faucet account + run_faucet_command(Cli { + command: crate::Command::CreateFaucetAccount { + config_path: config_path.clone(), + output_path: faucet_account_path.clone(), + token_symbol: "TEST".to_string(), + decimals: 2, + max_supply: 1000, + }, + }) + .await + .unwrap(); + + // Start the faucet connected to the stub + let website_url = config.endpoint.clone(); + tokio::spawn(async move { + run_faucet_command(Cli { + command: crate::Command::Start { config: 
config_path }, + }) + .await + .unwrap(); + }); + + // Start chromedriver. This requires having chromedriver and chrome installed + let chromedriver_port = "57709"; + #[expect(clippy::zombie_processes)] + let mut chromedriver = Command::new("chromedriver") + .arg(format!("--port={chromedriver_port}")) + .stdout(Stdio::piped()) + .spawn() + .expect("failed to start chromedriver"); + // Wait for chromedriver to be running + let stdout = chromedriver.stdout.take().unwrap(); + for line in BufReader::new(stdout).lines() { + if line.unwrap().contains("ChromeDriver was started successfully") { + break; + } + } + + // Start fantoccini client + let client = ClientBuilder::native() + .capabilities( + [( + "goog:chromeOptions".to_string(), + json!({"args": ["--headless", "--disable-gpu", "--no-sandbox"]}), + )] + .into_iter() + .collect::>(), + ) + .connect(&format!("http://localhost:{chromedriver_port}")) + .await + .expect("failed to connect to WebDriver"); + + // Open the website + client.goto(website_url.as_str()).await.unwrap(); + + let title = client.title().await.unwrap(); + assert_eq!(title, "Miden Faucet"); + + // Execute a script to get all the failed requests + let script = r" + let errors = []; + performance.getEntriesByType('resource').forEach(entry => { + if (entry.responseStatus && entry.responseStatus >= 400) { + errors.push({url: entry.name, status: entry.responseStatus}); + } + }); + return errors; + "; + let failed_requests = client.execute(script, vec![]).await.unwrap(); + assert!(failed_requests.as_array().unwrap().is_empty()); + + // Close the client and kill chromedriver + client.close().await.unwrap(); + chromedriver.kill().unwrap(); + } +} diff --git a/bin/faucet/src/stub_rpc_api.rs b/bin/faucet/src/stub_rpc_api.rs new file mode 100644 index 000000000..0a89336ff --- /dev/null +++ b/bin/faucet/src/stub_rpc_api.rs @@ -0,0 +1,166 @@ +use miden_node_proto::generated::{ + block::BlockHeader, + digest::Digest, + requests::{ + CheckNullifiersByPrefixRequest, 
CheckNullifiersRequest, GetAccountDetailsRequest, + GetAccountProofsRequest, GetAccountStateDeltaRequest, GetBlockByNumberRequest, + GetBlockHeaderByNumberRequest, GetNotesByIdRequest, SubmitProvenTransactionRequest, + SyncNoteRequest, SyncStateRequest, + }, + responses::{ + CheckNullifiersByPrefixResponse, CheckNullifiersResponse, GetAccountDetailsResponse, + GetAccountProofsResponse, GetAccountStateDeltaResponse, GetBlockByNumberResponse, + GetBlockHeaderByNumberResponse, GetNotesByIdResponse, SubmitProvenTransactionResponse, + SyncNoteResponse, SyncStateResponse, + }, + rpc::api_server, +}; +use miden_node_utils::errors::ApiError; +use tokio::net::TcpListener; +use tokio_stream::wrappers::TcpListenerStream; +use tonic::{Request, Response, Status}; +use url::Url; + +#[derive(Clone)] +pub struct StubRpcApi; + +#[tonic::async_trait] +impl api_server::Api for StubRpcApi { + async fn check_nullifiers( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!(); + } + + async fn check_nullifiers_by_prefix( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!(); + } + + async fn get_block_header_by_number( + &self, + _request: Request, + ) -> Result, Status> { + // Values are taken from the default genesis block as at v0.7 + Ok(Response::new(GetBlockHeaderByNumberResponse { + block_header: Some(BlockHeader { + version: 1, + prev_hash: Some(Digest { d0: 0, d1: 0, d2: 0, d3: 0 }), + block_num: 0, + chain_root: Some(Digest { + d0: 0x9729_9D39_2DA8_DC69, + d1: 0x674_44AF_6294_0719, + d2: 0x7B97_0BC7_07A0_F7D6, + d3: 0xE423_8D7C_78F3_9D8B, + }), + account_root: Some(Digest { + d0: 0x9666_5D75_8487_401A, + d1: 0xB7BF_DF8B_379F_ED71, + d2: 0xFCA7_82CB_2406_2222, + d3: 0x8D0C_B80F_6377_4E9A, + }), + nullifier_root: Some(Digest { + d0: 0xD4A0_CFF6_578C_123E, + d1: 0xF11A_1794_8930_B14A, + d2: 0xD128_DD2A_4213_B53C, + d3: 0x2DF8_FE54_F23F_6B91, + }), + note_root: Some(Digest { + d0: 0x93CE_DDC8_A187_24FE, + d1: 0x4E32_9917_2E91_30ED, + d2: 
0x8022_9E0E_1808_C860, + d3: 0x13F4_7934_7EB7_FD78, + }), + tx_hash: Some(Digest { d0: 0, d1: 0, d2: 0, d3: 0 }), + kernel_root: Some(Digest { + d0: 0x7B6F_43E5_2910_C8C3, + d1: 0x99B3_2868_577E_5779, + d2: 0xAF9E_6424_57CD_B8C1, + d3: 0xB1DD_E61B_F983_2DBD, + }), + proof_hash: Some(Digest { d0: 0, d1: 0, d2: 0, d3: 0 }), + timestamp: 0x63B0_CD00, + }), + mmr_path: None, + chain_length: None, + })) + } + + async fn sync_state( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!(); + } + + async fn sync_notes( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!(); + } + + async fn get_notes_by_id( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!(); + } + + async fn submit_proven_transaction( + &self, + _request: Request, + ) -> Result, Status> { + Ok(Response::new(SubmitProvenTransactionResponse { block_height: 0 })) + } + + async fn get_account_details( + &self, + _request: Request, + ) -> Result, Status> { + Err(Status::not_found("account not found")) + } + + async fn get_block_by_number( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn get_account_state_delta( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } + + async fn get_account_proofs( + &self, + _request: Request, + ) -> Result, Status> { + unimplemented!() + } +} + +pub async fn serve_stub(endpoint: &Url) -> Result<(), ApiError> { + let addr = endpoint + .socket_addrs(|| None) + .map_err(ApiError::EndpointToSocketFailed)? 
+ .into_iter() + .next() + .unwrap(); + + let listener = TcpListener::bind(addr).await?; + let api_service = api_server::ApiServer::new(StubRpcApi); + + tonic::transport::Server::builder() + .accept_http1(true) + .add_service(tonic_web::enable(api_service)) + .serve_with_incoming(TcpListenerStream::new(listener)) + .await + .map_err(ApiError::ApiServeFailed) +} From 3c5f411738ca1fc6c77cc238b1eb028504ad75fe Mon Sep 17 00:00:00 2001 From: Serge Radinovich <47865535+sergerad@users.noreply.github.com> Date: Mon, 3 Mar 2025 22:00:45 +1300 Subject: [PATCH 24/27] chore: add Open Telemetry attributes to grpc spans (#698) --- Cargo.lock | 319 ++++++++++++++++++++++++++- Cargo.toml | 3 + bin/faucet/Cargo.toml | 4 +- crates/block-producer/Cargo.toml | 3 +- crates/block-producer/src/server.rs | 6 +- crates/store/Cargo.toml | 1 + crates/store/src/server/mod.rs | 6 +- crates/utils/Cargo.toml | 5 +- crates/utils/src/logging.rs | 24 +- crates/utils/src/tracing/grpc.rs | 81 +++++-- crates/utils/src/tracing/span_ext.rs | 53 +++-- docs/operator.md | 3 +- 12 files changed, 451 insertions(+), 57 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07858d22f..198bf8e17 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -136,6 +136,151 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + 
+[[package]] +name = "async-executor" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.3.1", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener 5.4.0", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb" +dependencies = [ + "async-channel 2.3.1", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.4.0", + "futures-lite", + "rustix", + "tracing", +] + +[[package]] +name = "async-signal" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" +dependencies = [ + "async-io", + 
"async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-std" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + [[package]] name = "async-stream" version = "0.3.6" @@ -158,6 +303,12 @@ dependencies = [ "syn", ] +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + [[package]] name = "async-trait" version = "0.1.86" @@ -411,6 +562,19 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blocking" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +dependencies = [ + "async-channel 2.3.1", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + [[package]] name = "bumpalo" version = "3.17.0" @@ -574,6 +738,15 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "constant_time_eq" version = "0.3.1" @@ -877,6 +1050,33 @@ dependencies = 
[ "windows-sys 0.59.0", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +dependencies = [ + "event-listener 5.4.0", + "pin-project-lite", +] + [[package]] name = "fallible-iterator" version = "0.3.0" @@ -1026,6 +1226,19 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-lite" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + [[package]] name = "futures-macro" version = "0.3.31" @@ -1127,6 +1340,18 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "h2" version = "0.4.8" @@ -1188,6 +1413,12 @@ version = "0.3.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + [[package]] name = "hex" version = "0.4.3" @@ -1593,6 +1824,15 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + [[package]] name = "lalrpop" version = "0.20.2" @@ -1703,6 +1943,9 @@ name = "log" version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +dependencies = [ + "value-bag", +] [[package]] name = "logos" @@ -2003,6 +2246,7 @@ dependencies = [ "tokio", "tokio-stream", "tonic", + "tower-http 0.6.2", "tracing", "url", "winterfell", @@ -2060,6 +2304,7 @@ dependencies = [ "tokio", "tokio-stream", "tonic", + "tower-http 0.6.2", "tracing", "url", ] @@ -2409,7 +2654,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] @@ -2531,6 +2776,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" dependencies = [ + "async-std", "async-trait", "futures-channel", "futures-executor", @@ -2558,6 +2804,12 @@ version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1036865bb9422d3300cf723f657c2851d0e9ab12567854b1f4eba3d77decf564" +[[package]] +name = "parking" +version = "2.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.12.3" @@ -2686,12 +2938,38 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkg-config" version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +[[package]] +name = "polling" +version = "3.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi 0.4.0", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "powerfmt" version = "0.2.0" @@ -3350,6 +3628,15 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + [[package]] name = "siphasher" version = "1.0.1" @@ -3890,6 +4177,7 @@ dependencies = [ "http 1.2.0", "http-body", "pin-project-lite", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -4140,6 +4428,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "value-bag" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" + [[package]] name = "vcpkg" version = "0.2.15" @@ -4270,6 +4564,19 @@ dependencies = [ "wasm-bindgen-shared", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.100" @@ -4302,6 +4609,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "web-time" version = "1.1.0" diff --git a/Cargo.toml b/Cargo.toml index 7d8563b0e..cf817fdb8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ version = "0.8.0" [workspace.dependencies] assert_matches = { version = "1.5" } +http = { version = "1.2" } itertools = { version = "0.14" } miden-air = { version = "0.12" } miden-lib = { git = "https://github.com/0xPolygonMiden/miden-base", branch = "next" } @@ -44,6 +45,8 @@ thiserror = { version = "2.0", default-features = false } tokio = { version = "1.40", features = ["rt-multi-thread"] } tokio-stream = { version = "0.1" } tonic = { version = "0.12" } +tower = { version = "0.5" } +tower-http = { version = "0.6", features = ["trace"] } tracing = { version = "0.1" } tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] } url = { version = "2.5", features = ["serde"] } diff --git a/bin/faucet/Cargo.toml b/bin/faucet/Cargo.toml index 
a1b66b565..28ef9d438 100644 --- a/bin/faucet/Cargo.toml +++ b/bin/faucet/Cargo.toml @@ -34,8 +34,8 @@ thiserror = { workspace = true } tokio = { workspace = true, features = ["fs"] } toml = { version = "0.8" } tonic = { workspace = true } -tower = "0.5" -tower-http = { version = "0.6", features = ["cors", "set-header", "trace"] } +tower = { workspace = true } +tower-http = { workspace = true, features = ["cors", "set-header", "trace"] } tracing = { workspace = true } url = { workspace = true } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 6c354019c..f6d0789b6 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -34,7 +34,8 @@ serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true } +tonic = { workspace = true, features = ["transport"] } +tower-http = { workspace = true, features = ["util"] } tracing = { workspace = true } url = { workspace = true } diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 251d0b339..e3dc923e4 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -7,7 +7,7 @@ use miden_node_proto::generated::{ use miden_node_utils::{ errors::ApiError, formatting::{format_input_notes, format_output_notes}, - tracing::grpc::OtelInterceptor, + tracing::grpc::{block_producer_trace_fn, OtelInterceptor}, }; use miden_objects::{ block::BlockNumber, transaction::ProvenTransaction, utils::serde::Deserializable, @@ -15,6 +15,7 @@ use miden_objects::{ use tokio::{net::TcpListener, sync::Mutex}; use tokio_stream::wrappers::TcpListenerStream; use tonic::Status; +use tower_http::trace::TraceLayer; use tracing::{debug, info, instrument}; use crate::{ @@ -211,8 +212,9 @@ impl BlockProducerRpcServer { } async fn 
serve(self, listener: TcpListener) -> Result<(), tonic::transport::Error> { + // Build the gRPC server with the API service and trace layer. tonic::transport::Server::builder() - .trace_fn(miden_node_utils::tracing::grpc::block_producer_trace_fn) + .layer(TraceLayer::new_for_grpc().make_span_with(block_producer_trace_fn)) .add_service(api_server::ApiServer::new(self)) .serve_with_incoming(TcpListenerStream::new(listener)) .await diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index adc902a73..3ce5c5de0 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -28,6 +28,7 @@ thiserror = { workspace = true } tokio = { workspace = true, features = ["fs", "macros", "net", "rt-multi-thread"] } tokio-stream = { workspace = true, features = ["net"] } tonic = { workspace = true } +tower-http = { workspace = true, features = ["util"] } tracing = { workspace = true } url = { workspace = true } diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index 2b65a1dc2..a3c1f009d 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -1,9 +1,10 @@ use std::sync::Arc; use miden_node_proto::generated::store::api_server; -use miden_node_utils::errors::ApiError; +use miden_node_utils::{errors::ApiError, tracing::grpc::store_trace_fn}; use tokio::net::TcpListener; use tokio_stream::wrappers::TcpListenerStream; +use tower_http::trace::TraceLayer; use tracing::info; use crate::{blocks::BlockStore, config::StoreConfig, db::Db, state::State, COMPONENT}; @@ -61,8 +62,9 @@ impl Store { /// /// Note: this blocks until the server dies. pub async fn serve(self) -> Result<(), ApiError> { + // Build the gRPC server with the API service and trace layer. 
tonic::transport::Server::builder() - .trace_fn(miden_node_utils::tracing::grpc::store_trace_fn) + .layer(TraceLayer::new_for_grpc().make_span_with(store_trace_fn)) .add_service(self.api_service) .serve_with_incoming(TcpListenerStream::new(self.listener)) .await diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 73995dca5..c35bfd41a 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -21,12 +21,12 @@ vergen = ["dep:vergen", "dep:vergen-gitcl"] [dependencies] anyhow = { version = "1.0" } figment = { version = "0.10", features = ["env", "toml"] } -http = { version = "1.2" } +http = { workspace = true } itertools = { workspace = true } miden-objects = { workspace = true } opentelemetry = { version = "0.28" } opentelemetry-otlp = { version = "0.28", default-features = false, features = ["grpc-tonic", "tls-roots", "trace"] } -opentelemetry_sdk = { version = "0.28", features = ["rt-tokio"] } +opentelemetry_sdk = { version = "0.28", features = ["rt-tokio", "testing"] } rand = { workspace = true } serde = { version = "1.0", features = ["derive"] } thiserror = { workspace = true } @@ -35,6 +35,7 @@ tracing = { workspace = true } tracing-forest = { version = "0.1", optional = true, features = ["chrono"] } tracing-opentelemetry = { version = "0.29" } tracing-subscriber = { workspace = true } + # Optional dependencies enabled by `vergen` feature. # This must match the version expected by `vergen-gitcl`. 
vergen = { "version" = "9.0", optional = true } diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index a8a220ccd..cec4f6160 100644 --- a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use anyhow::Result; use opentelemetry::trace::TracerProvider as _; use opentelemetry_otlp::WithTonicConfig; -use opentelemetry_sdk::propagation::TraceContextPropagator; +use opentelemetry_sdk::{propagation::TraceContextPropagator, trace::SpanExporter}; use tracing::subscriber::Subscriber; use tracing_opentelemetry::OpenTelemetryLayer; use tracing_subscriber::{ @@ -39,7 +39,17 @@ pub fn setup_tracing(otel: OpenTelemetry) -> Result<()> { // Note: open-telemetry requires a tokio-runtime, so this _must_ be lazily evaluated (aka not // `then_some`) to avoid crashing sync callers (with OpenTelemetry::Disabled set). Examples of // such callers are tests with logging enabled. - let otel_layer = otel.is_enabled().then(open_telemetry_layer); + let otel_layer = { + if otel.is_enabled() { + let exporter = opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_tls_config(tonic::transport::ClientTlsConfig::new().with_native_roots()) + .build()?; + Some(open_telemetry_layer(exporter)) + } else { + None + } + }; let subscriber = Registry::default() .with(stdout_layer().with_filter(env_or_default_filter())) @@ -47,17 +57,13 @@ pub fn setup_tracing(otel: OpenTelemetry) -> Result<()> { tracing::subscriber::set_global_default(subscriber).map_err(Into::into) } -fn open_telemetry_layer() -> Box + Send + Sync + 'static> +fn open_telemetry_layer( + exporter: impl SpanExporter + 'static, +) -> Box + Send + Sync + 'static> where S: Subscriber + Sync + Send, for<'a> S: tracing_subscriber::registry::LookupSpan<'a>, { - let exporter = opentelemetry_otlp::SpanExporter::builder() - .with_tonic() - .with_tls_config(tonic::transport::ClientTlsConfig::new().with_native_roots()) - .build() - .unwrap(); - let tracer = 
opentelemetry_sdk::trace::SdkTracerProvider::builder() .with_batch_exporter(exporter) .build(); diff --git a/crates/utils/src/tracing/grpc.rs b/crates/utils/src/tracing/grpc.rs index 1e951af12..6a5bb9508 100644 --- a/crates/utils/src/tracing/grpc.rs +++ b/crates/utils/src/tracing/grpc.rs @@ -1,4 +1,13 @@ -use tracing_opentelemetry::OpenTelemetrySpanExt; +/// Creates a [`tracing::Span`] based on RPC service and method name. +macro_rules! rpc_span { + ($service:expr, $method:expr) => { + tracing::info_span!( + concat!($service, "/", $method), + rpc.service = $service, + rpc.method = $method + ) + }; +} /// A [`trace_fn`](tonic::transport::server::Server) implementation for the block producer which /// adds open-telemetry information to the span. @@ -6,11 +15,11 @@ use tracing_opentelemetry::OpenTelemetrySpanExt; /// Creates an `info` span following the open-telemetry standard: `block-producer.rpc/{method}`. /// Additionally also pulls in remote tracing context which allows the server trace to be connected /// to the client's origin trace. -pub fn block_producer_trace_fn(request: &http::Request<()>) -> tracing::Span { +pub fn block_producer_trace_fn(request: &http::Request) -> tracing::Span { let span = if let Some("SubmitProvenTransaction") = request.uri().path().rsplit('/').next() { - tracing::info_span!("block-producer.rpc/SubmitProvenTransaction") + rpc_span!("block-producer.rpc", "SubmitProvenTransaction") } else { - tracing::info_span!("block-producer.rpc/Unknown") + rpc_span!("block-producer.rpc", "Unknown") }; add_otel_span_attributes(span, request) @@ -22,23 +31,23 @@ pub fn block_producer_trace_fn(request: &http::Request<()>) -> tracing::Span { /// Creates an `info` span following the open-telemetry standard: `store.rpc/{method}`. Additionally /// also pulls in remote tracing context which allows the server trace to be connected to the /// client's origin trace. 
-pub fn store_trace_fn(request: &http::Request<()>) -> tracing::Span { +pub fn store_trace_fn(request: &http::Request) -> tracing::Span { let span = match request.uri().path().rsplit('/').next() { - Some("ApplyBlock") => tracing::info_span!("store.rpc/ApplyBlock"), - Some("CheckNullifiers") => tracing::info_span!("store.rpc/CheckNullifiers"), - Some("CheckNullifiersByPrefix") => tracing::info_span!("store.rpc/CheckNullifiersByPrefix"), - Some("GetAccountDetails") => tracing::info_span!("store.rpc/GetAccountDetails"), - Some("GetAccountProofs") => tracing::info_span!("store.rpc/GetAccountProofs"), - Some("GetAccountStateDelta") => tracing::info_span!("store.rpc/GetAccountStateDelta"), - Some("GetBlockByNumber") => tracing::info_span!("store.rpc/GetBlockByNumber"), - Some("GetBlockHeaderByNumber") => tracing::info_span!("store.rpc/GetBlockHeaderByNumber"), - Some("GetBlockInputs") => tracing::info_span!("store.rpc/GetBlockInputs"), - Some("GetBatchInputs") => tracing::info_span!("store.rpc/GetBatchInputs"), - Some("GetNotesById") => tracing::info_span!("store.rpc/GetNotesById"), - Some("GetTransactionInputs") => tracing::info_span!("store.rpc/GetTransactionInputs"), - Some("SyncNotes") => tracing::info_span!("store.rpc/SyncNotes"), - Some("SyncState") => tracing::info_span!("store.rpc/SyncState"), - _ => tracing::info_span!("store.rpc/Unknown"), + Some("ApplyBlock") => rpc_span!("store.rpc", "ApplyBlock"), + Some("CheckNullifiers") => rpc_span!("store.rpc", "CheckNullifiers"), + Some("CheckNullifiersByPrefix") => rpc_span!("store.rpc", "CheckNullifiersByPrefix"), + Some("GetAccountDetails") => rpc_span!("store.rpc", "GetAccountDetails"), + Some("GetAccountProofs") => rpc_span!("store.rpc", "GetAccountProofs"), + Some("GetAccountStateDelta") => rpc_span!("store.rpc", "GetAccountStateDelta"), + Some("GetBlockByNumber") => rpc_span!("store.rpc", "GetBlockByNumber"), + Some("GetBlockHeaderByNumber") => rpc_span!("store.rpc", "GetBlockHeaderByNumber"), + 
Some("GetBlockInputs") => rpc_span!("store.rpc", "GetBlockInputs"), + Some("GetBatchInputs") => rpc_span!("store.rpc", "GetBatchInputs"), + Some("GetNotesById") => rpc_span!("store.rpc", "GetNotesById"), + Some("GetTransactionInputs") => rpc_span!("store.rpc", "GetTransactionInputs"), + Some("SyncNotes") => rpc_span!("store.rpc", "SyncNotes"), + Some("SyncState") => rpc_span!("store.rpc", "SyncState"), + _ => rpc_span!("store.rpc", "Unknown"), }; add_otel_span_attributes(span, request) @@ -47,19 +56,44 @@ pub fn store_trace_fn(request: &http::Request<()>) -> tracing::Span { /// Adds remote tracing context to the span. /// /// Could be expanded in the future by adding in more open-telemetry properties. -fn add_otel_span_attributes(span: tracing::Span, request: &http::Request<()>) -> tracing::Span { +fn add_otel_span_attributes(span: tracing::Span, request: &http::Request) -> tracing::Span { + use super::OpenTelemetrySpanExt; // Pull the open-telemetry parent context using the HTTP extractor. We could make a more // generic gRPC extractor by utilising the gRPC metadata. However that // (a) requires cloning headers, // (b) we would have to write this ourselves, and // (c) gRPC metadata is transferred using HTTP headers in any case. - use tracing_opentelemetry::OpenTelemetrySpanExt; let otel_ctx = opentelemetry::global::get_text_map_propagator(|propagator| { propagator.extract(&MetadataExtractor(&tonic::metadata::MetadataMap::from_headers( request.headers().clone(), ))) }); - span.set_parent(otel_ctx); + tracing_opentelemetry::OpenTelemetrySpanExt::set_parent(&span, otel_ctx); + + // Set HTTP attributes. + // See https://opentelemetry.io/docs/specs/semconv/rpc/rpc-spans/#server-attributes. 
+ span.set_attribute("rpc.system", "grpc"); + if let Some(host) = request.uri().host() { + span.set_attribute("server.address", host); + } + if let Some(host_port) = request.uri().port() { + span.set_attribute("server.port", host_port.as_u16()); + } + let remote_addr = request + .extensions() + .get::() + .and_then(tonic::transport::server::TcpConnectInfo::remote_addr); + if let Some(addr) = remote_addr { + span.set_attribute("client.address", addr.ip()); + span.set_attribute("client.port", addr.port()); + span.set_attribute("network.peer.address", addr.ip()); + span.set_attribute("network.peer.port", addr.port()); + span.set_attribute("network.transport", "tcp"); + match addr.ip() { + std::net::IpAddr::V4(_) => span.set_attribute("network.type", "ipv4"), + std::net::IpAddr::V6(_) => span.set_attribute("network.type", "ipv6"), + } + } span } @@ -73,6 +107,7 @@ impl tonic::service::Interceptor for OtelInterceptor { &mut self, mut request: tonic::Request<()>, ) -> Result, tonic::Status> { + use tracing_opentelemetry::OpenTelemetrySpanExt; let ctx = tracing::Span::current().context(); opentelemetry::global::get_text_map_propagator(|propagator| { propagator.inject_context(&ctx, &mut MetadataInjector(request.metadata_mut())); diff --git a/crates/utils/src/tracing/span_ext.rs b/crates/utils/src/tracing/span_ext.rs index a21150ac0..4aed6c7a8 100644 --- a/crates/utils/src/tracing/span_ext.rs +++ b/crates/utils/src/tracing/span_ext.rs @@ -1,4 +1,5 @@ use core::time::Duration; +use std::net::IpAddr; use miden_objects::{block::BlockNumber, Digest}; use opentelemetry::{trace::Status, Key, Value}; @@ -20,29 +21,53 @@ impl ToValue for Digest { } } -impl ToValue for f64 { - fn to_value(&self) -> Value { - (*self).into() - } -} - impl ToValue for BlockNumber { fn to_value(&self) -> Value { i64::from(self.as_u32()).into() } } -impl ToValue for u32 { - fn to_value(&self) -> Value { - i64::from(*self).into() - } +/// Generates `impl ToValue` blocks for types that are `ToString`. 
+macro_rules! impl_to_string_to_value { + ($($t:ty),*) => { + $( + impl ToValue for $t { + fn to_value(&self) -> Value { + self.to_string().into() + } + } + )* + }; } +impl_to_string_to_value!(IpAddr, &str); -impl ToValue for i64 { - fn to_value(&self) -> Value { - (*self).into() - } +/// Generates `impl ToValue` blocks for integer types. +macro_rules! impl_int_to_value { + ($($t:ty),*) => { + $( + impl ToValue for $t { + fn to_value(&self) -> Value { + i64::from(*self).into() + } + } + )* + }; +} +impl_int_to_value!(u16, u32); + +/// Generates `impl ToValue` blocks for types that are `Into`. +macro_rules! impl_to_value { + ($($t:ty),*) => { + $( + impl ToValue for $t { + fn to_value(&self) -> Value { + (*self).into() + } + } + )* + }; } +impl_to_value!(f64, i64); /// Utility functions based on [`tracing_opentelemetry::OpenTelemetrySpanExt`]. /// diff --git a/docs/operator.md b/docs/operator.md index 286aa64eb..9be133afd 100644 --- a/docs/operator.md +++ b/docs/operator.md @@ -90,6 +90,7 @@ block_builder.build_block ┕━ mempool.revert_expired_transactions ┕━ mempool.revert_transactions ``` + #### Batch building @@ -143,7 +144,7 @@ The exporter can be configured using environment variables as specified in the o > [setup guide](https://docs.honeycomb.io/send-data/opentelemetry/#using-the-honeycomb-opentelemetry-endpoint). 
```sh -OTEL_EXPORTER_OTLP_ENDPOINT=api.honeycomb.io:443 \ +OTEL_EXPORTER_OTLP_ENDPOINT=https://api.honeycomb.io:443 \ OTEL_EXPORTER_OTLP_HEADERS="x-honeycomb-team=your-api-key" \ miden-node start --open-telemetry node ``` From f9ab405cb2f78781187183f7bf388b7ec575bc15 Mon Sep 17 00:00:00 2001 From: Tomas Date: Thu, 6 Mar 2025 12:55:14 -0300 Subject: [PATCH 25/27] chore: upgrade to Rust 2024 edition (#727) --- .github/workflows/lint.yml | 11 ++--------- CHANGELOG.md | 2 ++ Cargo.toml | 4 ++-- README.md | 6 +++--- bin/faucet/src/client.rs | 8 ++++---- bin/faucet/src/errors.rs | 2 +- bin/faucet/src/handlers.rs | 4 ++-- bin/faucet/src/main.rs | 14 +++++++------- bin/faucet/src/state.rs | 2 +- bin/faucet/src/store.rs | 2 +- bin/node/src/commands/genesis/mod.rs | 18 ++++++++++++------ bin/node/src/commands/init.rs | 2 +- bin/node/src/config.rs | 2 +- bin/node/src/main.rs | 2 +- crates/block-producer/src/batch_builder/mod.rs | 10 +++++----- crates/block-producer/src/block_builder/mod.rs | 10 +++++----- .../block-producer/src/domain/transaction.rs | 2 +- crates/block-producer/src/errors.rs | 6 ++++-- .../block-producer/src/mempool/batch_graph.rs | 2 +- .../mempool/inflight_state/account_state.rs | 8 +++++--- .../src/mempool/inflight_state/mod.rs | 9 ++------- crates/block-producer/src/mempool/mod.rs | 6 +++--- crates/block-producer/src/mempool/tests.rs | 2 +- .../src/mempool/transaction_expiration.rs | 2 +- .../src/mempool/transaction_graph.rs | 2 +- crates/block-producer/src/server.rs | 4 ++-- crates/block-producer/src/store/mod.rs | 6 +++--- .../block-producer/src/test_utils/account.rs | 2 +- crates/block-producer/src/test_utils/batch.rs | 4 ++-- crates/block-producer/src/test_utils/block.rs | 2 +- crates/block-producer/src/test_utils/mod.rs | 6 +++--- crates/block-producer/src/test_utils/note.rs | 2 +- .../block-producer/src/test_utils/proven_tx.rs | 4 ++-- crates/block-producer/src/test_utils/store.rs | 2 +- crates/proto/src/domain/account.rs | 2 +- 
crates/proto/src/domain/block.rs | 2 +- crates/proto/src/domain/digest.rs | 2 +- crates/proto/src/domain/merkle.rs | 2 +- crates/proto/src/domain/note.rs | 2 +- crates/rpc/src/server/api.rs | 8 ++++---- crates/rpc/src/server/mod.rs | 2 +- crates/store/src/db/migrations.rs | 6 +++--- crates/store/src/db/mod.rs | 2 +- crates/store/src/db/settings.rs | 2 +- crates/store/src/db/sql/mod.rs | 14 +++++++------- crates/store/src/db/sql/utils.rs | 5 ++--- crates/store/src/db/tests.rs | 14 +++++++------- crates/store/src/errors.rs | 14 ++++++++++---- crates/store/src/genesis.rs | 4 ++-- crates/store/src/nullifier_tree.rs | 4 ++-- crates/store/src/server/api.rs | 4 ++-- crates/store/src/server/mod.rs | 2 +- crates/store/src/state.rs | 11 ++++------- crates/test-macro/src/lib.rs | 2 +- crates/utils/src/config.rs | 2 +- crates/utils/src/crypto.rs | 4 ++-- crates/utils/src/formatting.rs | 2 +- crates/utils/src/logging.rs | 4 ++-- crates/utils/src/tracing/grpc.rs | 2 +- crates/utils/src/tracing/span_ext.rs | 4 ++-- crates/utils/src/version/mod.rs | 4 ++-- rust-toolchain.toml | 2 +- 62 files changed, 150 insertions(+), 148 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8425d4740..e4ac4540a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -94,15 +94,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@main - - name: Install cargo-machete - uses: clechasseur/rs-cargo@v2 - with: - command: install - args: cargo-machete@0.7.0 - - name: Machete - uses: clechasseur/rs-cargo@v2 - with: - command: machete + - name: machete + uses: bnjbvr/cargo-machete@main proto: name: proto check diff --git a/CHANGELOG.md b/CHANGELOG.md index 81da63401..5dd7a791d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,8 @@ - [BREAKING] Update `GetBlockInputs` RPC (#709). - [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). - [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). 
+- [BREAKING] Updated to Rust Edition 2024 (#727). +- [BREAKING] MSRV bumped to 1.85 (#727). ### Enhancements diff --git a/Cargo.toml b/Cargo.toml index cf817fdb8..3a56da92c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,13 +15,13 @@ resolver = "2" [workspace.package] authors = ["Miden contributors"] -edition = "2021" +edition = "2024" exclude = [".github/"] homepage = "https://polygon.technology/polygon-miden" license = "MIT" readme = "README.md" repository = "https://github.com/0xPolygonMiden/miden-node" -rust-version = "1.84" +rust-version = "1.85" version = "0.8.0" [workspace.dependencies] diff --git a/README.md b/README.md index 251238a09..e0b8a8d22 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xPolygonMiden/miden-node/blob/main/LICENSE) [![test](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml/badge.svg)](https://github.com/0xPolygonMiden/miden-node/actions/workflows/test.yml) -[![RUST_VERSION](https://img.shields.io/badge/rustc-1.84+-lightgray.svg)](https://www.rust-lang.org/tools/install) +[![RUST_VERSION](https://img.shields.io/badge/rustc-1.85+-lightgray.svg)](https://www.rust-lang.org/tools/install) [![crates.io](https://img.shields.io/crates/v/miden-node)](https://crates.io/crates/miden-node) This repository holds the Miden node; that is, the software which processes transactions and creates blocks for the @@ -48,7 +48,7 @@ source. ### Debian package Debian packages are available and are the fastest way to install the node on a Debian-based system. Both `amd64` and -`arm64` packages are available. +`arm64` packages are available. These packages can be found under our [releases](https://github.com/0xPolygonMiden/miden-node/releases) page along with a checksum. 
@@ -73,7 +73,7 @@ sudo dpkg -i $package_name.deb ### Install using `cargo` -Install Rust version **1.84** or greater using the official Rust installation +Install Rust version **1.85** or greater using the official Rust installation [instructions](https://www.rust-lang.org/tools/install). Depending on the platform, you may need to install additional libraries. For example, on Ubuntu 22.04 the following diff --git a/bin/faucet/src/client.rs b/bin/faucet/src/client.rs index cc3fa2618..bf738e420 100644 --- a/bin/faucet/src/client.rs +++ b/bin/faucet/src/client.rs @@ -9,6 +9,7 @@ use miden_node_proto::generated::{ rpc::api_client::ApiClient, }; use miden_objects::{ + Felt, account::{Account, AccountFile, AccountId, AuthSecretKey}, asset::FungibleAsset, block::{BlockHeader, BlockNumber}, @@ -20,17 +21,16 @@ use miden_objects::{ transaction::{ChainMmr, ExecutedTransaction, TransactionArgs, TransactionScript}, utils::Deserializable, vm::AdviceMap, - Felt, }; use miden_tx::{ - auth::BasicAuthenticator, utils::Serializable, LocalTransactionProver, ProvingOptions, - TransactionExecutor, TransactionProver, + LocalTransactionProver, ProvingOptions, TransactionExecutor, TransactionProver, + auth::BasicAuthenticator, utils::Serializable, }; use rand::{random, rngs::StdRng}; use tonic::transport::Channel; use tracing::info; -use crate::{config::FaucetConfig, errors::ClientError, store::FaucetDataStore, COMPONENT}; +use crate::{COMPONENT, config::FaucetConfig, errors::ClientError, store::FaucetDataStore}; pub const DISTRIBUTE_FUNGIBLE_ASSET_SCRIPT: &str = include_str!("transaction_scripts/distribute_fungible_asset.masm"); diff --git a/bin/faucet/src/errors.rs b/bin/faucet/src/errors.rs index 476f066aa..093b53616 100644 --- a/bin/faucet/src/errors.rs +++ b/bin/faucet/src/errors.rs @@ -1,7 +1,7 @@ use std::fmt::Debug; use axum::{ - http::{header, StatusCode}, + http::{StatusCode, header}, response::{IntoResponse, Response}, }; use miden_objects::AccountIdError; diff --git 
a/bin/faucet/src/handlers.rs b/bin/faucet/src/handlers.rs index 171e7f5a2..add62c2d9 100644 --- a/bin/faucet/src/handlers.rs +++ b/bin/faucet/src/handlers.rs @@ -1,9 +1,9 @@ use anyhow::Context; use axum::{ + Json, extract::State, http::{Response, StatusCode}, response::IntoResponse, - Json, }; use http::header; use http_body_util::Full; @@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize}; use tonic::body; use tracing::info; -use crate::{errors::HandlerError, state::FaucetState, COMPONENT}; +use crate::{COMPONENT, errors::HandlerError, state::FaucetState}; #[derive(Deserialize)] pub struct FaucetRequest { diff --git a/bin/faucet/src/main.rs b/bin/faucet/src/main.rs index c6dce5eed..749c8eb1c 100644 --- a/bin/faucet/src/main.rs +++ b/bin/faucet/src/main.rs @@ -12,22 +12,22 @@ use std::path::PathBuf; use anyhow::Context; use axum::{ - routing::{get, post}, Router, + routing::{get, post}, }; use clap::{Parser, Subcommand}; use client::initialize_faucet_client; use handlers::{get_background, get_favicon, get_index_css, get_index_html, get_index_js}; use http::HeaderValue; -use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; +use miden_lib::{AuthScheme, account::faucets::create_basic_fungible_faucet}; use miden_node_utils::{ config::load_config, crypto::get_rpo_random_coin, logging::OpenTelemetry, version::LongVersion, }; use miden_objects::{ + Felt, account::{AccountFile, AccountStorageMode, AuthSecretKey}, asset::TokenSymbol, crypto::dsa::rpo_falcon512::SecretKey, - Felt, }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; @@ -38,7 +38,7 @@ use tower_http::{cors::CorsLayer, set_header::SetResponseHeaderLayer, trace::Tra use tracing::info; use crate::{ - config::{FaucetConfig, DEFAULT_FAUCET_ACCOUNT_PATH}, + config::{DEFAULT_FAUCET_ACCOUNT_PATH, FaucetConfig}, handlers::{get_metadata, get_tokens}, }; @@ -170,7 +170,7 @@ async fn run_faucet_command(cli: Cli) -> anyhow::Result<()> { let secret = SecretKey::with_rng(&mut 
get_rpo_random_coin(&mut rng)); let (account, account_seed) = create_basic_fungible_faucet( - rng.gen(), + rng.r#gen(), (&root_block_header).try_into().context("failed to create anchor block")?, TokenSymbol::try_from(token_symbol.as_str()) .context("failed to parse token symbol")?, @@ -249,10 +249,10 @@ mod test { }; use fantoccini::ClientBuilder; - use serde_json::{json, Map}; + use serde_json::{Map, json}; use url::Url; - use crate::{config::FaucetConfig, run_faucet_command, stub_rpc_api::serve_stub, Cli}; + use crate::{Cli, config::FaucetConfig, run_faucet_command, stub_rpc_api::serve_stub}; /// This test starts a stub node, a faucet connected to the stub node, and a chromedriver /// to test the faucet website. It then loads the website and checks that all the requests diff --git a/bin/faucet/src/state.rs b/bin/faucet/src/state.rs index 1c839228c..ce4cb0ed8 100644 --- a/bin/faucet/src/state.rs +++ b/bin/faucet/src/state.rs @@ -5,7 +5,7 @@ use static_files::Resource; use tokio::sync::Mutex; use tracing::info; -use crate::{client::FaucetClient, config::FaucetConfig, static_resources, COMPONENT}; +use crate::{COMPONENT, client::FaucetClient, config::FaucetConfig, static_resources}; // FAUCET STATE // ================================================================================================ diff --git a/bin/faucet/src/store.rs b/bin/faucet/src/store.rs index 1c61b7ebb..a46beb30d 100644 --- a/bin/faucet/src/store.rs +++ b/bin/faucet/src/store.rs @@ -1,11 +1,11 @@ use std::sync::Mutex; use miden_objects::{ + Word, account::{Account, AccountId}, block::{BlockHeader, BlockNumber}, note::NoteId, transaction::{ChainMmr, InputNotes, TransactionInputs}, - Word, }; use miden_tx::{DataStore, DataStoreError}; diff --git a/bin/node/src/commands/genesis/mod.rs b/bin/node/src/commands/genesis/mod.rs index a8e95087d..cb7a38ca0 100644 --- a/bin/node/src/commands/genesis/mod.rs +++ b/bin/node/src/commands/genesis/mod.rs @@ -3,16 +3,16 @@ use std::{ path::{Path, PathBuf}, }; 
-use anyhow::{anyhow, bail, Context, Result}; +use anyhow::{Context, Result, anyhow, bail}; pub use inputs::{AccountInput, AuthSchemeInput, GenesisInput}; -use miden_lib::{account::faucets::create_basic_fungible_faucet, AuthScheme}; +use miden_lib::{AuthScheme, account::faucets::create_basic_fungible_faucet}; use miden_node_store::genesis::GenesisState; use miden_node_utils::{config::load_config, crypto::get_rpo_random_coin}; use miden_objects::{ + Felt, ONE, account::{Account, AccountFile, AccountIdAnchor, AuthSecretKey}, asset::TokenSymbol, crypto::{dsa::rpo_falcon512::SecretKey, utils::Serializable}, - Felt, ONE, }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; @@ -45,7 +45,10 @@ pub fn make_genesis(inputs_path: &PathBuf, output_path: &PathBuf, force: bool) - if !force { if let Ok(file_exists) = output_path.try_exists() { if file_exists { - return Err(anyhow!("Failed to generate new genesis file {} because it already exists. Use the --force flag to overwrite.", output_path.display())); + return Err(anyhow!( + "Failed to generate new genesis file {} because it already exists. Use the --force flag to overwrite.", + output_path.display() + )); } } else { return Err(anyhow!("Failed to open {} file.", output_path.display())); @@ -118,7 +121,7 @@ fn create_accounts( let storage_mode = inputs.storage_mode.as_str().try_into()?; let (account, account_seed) = create_basic_fungible_faucet( - rng.gen(), + rng.r#gen(), AccountIdAnchor::PRE_GENESIS, TokenSymbol::try_from(inputs.token_symbol.as_str())?, inputs.decimals, @@ -142,7 +145,10 @@ fn create_accounts( let path = accounts_path.as_ref().join(format!("{name}.mac")); if !force && matches!(path.try_exists(), Ok(true)) { - bail!("Failed to generate account file {} because it already exists. Use the --force flag to overwrite.", path.display()); + bail!( + "Failed to generate account file {} because it already exists. 
Use the --force flag to overwrite.", + path.display() + ); } account_data.account.set_nonce(ONE)?; diff --git a/bin/node/src/commands/init.rs b/bin/node/src/commands/init.rs index 7b36a70ff..2e518fff9 100644 --- a/bin/node/src/commands/init.rs +++ b/bin/node/src/commands/init.rs @@ -1,6 +1,6 @@ use std::{fs::File, io::Write, path::Path}; -use anyhow::{anyhow, Result}; +use anyhow::{Result, anyhow}; use crate::{commands::genesis::GenesisInput, config::NodeConfig}; diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index 4fef3981f..c65275bd2 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -79,8 +79,8 @@ mod tests { use super::NodeConfig; use crate::{ - config::{NormalizedBlockProducerConfig, NormalizedRpcConfig}, NODE_CONFIG_FILE_PATH, + config::{NormalizedBlockProducerConfig, NormalizedRpcConfig}, }; #[test] diff --git a/bin/node/src/main.rs b/bin/node/src/main.rs index 2845b9395..04eb1ee2a 100644 --- a/bin/node/src/main.rs +++ b/bin/node/src/main.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; -use anyhow::{anyhow, Context}; +use anyhow::{Context, anyhow}; use clap::{Parser, Subcommand}; use commands::{init::init_config_files, start::start_node}; use miden_node_block_producer::server::BlockProducer; diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index 550dcb736..af74d719f 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -3,17 +3,17 @@ use std::{num::NonZeroUsize, ops::Range, time::Duration}; use miden_node_proto::domain::batch::BatchInputs; use miden_node_utils::formatting::format_array; use miden_objects::{ - batch::{BatchId, ProposedBatch, ProvenBatch}, MIN_PROOF_SECURITY_LEVEL, + batch::{BatchId, ProposedBatch, ProvenBatch}, }; use miden_tx_batch_prover::LocalBatchProver; use rand::Rng; use tokio::{task::JoinSet, time}; -use tracing::{debug, info, instrument, Span}; +use tracing::{Span, debug, info, 
instrument}; use crate::{ - domain::transaction::AuthenticatedTransaction, errors::BuildBatchError, mempool::SharedMempool, - store::StoreClient, COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, + COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, domain::transaction::AuthenticatedTransaction, + errors::BuildBatchError, mempool::SharedMempool, store::StoreClient, }; // BATCH BUILDER @@ -212,7 +212,7 @@ impl WorkerPool { // Randomly fail batches at the configured rate. // // Note: Rng::gen rolls between [0, 1.0) for f32, so this works as expected. - let failed = rand::thread_rng().gen::() < self.failure_rate; + let failed = rand::thread_rng().r#gen::() < self.failure_rate; let store = self.store.clone(); async move { diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 9a24f4aa4..812504c40 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -4,18 +4,18 @@ use futures::FutureExt; use miden_block_prover::LocalBlockProver; use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::{ + MIN_PROOF_SECURITY_LEVEL, batch::ProvenBatch, block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, note::NoteHeader, - MIN_PROOF_SECURITY_LEVEL, }; use rand::Rng; use tokio::time::Duration; -use tracing::{instrument, Span}; +use tracing::{Span, instrument}; use crate::{ - errors::BuildBlockError, mempool::SharedMempool, store::StoreClient, COMPONENT, - SERVER_BLOCK_FREQUENCY, + COMPONENT, SERVER_BLOCK_FREQUENCY, errors::BuildBlockError, mempool::SharedMempool, + store::StoreClient, }; // BLOCK BUILDER @@ -234,7 +234,7 @@ impl BlockBuilder { #[instrument(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] fn inject_failure(&self, value: T) -> Result { - let roll = rand::thread_rng().gen::(); + let roll = rand::thread_rng().r#gen::(); Span::current().set_attribute("failure_rate", self.failure_rate); 
Span::current().set_attribute("dice_roll", roll); diff --git a/crates/block-producer/src/domain/transaction.rs b/crates/block-producer/src/domain/transaction.rs index c249709ed..da9238841 100644 --- a/crates/block-producer/src/domain/transaction.rs +++ b/crates/block-producer/src/domain/transaction.rs @@ -1,11 +1,11 @@ use std::{collections::BTreeSet, sync::Arc}; use miden_objects::{ + Digest, account::AccountId, block::BlockNumber, note::{NoteId, Nullifier}, transaction::{ProvenTransaction, TransactionId, TxAccountUpdate}, - Digest, }; use crate::{errors::VerifyTxError, store::TransactionInputs}; diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 6c9de3e37..05e779ad5 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -2,10 +2,10 @@ use miden_block_prover::ProvenBlockError; use miden_node_proto::errors::ConversionError; use miden_node_utils::formatting::format_opt; use miden_objects::{ + Digest, ProposedBatchError, ProposedBlockError, block::BlockNumber, note::{NoteId, Nullifier}, transaction::TransactionId, - Digest, ProposedBatchError, ProposedBlockError, }; use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; @@ -80,7 +80,9 @@ pub enum AddTransactionError { #[error("transaction verification failed")] VerificationFailed(#[from] VerifyTxError), - #[error("transaction input data from block {input_block} is rejected as stale because it is older than the limit of {stale_limit}")] + #[error( + "transaction input data from block {input_block} is rejected as stale because it is older than the limit of {stale_limit}" + )] StaleInputs { input_block: BlockNumber, stale_limit: BlockNumber, diff --git a/crates/block-producer/src/mempool/batch_graph.rs b/crates/block-producer/src/mempool/batch_graph.rs index 256408c1f..7a3166a34 100644 --- a/crates/block-producer/src/mempool/batch_graph.rs +++ b/crates/block-producer/src/mempool/batch_graph.rs @@ -7,8 +7,8 @@ use 
miden_objects::{ }; use super::{ - graph::{DependencyGraph, GraphError}, BlockBudget, BudgetStatus, + graph::{DependencyGraph, GraphError}, }; // BATCH GRAPH diff --git a/crates/block-producer/src/mempool/inflight_state/account_state.rs b/crates/block-producer/src/mempool/inflight_state/account_state.rs index 560241185..a96f5c8ca 100644 --- a/crates/block-producer/src/mempool/inflight_state/account_state.rs +++ b/crates/block-producer/src/mempool/inflight_state/account_state.rs @@ -1,6 +1,6 @@ use std::collections::VecDeque; -use miden_objects::{transaction::TransactionId, Digest}; +use miden_objects::{Digest, transaction::TransactionId}; // IN-FLIGHT ACCOUNT STATE // ================================================================================================ @@ -55,7 +55,8 @@ impl InflightAccountState { pub fn revert(&mut self, n: usize) -> AccountStatus { let uncommitted = self.uncommitted_count(); assert!( - uncommitted >= n, "Attempted to revert {n} transactions which is more than the {uncommitted} which are uncommitted.", + uncommitted >= n, + "Attempted to revert {n} transactions which is more than the {uncommitted} which are uncommitted.", ); self.states.drain(self.states.len() - n..); @@ -71,7 +72,8 @@ impl InflightAccountState { pub fn commit(&mut self, n: usize) { let uncommitted = self.uncommitted_count(); assert!( - uncommitted >= n, "Attempted to revert {n} transactions which is more than the {uncommitted} which are uncommitted." + uncommitted >= n, + "Attempted to revert {n} transactions which is more than the {uncommitted} which are uncommitted." 
); self.committed += n; diff --git a/crates/block-producer/src/mempool/inflight_state/mod.rs b/crates/block-producer/src/mempool/inflight_state/mod.rs index e9945aca8..95dce1f21 100644 --- a/crates/block-producer/src/mempool/inflight_state/mod.rs +++ b/crates/block-producer/src/mempool/inflight_state/mod.rs @@ -353,11 +353,7 @@ impl OutputNoteState { /// Returns the source transaction ID if the output note is not yet committed. fn transaction(&self) -> Option<&TransactionId> { - if let Self::Inflight(tx) = self { - Some(tx) - } else { - None - } + if let Self::Inflight(tx) = self { Some(tx) } else { None } } } @@ -371,9 +367,8 @@ mod tests { use super::*; use crate::test_utils::{ - mock_account_id, + MockProvenTxBuilder, mock_account_id, note::{mock_note, mock_output_note}, - MockProvenTxBuilder, }; #[test] diff --git a/crates/block-producer/src/mempool/mod.rs b/crates/block-producer/src/mempool/mod.rs index a4fca1614..b1c157f16 100644 --- a/crates/block-producer/src/mempool/mod.rs +++ b/crates/block-producer/src/mempool/mod.rs @@ -4,10 +4,10 @@ use batch_graph::BatchGraph; use graph::GraphError; use inflight_state::InflightState; use miden_objects::{ + MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, batch::{BatchId, ProvenBatch}, block::BlockNumber, transaction::TransactionId, - MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH, }; use tokio::sync::{Mutex, MutexGuard}; use tracing::instrument; @@ -15,8 +15,8 @@ use transaction_expiration::TransactionExpirations; use transaction_graph::TransactionGraph; use crate::{ - domain::transaction::AuthenticatedTransaction, errors::AddTransactionError, COMPONENT, - SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, + COMPONENT, SERVER_MAX_BATCHES_PER_BLOCK, SERVER_MAX_TXS_PER_BATCH, + domain::transaction::AuthenticatedTransaction, errors::AddTransactionError, }; mod batch_graph; diff --git a/crates/block-producer/src/mempool/tests.rs 
b/crates/block-producer/src/mempool/tests.rs index 8c81286bd..6a1467287 100644 --- a/crates/block-producer/src/mempool/tests.rs +++ b/crates/block-producer/src/mempool/tests.rs @@ -2,7 +2,7 @@ use miden_objects::block::BlockNumber; use pretty_assertions::assert_eq; use super::*; -use crate::test_utils::{batch::TransactionBatchConstructor, MockProvenTxBuilder}; +use crate::test_utils::{MockProvenTxBuilder, batch::TransactionBatchConstructor}; impl Mempool { fn for_tests() -> Self { diff --git a/crates/block-producer/src/mempool/transaction_expiration.rs b/crates/block-producer/src/mempool/transaction_expiration.rs index e806a415c..1bc61aa11 100644 --- a/crates/block-producer/src/mempool/transaction_expiration.rs +++ b/crates/block-producer/src/mempool/transaction_expiration.rs @@ -1,4 +1,4 @@ -use std::collections::{btree_map::Entry, BTreeMap, BTreeSet}; +use std::collections::{BTreeMap, BTreeSet, btree_map::Entry}; use miden_objects::{block::BlockNumber, transaction::TransactionId}; diff --git a/crates/block-producer/src/mempool/transaction_graph.rs b/crates/block-producer/src/mempool/transaction_graph.rs index ab1eb43bf..4f6e60e1b 100644 --- a/crates/block-producer/src/mempool/transaction_graph.rs +++ b/crates/block-producer/src/mempool/transaction_graph.rs @@ -3,8 +3,8 @@ use std::collections::BTreeSet; use miden_objects::transaction::TransactionId; use super::{ - graph::{DependencyGraph, GraphError}, BatchBudget, BudgetStatus, + graph::{DependencyGraph, GraphError}, }; use crate::domain::transaction::AuthenticatedTransaction; diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index e3dc923e4..2db3c9e1e 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -7,7 +7,7 @@ use miden_node_proto::generated::{ use miden_node_utils::{ errors::ApiError, formatting::{format_input_notes, format_output_notes}, - tracing::grpc::{block_producer_trace_fn, OtelInterceptor}, + 
tracing::grpc::{OtelInterceptor, block_producer_trace_fn}, }; use miden_objects::{ block::BlockNumber, transaction::ProvenTransaction, utils::serde::Deserializable, @@ -19,6 +19,7 @@ use tower_http::trace::TraceLayer; use tracing::{debug, info, instrument}; use crate::{ + COMPONENT, SERVER_MEMPOOL_EXPIRATION_SLACK, SERVER_MEMPOOL_STATE_RETENTION, batch_builder::BatchBuilder, block_builder::BlockBuilder, config::BlockProducerConfig, @@ -26,7 +27,6 @@ use crate::{ errors::{AddTransactionError, BlockProducerError, VerifyTxError}, mempool::{BatchBudget, BlockBudget, Mempool, SharedMempool}, store::StoreClient, - COMPONENT, SERVER_MEMPOOL_EXPIRATION_SLACK, SERVER_MEMPOOL_STATE_RETENTION, }; /// Represents an initialized block-producer component where the RPC connection is open, diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 9d4ad4d40..ffd11b6e5 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -6,6 +6,7 @@ use std::{ use itertools::Itertools; use miden_node_proto::{ + AccountState, domain::batch::BatchInputs, errors::{ConversionError, MissingFieldHelper}, generated::{ @@ -17,22 +18,21 @@ use miden_node_proto::{ responses::{GetTransactionInputsResponse, NullifierTransactionInputRecord}, store::api_client as store_client, }, - AccountState, }; use miden_node_utils::{formatting::format_opt, tracing::grpc::OtelInterceptor}; use miden_objects::{ + Digest, account::AccountId, block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}, note::{NoteId, Nullifier}, transaction::ProvenTransaction, utils::Serializable, - Digest, }; use miden_processor::crypto::RpoDigest; use tonic::{service::interceptor::InterceptedService, transport::Channel}; use tracing::{debug, info, instrument}; -use crate::{errors::StoreError, COMPONENT}; +use crate::{COMPONENT, errors::StoreError}; // TRANSACTION INPUTS // 
================================================================================================ diff --git a/crates/block-producer/src/test_utils/account.rs b/crates/block-producer/src/test_utils/account.rs index 2247a39a3..a58d6d396 100644 --- a/crates/block-producer/src/test_utils/account.rs +++ b/crates/block-producer/src/test_utils/account.rs @@ -1,8 +1,8 @@ use std::{collections::HashMap, ops::Not, sync::LazyLock}; use miden_objects::{ - account::{AccountIdAnchor, AccountIdVersion, AccountStorageMode, AccountType}, Hasher, + account::{AccountIdAnchor, AccountIdVersion, AccountStorageMode, AccountType}, }; use super::*; diff --git a/crates/block-producer/src/test_utils/batch.rs b/crates/block-producer/src/test_utils/batch.rs index 8e4371a3f..c1e1a5f56 100644 --- a/crates/block-producer/src/test_utils/batch.rs +++ b/crates/block-producer/src/test_utils/batch.rs @@ -1,10 +1,10 @@ use std::collections::BTreeMap; use miden_objects::{ + Digest, batch::{BatchAccountUpdate, BatchId, ProvenBatch}, block::BlockNumber, transaction::{InputNotes, ProvenTransaction}, - Digest, }; use crate::test_utils::MockProvenTxBuilder; @@ -19,7 +19,7 @@ pub trait TransactionBatchConstructor { /// [`ProposedBatch`](miden_objects::batch::ProposedBatch) first and convert (without proving) /// or prove it into a [`ProvenBatch`]. 
fn mocked_from_transactions<'tx>(txs: impl IntoIterator) - -> Self; + -> Self; /// Returns a `TransactionBatch` with `notes_per_tx.len()` transactions, where the i'th /// transaction has `notes_per_tx[i]` notes created diff --git a/crates/block-producer/src/test_utils/block.rs b/crates/block-producer/src/test_utils/block.rs index 7fcf0bd4c..25b81fbe4 100644 --- a/crates/block-producer/src/test_utils/block.rs +++ b/crates/block-producer/src/test_utils/block.rs @@ -1,4 +1,5 @@ use miden_objects::{ + ACCOUNT_TREE_DEPTH, Digest, batch::ProvenBatch, block::{ BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, OutputNoteBatch, @@ -7,7 +8,6 @@ use miden_objects::{ crypto::merkle::{Mmr, SimpleSmt}, note::Nullifier, transaction::OutputNote, - Digest, ACCOUNT_TREE_DEPTH, }; use super::MockStoreSuccess; diff --git a/crates/block-producer/src/test_utils/mod.rs b/crates/block-producer/src/test_utils/mod.rs index 97dfcc5fb..83a7a5d2e 100644 --- a/crates/block-producer/src/test_utils/mod.rs +++ b/crates/block-producer/src/test_utils/mod.rs @@ -1,16 +1,16 @@ use std::sync::Arc; use miden_objects::{ + Digest, account::AccountId, crypto::rand::{FeltRng, RpoRandomCoin}, testing::account_id::AccountIdBuilder, transaction::TransactionId, - Digest, }; mod proven_tx; -pub use proven_tx::{mock_proven_tx, MockProvenTxBuilder}; +pub use proven_tx::{MockProvenTxBuilder, mock_proven_tx}; mod store; @@ -18,7 +18,7 @@ pub use store::{MockStoreSuccess, MockStoreSuccessBuilder}; mod account; -pub use account::{mock_account_id, MockPrivateAccount}; +pub use account::{MockPrivateAccount, mock_account_id}; pub mod block; diff --git a/crates/block-producer/src/test_utils/note.rs b/crates/block-producer/src/test_utils/note.rs index 8ed060c6e..271d03654 100644 --- a/crates/block-producer/src/test_utils/note.rs +++ b/crates/block-producer/src/test_utils/note.rs @@ -4,7 +4,7 @@ use miden_objects::{ testing::note::NoteBuilder, transaction::{InputNote, InputNoteCommitment, OutputNote}, }; -use 
rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; +use rand_chacha::{ChaCha20Rng, rand_core::SeedableRng}; use crate::test_utils::account::mock_account_id; diff --git a/crates/block-producer/src/test_utils/proven_tx.rs b/crates/block-producer/src/test_utils/proven_tx.rs index 4af9a0e5b..ed51977cb 100644 --- a/crates/block-producer/src/test_utils/proven_tx.rs +++ b/crates/block-producer/src/test_utils/proven_tx.rs @@ -3,12 +3,12 @@ use std::ops::Range; use itertools::Itertools; use miden_air::HashFunction; use miden_objects::{ + Digest, Felt, Hasher, ONE, account::AccountId, block::BlockNumber, note::{Note, NoteExecutionHint, NoteHeader, NoteMetadata, NoteType, Nullifier}, transaction::{InputNote, OutputNote, ProvenTransaction, ProvenTransactionBuilder}, vm::ExecutionProof, - Digest, Felt, Hasher, ONE, }; use rand::Rng; use winterfell::Proof; @@ -36,7 +36,7 @@ impl MockProvenTxBuilder { /// Generates 3 random, sequential transactions acting on the same account. pub fn sequential() -> [AuthenticatedTransaction; 3] { let mut rng = rand::thread_rng(); - let mock_account: MockPrivateAccount<4> = rng.gen::().into(); + let mock_account: MockPrivateAccount<4> = rng.r#gen::().into(); (0..3) .map(|i| { diff --git a/crates/block-producer/src/test_utils/store.rs b/crates/block-producer/src/test_utils/store.rs index 4d0507e33..adb8fccae 100644 --- a/crates/block-producer/src/test_utils/store.rs +++ b/crates/block-producer/src/test_utils/store.rs @@ -4,12 +4,12 @@ use std::{ }; use miden_objects::{ + ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, batch::ProvenBatch, block::{BlockHeader, BlockNumber, OutputNoteBatch, ProvenBlock}, crypto::merkle::{Mmr, SimpleSmt, Smt}, note::{NoteId, NoteInclusionProof}, transaction::ProvenTransaction, - ACCOUNT_TREE_DEPTH, EMPTY_WORD, ZERO, }; use tokio::sync::RwLock; diff --git a/crates/proto/src/domain/account.rs b/crates/proto/src/domain/account.rs index 36fadc4a0..9e176abbc 100644 --- a/crates/proto/src/domain/account.rs +++ 
b/crates/proto/src/domain/account.rs @@ -2,11 +2,11 @@ use std::fmt::{Debug, Display, Formatter}; use miden_node_utils::formatting::format_opt; use miden_objects::{ + Digest, account::{Account, AccountHeader, AccountId}, block::BlockNumber, crypto::{hash::rpo::RpoDigest, merkle::MerklePath}, utils::{Deserializable, Serializable}, - Digest, }; use super::try_convert; diff --git a/crates/proto/src/domain/block.rs b/crates/proto/src/domain/block.rs index ff19518bb..ca9e23cfd 100644 --- a/crates/proto/src/domain/block.rs +++ b/crates/proto/src/domain/block.rs @@ -8,11 +8,11 @@ use miden_objects::{ }; use crate::{ + AccountWitnessRecord, NullifierWitnessRecord, errors::{ConversionError, MissingFieldHelper}, generated::{ block as proto, note::NoteInclusionInBlockProof, responses::GetBlockInputsResponse, }, - AccountWitnessRecord, NullifierWitnessRecord, }; // BLOCK HEADER diff --git a/crates/proto/src/domain/digest.rs b/crates/proto/src/domain/digest.rs index 87c8e459d..d2cba603a 100644 --- a/crates/proto/src/domain/digest.rs +++ b/crates/proto/src/domain/digest.rs @@ -1,7 +1,7 @@ use std::fmt::{Debug, Display, Formatter}; use hex::{FromHex, ToHex}; -use miden_objects::{note::NoteId, Digest, Felt, StarkField}; +use miden_objects::{Digest, Felt, StarkField, note::NoteId}; use crate::{errors::ConversionError, generated::digest as proto}; diff --git a/crates/proto/src/domain/merkle.rs b/crates/proto/src/domain/merkle.rs index 2cd1adf74..c0670fa45 100644 --- a/crates/proto/src/domain/merkle.rs +++ b/crates/proto/src/domain/merkle.rs @@ -1,6 +1,6 @@ use miden_objects::{ - crypto::merkle::{LeafIndex, MerklePath, MmrDelta, SmtLeaf, SmtProof}, Digest, Word, + crypto::merkle::{LeafIndex, MerklePath, MmrDelta, SmtLeaf, SmtProof}, }; use super::{convert, try_convert}; diff --git a/crates/proto/src/domain/note.rs b/crates/proto/src/domain/note.rs index 6c3b7f165..b1048b599 100644 --- a/crates/proto/src/domain/note.rs +++ b/crates/proto/src/domain/note.rs @@ -1,6 +1,6 @@ use 
miden_objects::{ - note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, Digest, Felt, + note::{NoteExecutionHint, NoteId, NoteInclusionProof, NoteMetadata, NoteTag, NoteType}, }; use crate::{ diff --git a/crates/rpc/src/server/api.rs b/crates/rpc/src/server/api.rs index fbb233eb4..ea6fb112a 100644 --- a/crates/rpc/src/server/api.rs +++ b/crates/rpc/src/server/api.rs @@ -20,18 +20,18 @@ use miden_node_proto::{ }; use miden_node_utils::tracing::grpc::OtelInterceptor; use miden_objects::{ - account::AccountId, crypto::hash::rpo::RpoDigest, transaction::ProvenTransaction, - utils::serde::Deserializable, Digest, MAX_NUM_FOREIGN_ACCOUNTS, MIN_PROOF_SECURITY_LEVEL, + Digest, MAX_NUM_FOREIGN_ACCOUNTS, MIN_PROOF_SECURITY_LEVEL, account::AccountId, + crypto::hash::rpo::RpoDigest, transaction::ProvenTransaction, utils::serde::Deserializable, }; use miden_tx::TransactionVerifier; use tonic::{ + Request, Response, Status, service::interceptor::InterceptedService, transport::{Channel, Error}, - Request, Response, Status, }; use tracing::{debug, info, instrument}; -use crate::{config::RpcConfig, COMPONENT}; +use crate::{COMPONENT, config::RpcConfig}; // RPC API // ================================================================================================ diff --git a/crates/rpc/src/server/mod.rs b/crates/rpc/src/server/mod.rs index d728eab45..8310387ec 100644 --- a/crates/rpc/src/server/mod.rs +++ b/crates/rpc/src/server/mod.rs @@ -5,7 +5,7 @@ use tokio::net::TcpListener; use tokio_stream::wrappers::TcpListenerStream; use tracing::info; -use crate::{config::RpcConfig, COMPONENT}; +use crate::{COMPONENT, config::RpcConfig}; mod api; diff --git a/crates/store/src/db/migrations.rs b/crates/store/src/db/migrations.rs index ef7547ee2..34c888b02 100644 --- a/crates/store/src/db/migrations.rs +++ b/crates/store/src/db/migrations.rs @@ -1,14 +1,14 @@ use std::sync::LazyLock; -use miden_objects::crypto::hash::blake::{Blake3Digest, Blake3_160}; +use 
miden_objects::crypto::hash::blake::{Blake3_160, Blake3Digest}; use rusqlite::Connection; -use rusqlite_migration::{Migrations, SchemaVersion, M}; +use rusqlite_migration::{M, Migrations, SchemaVersion}; use tracing::{debug, error, info, instrument}; use crate::{ + COMPONENT, db::{settings::Settings, sql::utils::schema_version}, errors::DatabaseError, - COMPONENT, }; type Hash = Blake3Digest<20>; diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 3b2027455..cba274d0a 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -23,12 +23,12 @@ use tokio::sync::oneshot; use tracing::{info, info_span, instrument}; use crate::{ + COMPONENT, SQL_STATEMENT_CACHE_CAPACITY, blocks::BlockStore, config::StoreConfig, db::migrations::apply_migrations, errors::{DatabaseError, DatabaseSetupError, GenesisError, NoteSyncError, StateSyncError}, genesis::GenesisState, - COMPONENT, SQL_STATEMENT_CACHE_CAPACITY, }; mod migrations; diff --git a/crates/store/src/db/settings.rs b/crates/store/src/db/settings.rs index 6fdb41272..3bb497f26 100644 --- a/crates/store/src/db/settings.rs +++ b/crates/store/src/db/settings.rs @@ -1,4 +1,4 @@ -use rusqlite::{params, types::FromSql, Connection, OptionalExtension, Result, ToSql}; +use rusqlite::{Connection, OptionalExtension, Result, ToSql, params, types::FromSql}; use crate::db::sql::utils::table_exists; diff --git a/crates/store/src/db/sql/mod.rs b/crates/store/src/db/sql/mod.rs index c4f1894ba..fc01d499e 100644 --- a/crates/store/src/db/sql/mod.rs +++ b/crates/store/src/db/sql/mod.rs @@ -5,17 +5,18 @@ pub(crate) mod utils; use std::{ borrow::Cow, - collections::{btree_map::Entry, BTreeMap, BTreeSet}, + collections::{BTreeMap, BTreeSet, btree_map::Entry}, num::NonZeroUsize, rc::Rc, }; use miden_node_proto::domain::account::{AccountInfo, AccountSummary}; use miden_objects::{ + Digest, Word, account::{ - delta::AccountUpdateDetails, AccountDelta, AccountId, AccountStorageDelta, - AccountVaultDelta, 
FungibleAssetDelta, NonFungibleAssetDelta, NonFungibleDeltaAction, - StorageMapDelta, + AccountDelta, AccountId, AccountStorageDelta, AccountVaultDelta, FungibleAssetDelta, + NonFungibleAssetDelta, NonFungibleDeltaAction, StorageMapDelta, + delta::AccountUpdateDetails, }, asset::NonFungibleAsset, block::{BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNumber}, @@ -23,9 +24,8 @@ use miden_objects::{ note::{NoteExecutionMode, NoteId, NoteInclusionProof, NoteMetadata, NoteType, Nullifier}, transaction::TransactionId, utils::serde::{Deserializable, Serializable}, - Digest, Word, }; -use rusqlite::{params, types::Value, Connection, Transaction}; +use rusqlite::{Connection, Transaction, params, types::Value}; use utils::{read_block_number, read_from_blob_column}; use super::{ @@ -383,7 +383,7 @@ pub fn select_account_delta( _ => { return Err(DatabaseError::DataCorrupted(format!( "Invalid non-fungible asset delta action: {action}" - ))) + ))); }, } } diff --git a/crates/store/src/db/sql/utils.rs b/crates/store/src/db/sql/utils.rs index 284c3dde8..3d15d7945 100644 --- a/crates/store/src/db/sql/utils.rs +++ b/crates/store/src/db/sql/utils.rs @@ -7,9 +7,8 @@ use miden_objects::{ utils::Deserializable, }; use rusqlite::{ - params, + Connection, OptionalExtension, params, types::{Value, ValueRef}, - Connection, OptionalExtension, }; use crate::errors::DatabaseError; @@ -38,7 +37,7 @@ pub fn schema_version(conn: &Connection) -> rusqlite::Result { /// Auxiliary macro which substitutes `$src` token by `$dst` expression. macro_rules! 
subst { - ($src:tt, $dst:expr) => { + ($src:tt, $dst:expr_2021) => { $dst }; } diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index 98faf1f1e..962b6db2a 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -6,10 +6,11 @@ use std::num::NonZeroUsize; use miden_lib::transaction::TransactionKernel; use miden_node_proto::domain::account::AccountSummary; use miden_objects::{ + Felt, FieldElement, Word, ZERO, account::{ - delta::AccountUpdateDetails, Account, AccountBuilder, AccountComponent, AccountDelta, - AccountId, AccountIdVersion, AccountStorageDelta, AccountStorageMode, AccountType, - AccountVaultDelta, StorageSlot, + Account, AccountBuilder, AccountComponent, AccountDelta, AccountId, AccountIdVersion, + AccountStorageDelta, AccountStorageMode, AccountType, AccountVaultDelta, StorageSlot, + delta::AccountUpdateDetails, }, asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}, block::{BlockAccountUpdate, BlockHeader, BlockNoteIndex, BlockNoteTree, BlockNumber}, @@ -22,12 +23,11 @@ use miden_objects::{ ACCOUNT_ID_OFF_CHAIN_SENDER, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_OFF_CHAIN, ACCOUNT_ID_REGULAR_ACCOUNT_UPDATABLE_CODE_ON_CHAIN, }, - Felt, FieldElement, Word, ZERO, }; -use rusqlite::{vtab::array, Connection}; +use rusqlite::{Connection, vtab::array}; -use super::{sql, AccountInfo, NoteRecord, NullifierInfo}; -use crate::db::{migrations::apply_migrations, sql::PaginationToken, TransactionSummary}; +use super::{AccountInfo, NoteRecord, NullifierInfo, sql}; +use crate::db::{TransactionSummary, migrations::apply_migrations, sql::PaginationToken}; fn create_db() -> Connection { let mut conn = Connection::open_in_memory().unwrap(); diff --git a/crates/store/src/errors.rs b/crates/store/src/errors.rs index ebca9f753..877f4bae3 100644 --- a/crates/store/src/errors.rs +++ b/crates/store/src/errors.rs @@ -2,6 +2,7 @@ use std::io; use deadpool_sqlite::{InteractError, PoolError}; use miden_objects::{ + 
AccountDeltaError, AccountError, NoteError, account::AccountId, block::{BlockHeader, BlockNumber}, crypto::{ @@ -11,7 +12,6 @@ use miden_objects::{ }, note::Nullifier, transaction::OutputNote, - AccountDeltaError, AccountError, NoteError, }; use rusqlite::types::FromSqlError; use thiserror::Error; @@ -150,7 +150,9 @@ pub enum GenesisError { genesis_filepath: String, source: io::Error, }, - #[error("block header in store doesn't match block header in genesis file. Expected {expected_genesis_header:?}, but store contained {block_header_in_store:?}")] + #[error( + "block header in store doesn't match block header in genesis file. Expected {expected_genesis_header:?}, but store contained {block_header_in_store:?}" + )] GenesisBlockHeaderMismatch { expected_genesis_header: Box, block_header_in_store: Box, @@ -230,7 +232,9 @@ pub enum GetBlockInputsError { SelectNoteInclusionProofError(#[source] DatabaseError), #[error("failed to select block headers")] SelectBlockHeaderError(#[source] DatabaseError), - #[error("highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}")] + #[error( + "highest block number {highest_block_number} referenced by a batch is newer than the latest block {latest_block_number}" + )] UnknownBatchBlockReference { highest_block_number: BlockNumber, latest_block_number: BlockNumber, @@ -265,7 +269,9 @@ pub enum GetBatchInputsError { SelectBlockHeaderError(#[source] DatabaseError), #[error("set of blocks refernced by transactions is empty")] TransactionBlockReferencesEmpty, - #[error("highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}")] + #[error( + "highest block number {highest_block_num} referenced by a transaction is newer than the latest block {latest_block_num}" + )] UnknownTransactionBlockReference { highest_block_num: BlockNumber, latest_block_num: BlockNumber, diff --git a/crates/store/src/genesis.rs 
b/crates/store/src/genesis.rs index d78990734..37728e750 100644 --- a/crates/store/src/genesis.rs +++ b/crates/store/src/genesis.rs @@ -1,11 +1,11 @@ use miden_lib::transaction::TransactionKernel; use miden_objects::{ - account::{delta::AccountUpdateDetails, Account}, + ACCOUNT_TREE_DEPTH, Digest, + account::{Account, delta::AccountUpdateDetails}, block::{BlockAccountUpdate, BlockHeader, BlockNoteTree, BlockNumber, ProvenBlock}, crypto::merkle::{MmrPeaks, SimpleSmt, Smt}, note::Nullifier, utils::serde::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}, - Digest, ACCOUNT_TREE_DEPTH, }; use crate::errors::GenesisError; diff --git a/crates/store/src/nullifier_tree.rs b/crates/store/src/nullifier_tree.rs index d34bc01d3..7d8b04d5f 100644 --- a/crates/store/src/nullifier_tree.rs +++ b/crates/store/src/nullifier_tree.rs @@ -1,11 +1,11 @@ use miden_objects::{ + Felt, FieldElement, Word, block::BlockNumber, crypto::{ hash::rpo::RpoDigest, - merkle::{MutationSet, Smt, SmtProof, SMT_DEPTH}, + merkle::{MutationSet, SMT_DEPTH, Smt, SmtProof}, }, note::Nullifier, - Felt, FieldElement, Word, }; use crate::errors::NullifierTreeError; diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index f69c8779a..4edc54c37 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -37,7 +37,7 @@ use miden_objects::{ use tonic::{Request, Response, Status}; use tracing::{debug, info, instrument}; -use crate::{state::State, COMPONENT}; +use crate::{COMPONENT, state::State}; // STORE API // ================================================================================================ @@ -487,7 +487,7 @@ impl api_server::Api for StoreApi { Ok(Response::new(GetAccountProofsResponse { block_num: block_num.as_u32(), - account_proofs: infos.into_iter().map(Into::into).collect(), + account_proofs: infos, })) } diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index a3c1f009d..7316dd969 100644 --- 
a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -7,7 +7,7 @@ use tokio_stream::wrappers::TcpListenerStream; use tower_http::trace::TraceLayer; use tracing::info; -use crate::{blocks::BlockStore, config::StoreConfig, db::Db, state::State, COMPONENT}; +use crate::{COMPONENT, blocks::BlockStore, config::StoreConfig, db::Db, state::State}; mod api; diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index d1ec2b48f..6b37caf0f 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -18,6 +18,7 @@ use miden_node_proto::{ }; use miden_node_utils::formatting::format_array; use miden_objects::{ + ACCOUNT_TREE_DEPTH, AccountError, account::{AccountDelta, AccountHeader, AccountId, StorageSlot}, block::{AccountWitness, BlockHeader, BlockInputs, BlockNumber, NullifierWitness, ProvenBlock}, crypto::{ @@ -30,15 +31,15 @@ use miden_objects::{ note::{NoteId, Nullifier}, transaction::{ChainMmr, OutputNote}, utils::Serializable, - AccountError, ACCOUNT_TREE_DEPTH, }; use tokio::{ - sync::{oneshot, Mutex, RwLock}, + sync::{Mutex, RwLock, oneshot}, time::Instant, }; use tracing::{info, info_span, instrument}; use crate::{ + COMPONENT, blocks::BlockStore, db::{Db, NoteRecord, NoteSyncUpdate, NullifierInfo, StateSyncUpdate}, errors::{ @@ -47,7 +48,6 @@ use crate::{ StateSyncError, }, nullifier_tree::NullifierTree, - COMPONENT, }; // STRUCTURES // ================================================================================================ @@ -962,10 +962,7 @@ impl State { from_block: BlockNumber, to_block: BlockNumber, ) -> Result, DatabaseError> { - self.db - .select_account_state_delta(account_id, from_block, to_block) - .await - .map_err(Into::into) + self.db.select_account_state_delta(account_id, from_block, to_block).await } /// Loads a block from the block store. Return `Ok(None)` if the block is not found. 
diff --git a/crates/test-macro/src/lib.rs b/crates/test-macro/src/lib.rs index 8958a4d47..13696059d 100644 --- a/crates/test-macro/src/lib.rs +++ b/crates/test-macro/src/lib.rs @@ -1,6 +1,6 @@ use proc_macro::TokenStream; use quote::ToTokens; -use syn::{parse_macro_input, parse_quote, Block, ItemFn}; +use syn::{Block, ItemFn, parse_macro_input, parse_quote}; #[proc_macro_attribute] pub fn enable_logging(_attr: TokenStream, item: TokenStream) -> TokenStream { diff --git a/crates/utils/src/config.rs b/crates/utils/src/config.rs index 4f79c6c49..3005c5915 100644 --- a/crates/utils/src/config.rs +++ b/crates/utils/src/config.rs @@ -1,8 +1,8 @@ use std::path::Path; use figment::{ - providers::{Format, Toml}, Figment, + providers::{Format, Toml}, }; use serde::Deserialize; diff --git a/crates/utils/src/crypto.rs b/crates/utils/src/crypto.rs index f8a48aef2..a9fec4987 100644 --- a/crates/utils/src/crypto.rs +++ b/crates/utils/src/crypto.rs @@ -1,12 +1,12 @@ use miden_objects::{ - crypto::{hash::rpo::RpoDigest, rand::RpoRandomCoin}, Felt, + crypto::{hash::rpo::RpoDigest, rand::RpoRandomCoin}, }; use rand::{Rng, RngCore}; /// Creates a new RPO Random Coin with random seed pub fn get_rpo_random_coin(rng: &mut T) -> RpoRandomCoin { - let auth_seed: [u64; 4] = rng.gen(); + let auth_seed: [u64; 4] = rng.r#gen(); let rng_seed = RpoDigest::from(auth_seed.map(Felt::new)); RpoRandomCoin::new(rng_seed.into()) diff --git a/crates/utils/src/formatting.rs b/crates/utils/src/formatting.rs index 38e980b14..dedffc9c4 100644 --- a/crates/utils/src/formatting.rs +++ b/crates/utils/src/formatting.rs @@ -3,7 +3,7 @@ use std::fmt::Display; use itertools::Itertools; use miden_objects::{ crypto::{ - hash::{blake::Blake3Digest, Digest}, + hash::{Digest, blake::Blake3Digest}, utils::bytes_to_hex_string, }, transaction::{InputNoteCommitment, InputNotes, OutputNotes}, diff --git a/crates/utils/src/logging.rs b/crates/utils/src/logging.rs index cec4f6160..eccedf7c0 100644 --- 
a/crates/utils/src/logging.rs +++ b/crates/utils/src/logging.rs @@ -7,8 +7,8 @@ use opentelemetry_sdk::{propagation::TraceContextPropagator, trace::SpanExporter use tracing::subscriber::Subscriber; use tracing_opentelemetry::OpenTelemetryLayer; use tracing_subscriber::{ - layer::{Filter, SubscriberExt}, Layer, Registry, + layer::{Filter, SubscriberExt}, }; /// Configures [`setup_tracing`] to enable or disable the open-telemetry exporter. @@ -108,8 +108,8 @@ where fn env_or_default_filter() -> Box + Send + Sync + 'static> { use tracing::level_filters::LevelFilter; use tracing_subscriber::{ - filter::{FilterExt, Targets}, EnvFilter, + filter::{FilterExt, Targets}, }; // `tracing` does not allow differentiating between invalid and missing env var so we manually diff --git a/crates/utils/src/tracing/grpc.rs b/crates/utils/src/tracing/grpc.rs index 6a5bb9508..2f83f1d57 100644 --- a/crates/utils/src/tracing/grpc.rs +++ b/crates/utils/src/tracing/grpc.rs @@ -1,6 +1,6 @@ /// Creates a [`tracing::Span`] based on RPC service and method name. macro_rules! rpc_span { - ($service:expr, $method:expr) => { + ($service:literal, $method:literal) => { tracing::info_span!( concat!($service, "/", $method), rpc.service = $service, diff --git a/crates/utils/src/tracing/span_ext.rs b/crates/utils/src/tracing/span_ext.rs index 4aed6c7a8..3d97167f1 100644 --- a/crates/utils/src/tracing/span_ext.rs +++ b/crates/utils/src/tracing/span_ext.rs @@ -1,8 +1,8 @@ use core::time::Duration; use std::net::IpAddr; -use miden_objects::{block::BlockNumber, Digest}; -use opentelemetry::{trace::Status, Key, Value}; +use miden_objects::{Digest, block::BlockNumber}; +use opentelemetry::{Key, Value, trace::Status}; /// Utility functions for converting types into [`opentelemetry::Value`]. 
pub trait ToValue { diff --git a/crates/utils/src/version/mod.rs b/crates/utils/src/version/mod.rs index 98f3c096c..03ff66249 100644 --- a/crates/utils/src/version/mod.rs +++ b/crates/utils/src/version/mod.rs @@ -26,7 +26,7 @@ pub struct LongVersion { impl std::fmt::Display for LongVersion { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let Self { + let &Self { version, mut sha, mut branch, @@ -39,7 +39,7 @@ impl std::fmt::Display for LongVersion { debug, } = self; - let dirty = match *dirty { + let dirty = match dirty { "true" => "-dirty", _ => "", }; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 31c2f2645..f3a6d7f22 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.84" +channel = "1.85" components = ["clippy", "rust-src", "rustfmt"] profile = "minimal" From 9571780fbdb4824014e91e09c7431c6cca99e632 Mon Sep 17 00:00:00 2001 From: Santiago Pittella <87827390+SantiagoPittella@users.noreply.github.com> Date: Sun, 9 Mar 2025 18:20:00 -0300 Subject: [PATCH 26/27] feat: use remote batch prover (#701) --- CHANGELOG.md | 1 + Cargo.lock | 88 +++++++++++++++++-- README.md | 8 ++ bin/node/src/config.rs | 20 ++++- config/miden-node.toml | 2 + crates/block-producer/Cargo.toml | 41 +++++---- .../block-producer/src/batch_builder/mod.rs | 81 ++++++++++++++--- crates/block-producer/src/config.rs | 17 +++- crates/block-producer/src/errors.rs | 4 + crates/block-producer/src/server.rs | 2 +- 10 files changed, 218 insertions(+), 46 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5dd7a791d..b035de4fc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ - [BREAKING] Update `GetBlockInputs` RPC (#709). - [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). - [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). +- [BREAKING] Added `batch_prover_url` to block producer configuration (#701). - [BREAKING] Updated to Rust Edition 2024 (#727). 
- [BREAKING] MSRV bumped to 1.85 (#727). diff --git a/Cargo.lock b/Cargo.lock index 198bf8e17..4b63800c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2053,7 +2053,7 @@ dependencies = [ [[package]] name = "miden-block-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" dependencies = [ "miden-crypto", "miden-lib", @@ -2146,7 +2146,7 @@ dependencies = [ [[package]] name = "miden-lib" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" dependencies = [ "miden-assembly", "miden-objects", @@ -2180,7 +2180,7 @@ dependencies = [ "supports-hyperlinks", "supports-unicode", "syn", - "terminal_size", + "terminal_size 0.3.0", "textwrap", "thiserror 2.0.11", "trybuild", @@ -2236,6 +2236,7 @@ dependencies = [ "miden-node-utils", "miden-objects", "miden-processor", + "miden-proving-service-client", "miden-tx", "miden-tx-batch-prover", "pretty_assertions", @@ -2344,7 +2345,7 @@ dependencies = [ [[package]] name = "miden-objects" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" dependencies = [ "getrandom 0.2.15", "miden-assembly", @@ -2387,6 +2388,25 @@ dependencies = [ "winter-prover", ] +[[package]] +name = "miden-proving-service-client" +version = "0.8.0" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" +dependencies = [ + "async-trait", + "miden-objects", + "miden-tx", + "miette", + "prost", + "prost-build", + "protox", + 
"thiserror 2.0.11", + "tokio", + "tonic", + "tonic-build", + "tonic-web-wasm-client", +] + [[package]] name = "miden-rpc-proto" version = "0.8.0" @@ -2403,7 +2423,7 @@ dependencies = [ [[package]] name = "miden-tx" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" dependencies = [ "async-trait", "miden-lib", @@ -2420,7 +2440,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" version = "0.8.0" -source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#f2d50bfa4a83841875570d1301adccbe164ea111" +source = "git+https://github.com/0xPolygonMiden/miden-base?branch=next#0aa58b4ad81d7970edcfd32a08eb90234294ecbf" dependencies = [ "miden-core", "miden-crypto", @@ -2449,8 +2469,16 @@ version = "7.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a955165f87b37fd1862df2a59547ac542c77ef6d17c666f619d1ad22dd89484" dependencies = [ + "backtrace", + "backtrace-ext", "cfg-if", "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size 0.4.1", + "textwrap", "thiserror 1.0.69", "unicode-width 0.1.14", ] @@ -3829,6 +3857,16 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "terminal_size" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9" +dependencies = [ + "rustix", + "windows-sys 0.59.0", +] + [[package]] name = "textwrap" version = "0.16.1" @@ -4114,6 +4152,31 @@ dependencies = [ "tracing", ] +[[package]] +name = "tonic-web-wasm-client" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c191f6613df48874158b6af303313eadf25d1b7a534216b62a1f049d77cd2711" +dependencies = [ + "base64 0.22.1", + "byteorder", + 
"bytes", + "futures-util", + "http 1.2.0", + "http-body", + "http-body-util", + "httparse", + "js-sys", + "pin-project", + "thiserror 1.0.69", + "tonic", + "tower-service", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] + [[package]] name = "tower" version = "0.4.13" @@ -4609,6 +4672,19 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "web-sys" version = "0.3.77" diff --git a/README.md b/README.md index e0b8a8d22..5cb1bcf11 100644 --- a/README.md +++ b/README.md @@ -185,6 +185,14 @@ or alternatively start the systemd service if that's how you wish to operate: systemctl start miden-node.service ``` +The `miden-node.toml` can be modified by adding a batch prover URL to delegate batch proving: + +```toml +batch_prover_url = "" +``` + +If this variable is not set, the node will use a local batch prover. + ### Monitoring and telemetry Please see our operator documentation [here](docs/operator.md). diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index c65275bd2..e4b52c6f7 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -27,6 +27,7 @@ struct NormalizedRpcConfig { struct NormalizedBlockProducerConfig { endpoint: Url, verify_tx_proofs: bool, + batch_prover_url: Option, } impl Default for NormalizedRpcConfig { @@ -44,9 +45,17 @@ impl Default for NormalizedRpcConfig { impl Default for NormalizedBlockProducerConfig { fn default() -> Self { // Ensure we stay in sync with the original defaults. 
- let BlockProducerConfig { endpoint, store_url: _, verify_tx_proofs } = - BlockProducerConfig::default(); - Self { endpoint, verify_tx_proofs } + let BlockProducerConfig { + endpoint, + store_url: _, + verify_tx_proofs, + batch_prover_url, + } = BlockProducerConfig::default(); + Self { + endpoint, + verify_tx_proofs, + batch_prover_url, + } } } @@ -58,6 +67,7 @@ impl NodeConfig { endpoint: block_producer.endpoint, store_url: store.endpoint.clone(), verify_tx_proofs: block_producer.verify_tx_proofs, + batch_prover_url: block_producer.batch_prover_url, }; let rpc = RpcConfig { @@ -92,6 +102,7 @@ mod tests { [block_producer] endpoint = "http://127.0.0.1:8080" verify_tx_proofs = true + batch_prover_url = "http://127.0.0.1:8081" [rpc] endpoint = "http://127.0.0.1:8080" @@ -111,7 +122,8 @@ mod tests { NodeConfig { block_producer: NormalizedBlockProducerConfig { endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), - verify_tx_proofs: true + verify_tx_proofs: true, + batch_prover_url: Some(Url::parse("http://127.0.0.1:8081").unwrap()), }, rpc: NormalizedRpcConfig { endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), diff --git a/config/miden-node.toml b/config/miden-node.toml index 4356ce516..8a4c4ec85 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -6,6 +6,8 @@ endpoint = "http://127.0.0.1:48046" # enables or disables the verification of transaction proofs before they are accepted into the # transaction queue. 
verify_tx_proofs = true +# address of the remote batch prover service +batch_prover_url = "http://127.0.0.1:8082/" [rpc] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-rpc', 1)) % 2**16 diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index f6d0789b6..2dd38c170 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -18,26 +18,29 @@ workspace = true tracing-forest = ["miden-node-utils/tracing-forest"] [dependencies] -async-trait = { version = "0.1" } -futures = { version = "0.3" } -itertools = { workspace = true } -miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } -miden-lib = { workspace = true } -miden-node-proto = { workspace = true } -miden-node-utils = { workspace = true } -miden-objects = { workspace = true } -miden-processor = { workspace = true } -miden-tx = { workspace = true } +async-trait = { version = "0.1" } +futures = { version = "0.3" } +itertools = { workspace = true } +miden-block-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } +miden-lib = { workspace = true } +miden-node-proto = { workspace = true } +miden-node-utils = { workspace = true } +miden-objects = { workspace = true } +miden-processor = { workspace = true } +miden-proving-service-client = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next", features = [ + "batch-prover", +] } +miden-tx = { workspace = true } miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } -rand = { version = "0.8" } -serde = { version = "1.0", features = ["derive"] } -thiserror = { workspace = true } -tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } -tokio-stream = { workspace = true, features = ["net"] } -tonic = { workspace = true, features = ["transport"] } -tower-http = { workspace = true, features = ["util"] } -tracing = { 
workspace = true } -url = { workspace = true } +rand = { version = "0.8" } +serde = { version = "1.0", features = ["derive"] } +thiserror = { workspace = true } +tokio = { workspace = true, features = ["macros", "net", "rt-multi-thread", "sync", "time"] } +tokio-stream = { workspace = true, features = ["net"] } +tonic = { workspace = true, features = ["transport"] } +tower-http = { workspace = true, features = ["util"] } +tracing = { workspace = true } +url = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index af74d719f..f48f75ff2 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -6,10 +6,12 @@ use miden_objects::{ MIN_PROOF_SECURITY_LEVEL, batch::{BatchId, ProposedBatch, ProvenBatch}, }; +use miden_proving_service_client::proving_service::batch_prover::RemoteBatchProver; use miden_tx_batch_prover::LocalBatchProver; use rand::Rng; use tokio::{task::JoinSet, time}; use tracing::{Span, debug, info, instrument}; +use url::Url; use crate::{ COMPONENT, SERVER_BUILD_BATCH_FREQUENCY, domain::transaction::AuthenticatedTransaction, @@ -33,10 +35,20 @@ pub struct BatchBuilder { /// /// Note: this _must_ be sign positive and less than 1.0. pub failure_rate: f32, + /// The batch prover to use. + batch_prover: BatchProver, } -impl Default for BatchBuilder { - fn default() -> Self { +impl BatchBuilder { + /// Creates a new [`BatchBuilder`] with the given batch prover URL. + /// + /// Defaults to [`BatchProver::Local`] is no URL is provided. + pub fn new(batch_prover_url: Option) -> Self { + let batch_prover = match batch_prover_url { + Some(url) => BatchProver::new_remote(url), + None => BatchProver::new_local(MIN_PROOF_SECURITY_LEVEL), + }; + Self { batch_interval: SERVER_BUILD_BATCH_FREQUENCY, // SAFETY: 2 is non-zero so this always succeeds. 
@@ -44,11 +56,10 @@ impl Default for BatchBuilder { // Note: The range cannot be empty. simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, + batch_prover, } } -} -impl BatchBuilder { /// Starts the [`BatchBuilder`], creating and proving batches at the configured interval. /// /// A pool of batch-proving workers is spawned, which are fed new batch jobs periodically. @@ -63,8 +74,13 @@ impl BatchBuilder { let mut interval = tokio::time::interval(self.batch_interval); interval.set_missed_tick_behavior(time::MissedTickBehavior::Delay); - let mut worker_pool = - WorkerPool::new(self.workers, self.simulated_proof_time, self.failure_rate, store); + let mut worker_pool = WorkerPool::new( + self.workers, + self.simulated_proof_time, + self.failure_rate, + store, + self.batch_prover, + ); loop { tokio::select! { @@ -123,6 +139,7 @@ struct WorkerPool { /// impact beyond ergonomics. task_map: Vec<(tokio::task::Id, BatchId)>, store: StoreClient, + batch_prover: BatchProver, } impl WorkerPool { @@ -131,6 +148,7 @@ impl WorkerPool { simulated_proof_time: Range, failure_rate: f32, store: StoreClient, + batch_prover: BatchProver, ) -> Self { Self { simulated_proof_time, @@ -139,6 +157,7 @@ impl WorkerPool { store, in_progress: JoinSet::default(), task_map: Vec::default(), + batch_prover, } } @@ -214,6 +233,7 @@ impl WorkerPool { // Note: Rng::gen rolls between [0, 1.0) for f32, so this works as expected. 
let failed = rand::thread_rng().r#gen::() < self.failure_rate; let store = self.store.clone(); + let batch_prover = self.batch_prover.clone(); async move { tracing::debug!("Begin proving batch."); @@ -229,8 +249,9 @@ impl WorkerPool { .await .map_err(|err| (id, BuildBatchError::FetchBatchInputsFailed(err)))?; - let batch = - Self::build_batch(transactions, batch_inputs).map_err(|err| (id, err))?; + let batch = Self::build_batch(transactions, batch_inputs, batch_prover) + .await + .map_err(|err| (id, err))?; tokio::time::sleep(simulated_proof_time).await; if failed { @@ -251,9 +272,10 @@ impl WorkerPool { } #[instrument(target = COMPONENT, skip_all, err, fields(batch_id))] - fn build_batch( + async fn build_batch( txs: Vec, batch_inputs: BatchInputs, + batch_prover: BatchProver, ) -> Result { let num_txs = txs.len(); @@ -275,9 +297,7 @@ impl WorkerPool { Span::current().record("batch_id", proposed_batch.id().to_string()); info!(target: COMPONENT, "Proposed Batch built"); - let proven_batch = LocalBatchProver::new(MIN_PROOF_SECURITY_LEVEL) - .prove(proposed_batch) - .map_err(BuildBatchError::ProveBatchError)?; + let proven_batch = batch_prover.prove(proposed_batch).await?; Span::current().record("batch_id", proven_batch.id().to_string()); info!(target: COMPONENT, "Proven Batch built"); @@ -285,3 +305,40 @@ impl WorkerPool { Ok(proven_batch) } } + +// BATCH PROVER +// ================================================================================================ + +/// Represents a batch prover which can be either local or remote. 
+#[derive(Clone)] +pub enum BatchProver { + Local(LocalBatchProver), + Remote(RemoteBatchProver), +} + +impl BatchProver { + pub fn new_local(security_level: u32) -> Self { + info!(target: COMPONENT, "Using local batch prover"); + Self::Local(LocalBatchProver::new(security_level)) + } + + pub fn new_remote(endpoint: impl Into) -> Self { + info!(target: COMPONENT, "Using remote batch prover"); + Self::Remote(RemoteBatchProver::new(endpoint)) + } + + #[instrument(target = COMPONENT, skip_all, err)] + pub async fn prove( + &self, + proposed_batch: ProposedBatch, + ) -> Result { + match self { + Self::Local(prover) => { + prover.prove(proposed_batch).map_err(BuildBatchError::ProveBatchError) + }, + Self::Remote(prover) => { + prover.prove(proposed_batch).await.map_err(BuildBatchError::RemoteProverError) + }, + } + } +} diff --git a/crates/block-producer/src/config.rs b/crates/block-producer/src/config.rs index 0ad19741b..41153fec0 100644 --- a/crates/block-producer/src/config.rs +++ b/crates/block-producer/src/config.rs @@ -23,14 +23,22 @@ pub struct BlockProducerConfig { /// verification may take ~15ms/proof. This is OK when all transactions are forwarded to the /// block producer from the RPC component as transaction proofs are also verified there. pub verify_tx_proofs: bool, + + /// URL of the remote batch prover. 
+ pub batch_prover_url: Option, } impl Display for BlockProducerConfig { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.write_fmt(format_args!( - "{{ endpoint: \"{}\", store_url: \"{}\" }}", - self.endpoint, self.store_url - )) + write!(f, "{{ endpoint: \"{}\"", self.endpoint)?; + write!(f, ", store_url: \"{}\"", self.store_url)?; + + let batch_prover_url = self + .batch_prover_url + .as_ref() + .map_or_else(|| "None".to_string(), ToString::to_string); + + write!(f, ", batch_prover_url: \"{batch_prover_url}\" }}") } } @@ -44,6 +52,7 @@ impl Default for BlockProducerConfig { store_url: Url::parse(format!("http://127.0.0.1:{DEFAULT_STORE_PORT}").as_str()) .unwrap(), verify_tx_proofs: true, + batch_prover_url: None, } } } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index 05e779ad5..b13698809 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -7,6 +7,7 @@ use miden_objects::{ note::{NoteId, Nullifier}, transaction::TransactionId, }; +use miden_proving_service_client::RemoteProverError; use miden_tx_batch_prover::errors::ProvenBatchError; use thiserror::Error; use tokio::task::JoinError; @@ -144,6 +145,9 @@ pub enum BuildBatchError { #[error("failed to prove proposed transaction batch")] ProveBatchError(#[source] ProvenBatchError), + + #[error("failed to prove batch with remote prover")] + RemoteProverError(#[source] RemoteProverError), } // Block building errors diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 2db3c9e1e..4e0beb39e 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -82,7 +82,7 @@ impl BlockProducer { info!(target: COMPONENT, "Server initialized"); Ok(Self { - batch_builder: BatchBuilder::default(), + batch_builder: BatchBuilder::new(config.batch_prover_url), block_builder: BlockBuilder::new(store.clone()), batch_budget: BatchBudget::default(), block_budget: 
BlockBudget::default(), From 7546dc77036fcf8acf4a339cde5b5cb91051877e Mon Sep 17 00:00:00 2001 From: Santiago Pittella <87827390+SantiagoPittella@users.noreply.github.com> Date: Sun, 9 Mar 2025 18:33:34 -0300 Subject: [PATCH 27/27] feat: add remote block prover as optional (#719) --- CHANGELOG.md | 1 + README.md | 5 +- bin/node/src/config.rs | 6 ++ config/miden-node.toml | 4 +- crates/block-producer/Cargo.toml | 1 + .../block-producer/src/batch_builder/mod.rs | 4 +- .../block-producer/src/block_builder/mod.rs | 58 ++++++++++++++++--- crates/block-producer/src/config.rs | 17 +++++- crates/block-producer/src/errors.rs | 2 + crates/block-producer/src/server.rs | 2 +- 10 files changed, 85 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b035de4fc..6fe5a07b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ - [BREAKING] `CheckNullifiersByPrefix` now takes a starting block number (#707). - [BREAKING] Removed nullifiers from `SyncState` endpoint (#708). - [BREAKING] Added `batch_prover_url` to block producer configuration (#701). +- [BREAKING] Added `block_prover_url` to block producer configuration (#719). - [BREAKING] Updated to Rust Edition 2024 (#727). - [BREAKING] MSRV bumped to 1.85 (#727). diff --git a/README.md b/README.md index 5cb1bcf11..d788817a6 100644 --- a/README.md +++ b/README.md @@ -185,14 +185,13 @@ or alternatively start the systemd service if that's how you wish to operate: systemctl start miden-node.service ``` -The `miden-node.toml` can be modified adding a batch prover URL, to delegate the batch proving: +The node will use local provers for blocks and batches, but you can configure the node to use remote provers by adding the following to the `miden-node.toml` file in the `[block-producer]` section: ```toml batch_prover_url = "" +block_prover_url = "" ``` -If this variable is not set, the node will use a local batch prover. 
- ### Monitoring and telemetry Please see our operator documentation [here](docs/operator.md). diff --git a/bin/node/src/config.rs b/bin/node/src/config.rs index e4b52c6f7..a3faa463b 100644 --- a/bin/node/src/config.rs +++ b/bin/node/src/config.rs @@ -28,6 +28,7 @@ struct NormalizedBlockProducerConfig { endpoint: Url, verify_tx_proofs: bool, batch_prover_url: Option, + block_prover_url: Option, } impl Default for NormalizedRpcConfig { @@ -50,11 +51,13 @@ impl Default for NormalizedBlockProducerConfig { store_url: _, verify_tx_proofs, batch_prover_url, + block_prover_url, } = BlockProducerConfig::default(); Self { endpoint, verify_tx_proofs, batch_prover_url, + block_prover_url, } } } @@ -68,6 +71,7 @@ impl NodeConfig { store_url: store.endpoint.clone(), verify_tx_proofs: block_producer.verify_tx_proofs, batch_prover_url: block_producer.batch_prover_url, + block_prover_url: block_producer.block_prover_url, }; let rpc = RpcConfig { @@ -103,6 +107,7 @@ mod tests { endpoint = "http://127.0.0.1:8080" verify_tx_proofs = true batch_prover_url = "http://127.0.0.1:8081" + block_prover_url = "http://127.0.0.1:8082" [rpc] endpoint = "http://127.0.0.1:8080" @@ -124,6 +129,7 @@ mod tests { endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), verify_tx_proofs: true, batch_prover_url: Some(Url::parse("http://127.0.0.1:8081").unwrap()), + block_prover_url: Some(Url::parse("http://127.0.0.1:8082").unwrap()), }, rpc: NormalizedRpcConfig { endpoint: Url::parse("http://127.0.0.1:8080").unwrap(), diff --git a/config/miden-node.toml b/config/miden-node.toml index 8a4c4ec85..0e5354984 100644 --- a/config/miden-node.toml +++ b/config/miden-node.toml @@ -7,7 +7,9 @@ endpoint = "http://127.0.0.1:48046" # transaction queue. 
verify_tx_proofs = true # address of the remote batch prover service -batch_prover_url = "http://127.0.0.1:8082/" +batch_prover_url = "http://127.0.0.1:8081/" +# address of the remote block prover service +block_prover_url = "http://127.0.0.1:8082/" [rpc] # port defined as: sum(ord(c)**p for (p, c) in enumerate('miden-rpc', 1)) % 2**16 diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 2dd38c170..963caf6cc 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -29,6 +29,7 @@ miden-objects = { workspace = true } miden-processor = { workspace = true } miden-proving-service-client = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next", features = [ "batch-prover", + "block-prover", ] } miden-tx = { workspace = true } miden-tx-batch-prover = { git = "https://github.com/0xPolygonMiden/miden-base.git", branch = "next" } diff --git a/crates/block-producer/src/batch_builder/mod.rs b/crates/block-producer/src/batch_builder/mod.rs index f48f75ff2..7cb14e95f 100644 --- a/crates/block-producer/src/batch_builder/mod.rs +++ b/crates/block-producer/src/batch_builder/mod.rs @@ -36,13 +36,15 @@ pub struct BatchBuilder { /// Note: this _must_ be sign positive and less than 1.0. pub failure_rate: f32, /// The batch prover to use. + /// + /// If not provided, a local batch prover is used. batch_prover: BatchProver, } impl BatchBuilder { /// Creates a new [`BatchBuilder`] with the given batch prover URL. /// - /// Defaults to [`BatchProver::Local`] is no URL is provided. + /// If no URL is provided, a local batch prover is used. 
pub fn new(batch_prover_url: Option) -> Self { let batch_prover = match batch_prover_url { Some(url) => BatchProver::new_remote(url), diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 812504c40..8d218ea23 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -9,9 +9,11 @@ use miden_objects::{ block::{BlockInputs, BlockNumber, ProposedBlock, ProvenBlock}, note::NoteHeader, }; +use miden_proving_service_client::proving_service::block_prover::RemoteBlockProver; use rand::Rng; use tokio::time::Duration; -use tracing::{Span, instrument}; +use tracing::{Span, info, instrument}; +use url::Url; use crate::{ COMPONENT, SERVER_BLOCK_FREQUENCY, errors::BuildBlockError, mempool::SharedMempool, @@ -34,17 +36,25 @@ pub struct BlockBuilder { pub store: StoreClient, /// The prover used to prove a proposed block into a proven block. - pub block_prover: LocalBlockProver, + pub block_prover: BlockProver, } impl BlockBuilder { - pub fn new(store: StoreClient) -> Self { + /// Creates a new [`BlockBuilder`] with the given [`StoreClient`] and optional block prover URL. + /// + /// If the block prover URL is not set, the block builder will use the local block prover. + pub fn new(store: StoreClient, block_prover_url: Option) -> Self { + let block_prover = match block_prover_url { + Some(url) => BlockProver::new_remote(url), + None => BlockProver::new_local(MIN_PROOF_SECURITY_LEVEL), + }; + Self { block_interval: SERVER_BLOCK_FREQUENCY, // Note: The range cannot be empty. 
simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, - block_prover: LocalBlockProver::new(MIN_PROOF_SECURITY_LEVEL), + block_prover, store, } } @@ -190,10 +200,7 @@ impl BlockBuilder { &self, proposed_block: ProposedBlock, ) -> Result { - let proven_block = self - .block_prover - .prove(proposed_block) - .map_err(BuildBlockError::ProveBlockFailed)?; + let proven_block = self.block_prover.prove(proposed_block).await?; self.simulate_proving().await; @@ -352,3 +359,38 @@ impl TelemetryInjectorExt for ProvenBlock { span.set_attribute("block.commitments.transaction", header.tx_hash()); } } + +// BLOCK PROVER +// ================================================================================================ + +pub enum BlockProver { + Local(LocalBlockProver), + Remote(RemoteBlockProver), +} + +impl BlockProver { + pub fn new_local(security_level: u32) -> Self { + info!(target: COMPONENT, "Using local block prover"); + Self::Local(LocalBlockProver::new(security_level)) + } + + pub fn new_remote(endpoint: impl Into) -> Self { + info!(target: COMPONENT, "Using remote block prover"); + Self::Remote(RemoteBlockProver::new(endpoint)) + } + + #[instrument(target = COMPONENT, skip_all, err)] + pub async fn prove( + &self, + proposed_block: ProposedBlock, + ) -> Result { + match self { + Self::Local(prover) => { + prover.prove(proposed_block).map_err(BuildBlockError::ProveBlockFailed) + }, + Self::Remote(prover) => { + prover.prove(proposed_block).await.map_err(BuildBlockError::RemoteProverError) + }, + } + } +} diff --git a/crates/block-producer/src/config.rs b/crates/block-producer/src/config.rs index 41153fec0..278541b6a 100644 --- a/crates/block-producer/src/config.rs +++ b/crates/block-producer/src/config.rs @@ -25,7 +25,14 @@ pub struct BlockProducerConfig { pub verify_tx_proofs: bool, /// URL of the remote batch prover. + /// + /// If not set, the block producer will use the local batch prover. 
pub batch_prover_url: Option, + + /// URL of the remote block prover. + /// + /// If not set, the block producer will use the local block prover. + pub block_prover_url: Option, } impl Display for BlockProducerConfig { @@ -38,7 +45,14 @@ impl Display for BlockProducerConfig { .as_ref() .map_or_else(|| "None".to_string(), ToString::to_string); - write!(f, ", batch_prover_url: \"{batch_prover_url}\" }}") + write!(f, ", batch_prover_url: \"{batch_prover_url}\" }}")?; + + let block_prover_url = self + .block_prover_url + .as_ref() + .map_or_else(|| "None".to_string(), ToString::to_string); + + write!(f, ", block_prover_url: \"{block_prover_url}\" }}") } } @@ -53,6 +67,7 @@ impl Default for BlockProducerConfig { .unwrap(), verify_tx_proofs: true, batch_prover_url: None, + block_prover_url: None, } } } diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index b13698809..ef1d70d4d 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -167,6 +167,8 @@ pub enum BuildBlockError { /// responses. #[error("nothing actually went wrong, failure was injected on purpose")] InjectedFailure, + #[error("failed to prove block with remote prover")] + RemoteProverError(#[source] RemoteProverError), } // Store errors diff --git a/crates/block-producer/src/server.rs b/crates/block-producer/src/server.rs index 4e0beb39e..1625a8d2a 100644 --- a/crates/block-producer/src/server.rs +++ b/crates/block-producer/src/server.rs @@ -83,7 +83,7 @@ impl BlockProducer { Ok(Self { batch_builder: BatchBuilder::new(config.batch_prover_url), - block_builder: BlockBuilder::new(store.clone()), + block_builder: BlockBuilder::new(store.clone(), config.block_prover_url), batch_budget: BatchBudget::default(), block_budget: BlockBudget::default(), state_retention: SERVER_MEMPOOL_STATE_RETENTION,