diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5930db66..d42b8e5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: crate: cargo-binstall - run: cargo binstall -y --force cargo-risczero@${{ env.RISC0_VERSION }} - run: cargo risczero install --version $RISC0_TOOLCHAIN_VERSION - - run: cargo test --workspace --all-targets -F ef-tests + - run: cargo test --workspace --all-targets -F ef-tests,debug-guest-build clippy: name: clippy diff --git a/.gitignore b/.gitignore index dcb9cd8c..f6d7eb7a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,10 @@ .DS_Store target/ tmp/ -rpc_cache/ +cache_rpc/ +cache_zkp/ log.txt *.pb *.zkp -rpc_cache.json +cache_rpc.json .idea diff --git a/Cargo.lock b/Cargo.lock index 5a19f735..456c2442 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -42,9 +42,9 @@ checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "once_cell", @@ -209,6 +209,15 @@ version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "ark-bn254" version = "0.4.0" @@ -672,17 +681,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "bonsai-sdk" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "441d1092e11977985946b6564251df91d80ae36982128e53be52a32548ad8762" -dependencies = [ - 
"reqwest", - "serde", - "thiserror", -] - [[package]] name = "bonsai-sdk" version = "0.6.0" @@ -692,6 +690,7 @@ dependencies = [ "reqwest", "serde", "thiserror", + "tokio", ] [[package]] @@ -1035,6 +1034,52 @@ dependencies = [ "typenum", ] +[[package]] +name = "cust" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6cc71911e179f12483b9734120b45bd00bf64fab085cc4818428523eedd469" +dependencies = [ + "bitflags 1.3.2", + "bytemuck", + "cust_core", + "cust_derive", + "cust_raw", + "find_cuda_helper", +] + +[[package]] +name = "cust_core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "039f79662cb8f890cbf335e818cd522d6e3a53fe63f61d1aaaf859cd3d975f06" +dependencies = [ + "cust_derive", + "glam", + "mint", + "vek", +] + +[[package]] +name = "cust_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3bc95fe629aed92b2423de6ccff9e40174b21d19cb6ee6281a4d04ac72f66" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "cust_raw" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf40d6ade12cb9828bbc844b9875c7b93d25e67a3c9bf61c7aa3ae09e402bf8" +dependencies = [ + "find_cuda_helper", +] + [[package]] name = "darling" version = "0.20.4" @@ -1504,6 +1549,15 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "find_cuda_helper" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9f9e65c593dd01ac77daad909ea4ad17f0d6d1776193fc8ea766356177abdad" +dependencies = [ + "glob", +] + [[package]] name = "fixed-hash" version = "0.8.0" @@ -1538,15 +1592,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared 0.1.1", -] - [[package]] name = "foreign-types" version = "0.5.0" @@ -1554,7 +1599,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" dependencies = [ "foreign-types-macros", - "foreign-types-shared 0.3.1", + "foreign-types-shared", ] [[package]] @@ -1568,12 +1613,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "foreign-types-shared" version = "0.3.1" @@ -1735,6 +1774,15 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "glam" +version = "0.20.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43e957e744be03f5801a55472f593d43fabdebf25a4585db250f04d86b1675f" +dependencies = [ + "num-traits", +] + [[package]] name = "glob" version = "0.3.1" @@ -1947,19 +1995,6 @@ dependencies = [ "tokio-rustls", ] -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper", - "native-tls", - "tokio", - "tokio-native-tls", -] - [[package]] name = "iana-time-zone" version = "0.1.59" @@ -2340,7 +2375,7 @@ dependencies = [ "bitflags 1.3.2", "block", "core-graphics-types", - "foreign-types 0.5.0", + "foreign-types", "log", "objc", "paste", @@ -2367,6 +2402,12 @@ dependencies = [ "adler", ] +[[package]] +name = "mint" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e53debba6bda7a793e5f99b8dacf19e626084f525f7829104ba9898f367d85ff" + 
[[package]] name = "mio" version = "0.8.10" @@ -2384,24 +2425,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "native-tls" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "ndarray" version = "0.15.6" @@ -2617,50 +2640,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "openssl" -version = "0.10.63" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" -dependencies = [ - "bitflags 2.4.2", - "cfg-if", - "foreign-types 0.3.2", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-sys" -version = "0.9.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "option-ext" version = "0.2.0" @@ -2816,12 +2795,6 @@ dependencies = [ "spki", ] -[[package]] -name = "pkg-config" -version = "0.3.29" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" - [[package]] name = "powerfmt" version = "0.2.0" @@ -3186,12 +3159,10 @@ dependencies = [ "http-body", "hyper", "hyper-rustls", - "hyper-tls", "ipnet", "js-sys", "log", "mime", - "native-tls", "once_cell", "percent-encoding", "pin-project-lite", @@ -3202,7 +3173,6 @@ dependencies = [ "serde_urlencoded", "system-configuration", "tokio", - "tokio-native-tls", "tokio-rustls", "tower-service", "url", @@ -3367,6 +3337,7 @@ checksum = "404ed28b36b5e5a346267870e675ec634678193c111376334d0e00edcecefece" dependencies = [ "anyhow", "bytemuck", + "cust", "downloader", "hex", "metal", @@ -3398,6 +3369,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc4890f3c85bfbefdc05f926778d92d1fa24b8d931a077170e07c3b6e14e4c29" dependencies = [ "anyhow", + "cust", "metal", "rand", "rayon", @@ -3429,6 +3401,16 @@ dependencies = [ "rand_core", ] +[[package]] +name = "risc0-sppark" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be1d1ff7fe501c9f420654bc1ff7461909b85e7f8fb3698a8812c0a8a787306" +dependencies = [ + "cc", + "which", +] + [[package]] name = "risc0-sys" version = "0.20.0" @@ -3436,7 +3418,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "510fcce109b5d5df9458f2ce578504f97fc786b1330cc2130ea12824ff286ef7" dependencies = [ "cc", + "cust", "risc0-build-kernel", + "risc0-sppark", ] [[package]] @@ -3448,6 +3432,7 @@ dependencies = [ "anyhow", "blake2", "bytemuck", + "cust", "digest 0.10.7", "ff", "hex", @@ -3478,7 +3463,7 @@ dependencies = [ "ark-groth16", "ark-serialize 0.4.2", "bincode", - "bonsai-sdk 0.6.0", + "bonsai-sdk", "bytemuck", "bytes", "cfg-if", @@ -3758,15 +3743,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "schannel" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" -dependencies = [ - "windows-sys 0.52.0", -] - [[package]] name = "scopeguard" version = "1.2.0" @@ -3815,29 +3791,6 @@ dependencies = [ "cc", ] -[[package]] -name = "security-framework" -version = "2.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "semver" version = "0.11.0" @@ -4336,16 +4289,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.24.1" @@ -4641,10 +4584,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] -name = "vcpkg" -version = "0.2.15" +name = "vek" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +checksum = "8085882662f9bc47fc8b0cdafa5e19df8f592f650c02b9083da8d45ac9eebd17" +dependencies = [ + "approx", + "num-integer", + "num-traits", + "rustc_version 0.4.0", +] [[package]] name = "version_check" @@ -5037,7 +4986,7 @@ dependencies = [ "anyhow", "assert_cmd", "bincode", - "bonsai-sdk 0.5.1", + "bonsai-sdk", "bytemuck", "clap", "env_logger", @@ -5051,6 +5000,7 @@ 
dependencies = [ "serde", "tempfile", "tokio", + "tracing", "zeth-guests", "zeth-lib", "zeth-primitives", @@ -5103,6 +5053,7 @@ dependencies = [ "ethers-core", "hex-literal", "k256", + "log", "revm-primitives", "rlp", "serde", diff --git a/Cargo.toml b/Cargo.toml index 3c7bcc79..83845e2c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ lto = true opt-level = 3 [workspace.dependencies] -bonsai-sdk = "0.5" +bonsai-sdk = { version = "0.6.0", features = ["async"] } hashbrown = { version = "0.14.3", features = ["inline-more"] } risc0-build = { version = "0.20.0" } risc0-zkvm = { version = "0.20.0", default-features = false } diff --git a/README.md b/README.md index ef749e9b..4df78434 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,8 @@ # zeth -**NEW: Zeth now supports Optimism blocks! Just pass in `--network=optimism`!** +Zeth is an open-source ZK block prover for Ethereum and Optimism built on the RISC Zero zkVM. -Zeth is an open-source ZK block prover for Ethereum built on the RISC Zero zkVM. - -Zeth makes it possible to *prove* that a given Ethereum block is valid +Zeth makes it possible to *prove* that a given block is valid (i.e., is the result of applying the given list of transactions to the parent block) *without* relying on the validator or sync committees. This is because Zeth does *all* the work needed to construct a new block *from within the zkVM*, including: @@ -18,6 +16,8 @@ This is because Zeth does *all* the work needed to construct a new block *from w After constructing the new block, Zeth calculates and outputs the block hash. By running this process within the zkVM, we obtain a ZK proof that the new block is valid. +For Optimism, our validity proof ensures that the block is correctly derived from the +available data posted to Ethereum. ## Status @@ -25,139 +25,322 @@ Zeth is experimental and may still contain bugs. 
## Usage -### Building +### Prerequisites + +Zeth primarily requires the availability of Ethereum/Optimism RPC provider(s) data. +Two complementary types of providers are supported: + +* RPC provider. + This fetches data from a Web2 RPC provider, such as [Alchemy](https://www.alchemy.com/). + Specified using the `--eth-rpc-url=` and `--op-rpc-url=` parameters. +* Cached RPC provider. + This fetches RPC data from a local file when possible, and falls back to a Web2 RPC provider when necessary. + It amends the local file with results from the Web2 provider so that subsequent runs don't require additional Web2 RPC calls. + Specified using the `--cache[=]` parameter. + +### Installation + + +#### RISC Zero zkVM -- Install the `cargo risczero` tool and the `risc0` toolchain: +Install the `cargo risczero` tool and the `risc0` toolchain: ```console -$ cargo install cargo-risczero -$ cargo risczero install +cargo install cargo-risczero +cargo risczero install ``` -- Clone the repository and build with `cargo`: +#### zeth +Clone the repository and build with `cargo` using one of the following commands: + +* CPU Proving (slow): +```console +cargo build --release +``` + +- GPU Proving (apple/metal) ```console -$ cargo build --release +cargo build -F metal --release ``` -### Running +- GPU Proving (nvidia/cuda) +```console +cargo build -F cuda --release +``` +#### Execution: -Zeth currently has several modes of execution: +Run the built binary (instead of using `cargo run`) using: +```console +RUST_LOG=info ./target/release/zeth ``` -Usage: zeth [OPTIONS] --block-no= + +### CLI + +Zeth currently has four main modes of execution: + +```console +RUST_LOG=info ./target/release/zeth help +``` +```console +Usage: zeth + +Commands: + build Build blocks natively outside the zkVM + run Run the block creation process inside the executor + prove Provably create blocks inside the zkVM + verify Verify a block creation receipt + op-info Output debug information about an optimism block + 
help Print this message or the help of the given subcommand(s) Options: - -r, --rpc-url= - URL of the chain RPC node. - -c, --cache[=] - Use a local directory as a cache for RPC calls. - Accepts a custom directory. - [default: host/testdata] - -n, --network= - Network name (ethereum/optimism). - [default: ethereum] - -b, --block-no= - Block number to validate. - -l, --local-exec[=] - Runs the verification inside the zkvm executor locally. - Accepts a custom maximum segment cycle count as a power of 2. [default: 20] - -s, --submit-to-bonsai - Whether to submit the proving workload to Bonsai. - -v, --verify-bonsai-receipt-uuid= - Bonsai Session UUID to use for receipt verification. - -p, --profile - Whether to profile the zkVM execution. - -h, --help - Print help. - -V, --version - Print version. + -h, --help Print help + -V, --version Print version +``` +For every command, the `--network` parameter can be set to either `ethereum` or `optimism` for provable construction +of single blocks from either chain on its own. +To provably derive Optimism blocks using the data posted on the Ethereum chain, use `--network=optimism-derived`, +but `optimism-derived` is not supported by the `run` and `op-info` commands. + +#### build +*This command only natively builds blocks and does not generate any proofs.* +```console +RUST_LOG=info ./target/release/zeth build --help ``` -Zeth primarily requires an Ethereum RPC provider. -Two complementary types of providers are supported: +```console +Build blocks natively outside the zkVM -* RPC provider. - This fetches data from a Web2 RPC provider, such as [Alchemy](https://www.alchemy.com/). - Specified using the `--rpc-url=` parameter. -* Cached RPC provider. - This fetches RPC data from a local file when possible, and falls back to a Web2 RPC provider when necessary. - It amends the local file with results from the Web2 provider so that subsequent runs don't require additional Web2 RPC calls. 
- Specified using the `--cache[=CACHE_DIRECTORY]` parameter. +Usage: zeth build [OPTIONS] --block-number= + +Options: + -w, --network= Network name (ethereum/optimism/optimism-derived) [default: ethereum] + -e, --eth-rpc-url= URL of the Ethereum RPC node + -o, --op-rpc-url= URL of the Optimism RPC node + -c, --cache[=] Use a local directory as a cache for RPC calls. Accepts a custom directory. [default: cache_rpc] + -b, --block-number= Block number to begin from + -n, --block-count= Number of blocks to provably derive [default: 1] + -m, --composition[=] Compose separate block derivation proofs together. Accepts a custom number of blocks to process per derivation call. (optimism-derived network only) [default: 1] + -h, --help Print help +``` -**Quick test mode**. -This is the default. When run in this mode, Zeth does all the work needed to construct an Ethereum block and verifies the correctness of the result using the RPC provider. No proofs are generated. -You can omit the `rpc-url` parameter if you do not change the `block-no` parameters from the below examples as the data is already cached. -Ethereum: +With `--network=optimism-derived`, the derivation proof creation is done without proof composition by default, +requiring the derivation to be carried out inside a single zkVM execution. 
+ +**Examples** +The `host/testdata` and `host/testdata/derivation` directories come preloaded with a few cache files that you can use +out of the box without the need to explicitly specify an RPC URL: ```console -$ RUST_LOG=info ./target/release/zeth \ - --rpc-url="https://eth-mainnet.g.alchemy.com/v2/YOUR_API_KEY" \ - --cache \ - --block-no=16424130 +RUST_LOG=info ./target/release/zeth build \ + --network=ethereum \ + --cache=host/testdata \ + --block-number=16424130 ``` -Optimism: ```console -$ RUST_LOG=info ./target/release/zeth \ - --network=optimism \ - --rpc-url="https://opt-mainnet.g.alchemy.com/v2/YOUR_API_KEY" \ - --cache \ - --block-no=107728767 +RUST_LOG=info ./target/release/zeth build \ + --network=optimism \ + --cache=host/testdata \ + --block-number=107728767 +``` +```console +RUST_LOG=info ./target/release/zeth build \ + --network=optimism-derived \ + --cache=host/testdata/derivation \ + --block-number=109279674 \ + --block-count=4 +``` +**Composition** The optimism derivation proof (`--network=optimism-derived`) can alternatively be created using proof composition by +setting the `--composition` parameter to the number of op blocks per rolled up proof. 
+In the following example, 2 derivation proofs of 2 sequential blocks each are composed to obtain the final derivation +proof for the 4 sequential blocks: +```console +RUST_LOG=info ./target/release/zeth build \ + --network=optimism-derived \ + --cache=host/testdata/derivation \ + --block-number=109279674 \ + --block-count=4 \ + --composition=2 +``` + +#### run +*This command only invokes the RISC-V emulator and does not generate any proofs.* +```console +RUST_LOG=info ./target/release/zeth run --help +``` +```console +Run the block creation process inside the executor + +Usage: zeth run [OPTIONS] --block-number= + +Options: + -w, --network= Network name (ethereum/optimism/optimism-derived) [default: ethereum] + -e, --eth-rpc-url= URL of the Ethereum RPC node + -o, --op-rpc-url= URL of the Optimism RPC node + -c, --cache[=] Use a local directory as a cache for RPC calls. Accepts a custom directory. [default: cache_rpc] + -b, --block-number= Block number to begin from + -n, --block-count= Number of blocks to provably derive [default: 1] + -x, --execution-po2= The maximum segment cycle count as a power of 2 [default: 20] + -p, --profile Whether to profile the zkVM execution + -h, --help Print help ``` **Local executor mode**. -To run in this mode, add the parameter `--local-exec[=SEGMENT_LIMIT]`. When run in this mode, Zeth does all the work needed to construct an Ethereum block from within the zkVM's non-proving emulator. Correctness of the result is checked using the RPC provider. This is useful for measuring the size of the computation (number of execution segments and cycles). No proofs are generated. 
+**Examples** +The below examples will invoke the executor, which will take a bit more time, and output the number of cycles required +for execution/proving inside the zkVM: +```console +RUST_LOG=info ./target/release/zeth run \ + --cache=host/testdata \ + --network=ethereum \ + --block-number=16424130 +``` +```console +RUST_LOG=info ./target/release/zeth run \ + --cache=host/testdata \ + --network=optimism \ + --block-number=107728767 +``` + +The `run` command does not support proof composition (required by `--network=optimism-derived`) because receipts are required for this process inside the +executor. +Alternatively, one can call the `prove` command in dev mode (`RISC0_DEV_MODE=true`) for the same functionality, as +demonstrated in the next section. + +#### prove +*This command generates a ZK proof, unless dev mode is enabled through the environment variable `RISC0_DEV_MODE=true`.* ```console -$ RUST_LOG=info ./target/release/zeth \ - --rpc-url="https://eth-mainnet.g.alchemy.com/v2/YOUR_API_KEY" \ - --cache \ - --block-no=16424130 \ - --local-exec +RUST_LOG=info ./target/release/zeth prove --help ``` +```console +Provably create blocks inside the zkVM + +Usage: zeth prove [OPTIONS] --block-number= -**Bonsai proving mode**. -*This mode generates a ZK proof.* +Options: + -w, --network= Network name (ethereum/optimism/optimism-derived) [default: ethereum] + -e, --eth-rpc-url= URL of the Ethereum RPC node + -o, --op-rpc-url= URL of the Optimism RPC node + -c, --cache[=] Use a local directory as a cache for RPC calls. Accepts a custom directory. [default: cache_rpc] + -b, --block-number= Block number to begin from + -n, --block-count= Number of blocks to provably derive [default: 1] + -x, --execution-po2= The maximum segment cycle count as a power of 2 [default: 20] + -p, --profile Whether to profile the zkVM execution + -m, --composition[=] Compose separate block derivation proofs together. Accepts a custom number of blocks to process per derivation call. 
(optimism-derived network only) [default: 1] + -s, --submit-to-bonsai Prove remotely using Bonsai + -h, --help Print help +``` + +**Proving on Bonsai**. To run in this mode, add the parameter `--submit-to-bonsai`. When run in this mode, Zeth submits a proving task to the [Bonsai proving service](https://www.bonsai.xyz/), -which then constructs an Ethereum block entirely from within the zkVM. -This mode checks the correctness of the result using the RPC provider. +which then constructs the blocks entirely from within the zkVM. +This mode checks the correctness of the result on your machine using the RPC provider(s). It also outputs the Bonsai session UUID, and polls Bonsai until the proof is complete. -To use this feature, first set the `BONSAI_API_URL` and `BONSAI_API_KEY` environment variables, -then follow the instructions below for submitting jobs to Bonsai and verifying the proofs. +To use this feature, first set the `BONSAI_API_URL` and `BONSAI_API_KEY` environment variables before executing zeth +to submit jobs to Bonsai. Need a Bonsai API key? [Sign up today](https://bonsai.xyz/apply). 
+**Examples** +The below examples will invoke the prover, which will take a potentially significant time to generate a ZK proof +locally: +```console +RUST_LOG=info ./target/release/zeth prove \ + --cache=host/testdata \ + --network=ethereum \ + --block-number=16424130 +``` ```console -$ RUST_LOG=info ./target/release/zeth \ - --rpc-url="https://eth-mainnet.g.alchemy.com/v2/YOUR_API_KEY" \ - --cache \ - --block-no=16424130 \ - --submit-to-bonsai +RUST_LOG=info ./target/release/zeth prove \ + --cache=host/testdata \ + --network=optimism \ + --block-number=107728767 ``` +```console +RUST_LOG=info ./target/release/zeth prove \ + --network=optimism-derived \ + --cache=host/testdata/derivation \ + --block-number=109279674 \ + --block-count=4 +``` +**Composition** Alternatively, we can run composition in dev mode, which should only take as much time as required by the +executor, using the following command: +```console +RISC0_DEV_MODE=true RUST_LOG=info ./target/release/zeth prove \ + --network=optimism-derived \ + --cache=host/testdata/derivation \ + --block-number=109279674 \ + --block-count=4 \ + --composition=2 +``` +***NOTE*** Proving in dev mode only generates dummy receipts that do not attest to the validity of the computation and +are not verifiable outside of dev mode! -**Bonsai verify mode**. -*This mode verifies the ZK proof.* -To run in this mode, add the parameter `--verify-bonsai-receipt-uuid=BONSAI_SESSION_UUID`, -where `BONSAI_SESSION_UUID` is the session UUID returned by the `--submit-to-bonsai` mode. -This mode checks the correctness of the result using the RPC provider. 
+#### verify +*This command verifies a ZK proof generated on Bonsai.* +``` +RUST_LOG=info ./target/release/zeth verify --help +``` +``` +Verify a block creation receipt -```console -$ RUST_LOG=info ./target/release/zeth \ - --rpc-url="https://eth-mainnet.g.alchemy.com/v2/YOUR_API_KEY" \ - --cache \ - --block-no=16424130 \ - --verify-bonsai-receipt-uuid=BONSAI_SESSION_UUID +Usage: zeth verify [OPTIONS] --block-number= --bonsai-receipt-uuid= + +Options: + -w, --network= + Network name (ethereum/optimism/optimism-derived) [default: ethereum] + -e, --eth-rpc-url= + URL of the Ethereum RPC node + -o, --op-rpc-url= + URL of the Optimism RPC node + -c, --cache[=] + Use a local directory as a cache for RPC calls. Accepts a custom directory. [default: cache_rpc] + -b, --block-number= + Block number to begin from + -n, --block-count= + Number of blocks to provably derive [default: 1] + -b, --bonsai-receipt-uuid= + Verify the receipt from the provided Bonsai Session UUID + -h, --help + Print help +``` + +This command first natively builds the specified block(s), and then validates the correctness of the receipt generated +on Bonsai specified by the `--bonsai-receipt-uuid=BONSAI_SESSION_UUID` parameter, where `BONSAI_SESSION_UUID` is the +session UUID returned when proving using `--submit-to-bonsai`. + +#### op-info +``` +RUST_LOG=info ./target/release/zeth op-info --help +``` ``` +Output debug information about an optimism block + +Usage: zeth op-info [OPTIONS] --block-number= + +Options: + -w, --network= Network name (ethereum/optimism/optimism-derived) [default: ethereum] + -e, --eth-rpc-url= URL of the Ethereum RPC node + -o, --op-rpc-url= URL of the Optimism RPC node + -c, --cache[=] Use a local directory as a cache for RPC calls. Accepts a custom directory. 
[default: cache_rpc] + -b, --block-number= Block number to begin from + -n, --block-count= Number of blocks to provably derive [default: 1] + -h, --help Print help +``` +This command only outputs debug information for development use. + ## Additional resources diff --git a/guests/Cargo.toml b/guests/Cargo.toml index 999c3534..11ef6ee9 100644 --- a/guests/Cargo.toml +++ b/guests/Cargo.toml @@ -8,3 +8,9 @@ risc0-build = { workspace = true } [package.metadata.risc0] methods = ["eth-block", "op-block", "op-derive", "op-compose"] + +[features] +debug-guest-build = [] +default = [ + "risc0-build/docker" +] \ No newline at end of file diff --git a/guests/build.rs b/guests/build.rs index 42c6db8e..29578ad4 100644 --- a/guests/build.rs +++ b/guests/build.rs @@ -12,6 +12,29 @@ // See the License for the specific language governing permissions and // limitations under the License. +#[cfg(not(feature = "debug-guest-build"))] fn main() { - risc0_build::embed_methods(); + let cwd = std::env::current_dir().unwrap(); + let root_dir = cwd.parent().map(|d| d.to_path_buf()); + let build_opts = std::collections::HashMap::from_iter( + ["eth-block", "op-block", "op-derive", "op-compose"] + .into_iter() + .map(|guest_pkg| { + ( + guest_pkg, + risc0_build::GuestOptions { + features: vec![], + use_docker: Some(risc0_build::DockerOptions { + root_dir: root_dir.clone(), + }), + }, + ) + }), + ); + risc0_build::embed_methods_with_options(build_opts); +} + +#[cfg(feature = "debug-guest-build")] +fn main() { + risc0_build::embed_methods() } diff --git a/guests/eth-block/Cargo.lock b/guests/eth-block/Cargo.lock index 12925f70..eddbab28 100644 --- a/guests/eth-block/Cargo.lock +++ b/guests/eth-block/Cargo.lock @@ -95,9 +95,9 @@ dependencies = [ [[package]] name = "alloy-sol-macro" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5531f0a16e36c547e68c73a1638bea1f26237ee8ae785527190c4e4f9fecd2c5" +checksum = 
"8b0b5ab0cb07c21adf9d72e988b34e8200ce648c2bba8d009183bb1c50fb1216" dependencies = [ "const-hex", "dunce", @@ -113,9 +113,9 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "783eb720b73d38f9d4c1fb9890e4db6bc8c708f7aa77d3071a19e06091ecd1c9" +checksum = "6c08f62ded7ce03513bfb60ef5cad4fff5d4f67eac6feb4df80426b7b9ffb06e" dependencies = [ "alloy-primitives", "alloy-sol-macro", @@ -592,9 +592,9 @@ checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" [[package]] name = "bytemuck" -version = "1.14.0" +version = "1.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6" +checksum = "ed2490600f404f2b94c167e31d3ed1d5f3c225a0f3b80230053b3e0b7b962bd9" dependencies = [ "bytemuck_derive", ] @@ -666,9 +666,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.32" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41daef31d7a747c5c847246f36de49ced6f7403b4cdabc807a97b5cc184cda7a" +checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" dependencies = [ "android-tzdata", "iana-time-zone", @@ -2107,18 +2107,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = 
"266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", @@ -2347,9 +2347,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", @@ -2941,9 +2941,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.5.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f58c3a1b3e418f61c25b2aeb43fc6c95eaa252b8cecdda67f401943e9e08d33f" +checksum = "f5c9fdb6b00a489875b22efd4b78fe2b363b72265cc5f6eb2e2b9ee270e6140c" dependencies = [ "base64 0.21.7", "chrono", @@ -2958,9 +2958,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.5.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2068b437a31fc68f25dd7edc296b078f04b45145c199d8eed9866e45f1ff274" +checksum = "dbff351eb4b33600a2e138dfa0b10b65a238ea8ff8fb2387c422c5022a3e8298" dependencies = [ "darling", "proc-macro2", @@ -3170,9 +3170,9 @@ dependencies = [ [[package]] name = "syn-solidity" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cfbd642e1748fd9e47951973abfa78f825b11fbf68af9e6b9db4c983a770166" +checksum = "63bef2e2c735acbc06874eca3a8506f02a3c4700e6e748afc92cc2e4220e8a03" dependencies = [ "paste", "proc-macro2", @@ -3998,6 +3998,7 @@ dependencies = [ "bytes", "ethers-core", "k256", + "log", "revm-primitives", "rlp", "serde", diff --git a/guests/op-block/Cargo.lock b/guests/op-block/Cargo.lock index bac32ca2..6d8caa62 100644 --- a/guests/op-block/Cargo.lock +++ b/guests/op-block/Cargo.lock @@ -3997,6 +3997,7 @@ dependencies = [ "bytes", "ethers-core", "k256", + "log", 
"revm-primitives", "rlp", "serde", diff --git a/guests/op-compose/Cargo.lock b/guests/op-compose/Cargo.lock index 8bb2ea5b..34a51f58 100644 --- a/guests/op-compose/Cargo.lock +++ b/guests/op-compose/Cargo.lock @@ -3998,6 +3998,7 @@ dependencies = [ "bytes", "ethers-core", "k256", + "log", "revm-primitives", "rlp", "serde", diff --git a/guests/op-derive/Cargo.lock b/guests/op-derive/Cargo.lock index 8a394224..12a68f68 100644 --- a/guests/op-derive/Cargo.lock +++ b/guests/op-derive/Cargo.lock @@ -3998,6 +3998,7 @@ dependencies = [ "bytes", "ethers-core", "k256", + "log", "revm-primitives", "rlp", "serde", diff --git a/host/Cargo.toml b/host/Cargo.toml index 7c9d2f4f..cb83167a 100644 --- a/host/Cargo.toml +++ b/host/Cargo.toml @@ -21,6 +21,7 @@ ruint = { version = "1.10", default-features = false } serde = "1.0" tempfile = "3.6" tokio = { version = "1.23", features = ["full"] } +tracing = { version = "0.1", features = ["log"] } zeth-guests = { path = "../guests" } zeth-lib = { path = "../lib" } zeth-primitives = { path = "../primitives" } @@ -32,4 +33,10 @@ rstest = "0.18" [features] metal = [ "risc0-zkvm/metal" -] \ No newline at end of file +] +cuda = [ + "risc0-zkvm/cuda" +] +disable-dev-mode = [ + "risc0-zkvm/disable-dev-mode" +] diff --git a/host/src/cli.rs b/host/src/cli.rs index fa320978..3b2669e7 100644 --- a/host/src/cli.rs +++ b/host/src/cli.rs @@ -51,6 +51,13 @@ impl Cli { _ => None, } } + + pub fn execution_label(&self) -> String { + let sys_time = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap(); + format!("{}_{}", sys_time.as_secs(), self.to_string()) + } } impl ToString for Cli { @@ -119,9 +126,9 @@ pub struct CoreArgs { /// URL of the Optimism RPC node. 
pub op_rpc_url: Option, - #[clap(short, long, require_equals = true, num_args = 0..=1, default_missing_value = "rpc_cache")] + #[clap(short, long, require_equals = true, num_args = 0..=1, default_missing_value = "cache_rpc")] /// Use a local directory as a cache for RPC calls. Accepts a custom directory. - /// [default: host/testdata] + /// [default: cache_rpc] pub cache: Option, #[clap(short, long, require_equals = true)] @@ -135,9 +142,9 @@ pub struct CoreArgs { #[derive(clap::Args, Debug, Clone)] pub struct ExecutorArgs { - #[clap(short, long, require_equals = true, default_value_t = 20)] + #[clap(short = 'x', long, require_equals = true, default_value_t = 20)] /// The maximum segment cycle count as a power of 2. - pub local_exec: u32, + pub execution_po2: u32, #[clap(short, long, default_value_t = false)] /// Whether to profile the zkVM execution @@ -188,5 +195,5 @@ pub struct VerifyArgs { pub core_args: CoreArgs, #[clap(short, long, require_equals = true)] /// Verify the receipt from the provided Bonsai Session UUID. - pub receipt_bonsai_uuid: Option, + pub bonsai_receipt_uuid: String, } diff --git a/host/src/lib.rs b/host/src/lib.rs index f95bae93..327d03f1 100644 --- a/host/src/lib.rs +++ b/host/src/lib.rs @@ -12,20 +12,46 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::fs; +use std::{fs, path::Path}; -use risc0_zkvm::Receipt; +use risc0_zkvm::{is_dev_mode, Receipt}; +use tracing::debug; pub mod cli; pub mod operations; -pub fn save_receipt(file_reference: &String, receipt: &Receipt, index: Option<&mut usize>) { - let receipt_serialized = bincode::serialize(receipt).expect("Failed to serialize receipt!"); - let path = if let Some(number) = index { - *number += 1; - format!("receipt_{}-{}.zkp", file_reference, *number - 1) - } else { - format!("receipt_{}.zkp", file_reference) +pub fn load_receipt(file_name: &String) -> anyhow::Result> { + if is_dev_mode() { + // Nothing to load + return Ok(None); + } + + let receipt_serialized = match fs::read(zkp_cache_path(file_name)) { + Ok(receipt_serialized) => receipt_serialized, + Err(err) => { + debug!("Could not load cached receipt with label: {}", &file_name); + debug!("{:?}", err); + return Ok(None); + } }; - fs::write(path, receipt_serialized).expect("Failed to save receipt output file."); + + Ok(Some(bincode::deserialize(&receipt_serialized)?)) +} + +pub fn save_receipt(receipt_label: &String, receipt_data: &(String, Receipt)) { + if !is_dev_mode() { + fs::write( + zkp_cache_path(receipt_label), + bincode::serialize(receipt_data).expect("Failed to serialize receipt!"), + ) + .expect("Failed to save receipt output file."); + } +} + +fn zkp_cache_path(receipt_label: &String) -> String { + Path::new("cache_zkp") + .join(format!("{}.zkp", receipt_label)) + .to_str() + .unwrap() + .to_string() } diff --git a/host/src/main.rs b/host/src/main.rs index e8a1a2ed..319f0499 100644 --- a/host/src/main.rs +++ b/host/src/main.rs @@ -16,9 +16,10 @@ extern crate core; use anyhow::Result; use clap::Parser; +use log::info; use zeth::{ cli::Cli, - operations::{chains, info, rollups}, + operations::{chains, info::op_info, rollups}, }; use zeth_guests::*; use zeth_lib::{ @@ -29,27 +30,39 @@ use zeth_lib::{ #[tokio::main] async fn main() -> Result<()> { env_logger::init(); + info!("Using the 
following image ids:"); + info!( + "eth-block: {}", + hex::encode(bytemuck::cast_slice(Ð_BLOCK_ID)) + ); + info!( + "op-block: {}", + hex::encode(bytemuck::cast_slice(&OP_BLOCK_ID)) + ); + info!( + "op-derive: {}", + hex::encode(bytemuck::cast_slice(&OP_DERIVE_ID)) + ); + info!( + "op-compose: {}", + hex::encode(bytemuck::cast_slice(&OP_COMPOSE_ID)) + ); let cli = Cli::parse(); // Run simple debug info command if let Cli::OpInfo(..) = &cli { - return info::op_info(cli).await; + return op_info(cli).await; } // Execute other commands let core_args = cli.core_args(); - let sys_time = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap(); - let file_reference = format!("{}_{}", sys_time.as_secs(), cli.to_string()); match core_args.network { Network::Ethereum => { let rpc_url = core_args.eth_rpc_url.clone(); chains::build_chain_blocks::( cli, - &file_reference, rpc_url, ETH_MAINNET_CHAIN_SPEC.clone(), ETH_BLOCK_ELF, @@ -60,7 +73,6 @@ async fn main() -> Result<()> { let rpc_url = core_args.op_rpc_url.clone(); chains::build_chain_blocks::( cli, - &file_reference, rpc_url, OP_MAINNET_CHAIN_SPEC.clone(), OP_BLOCK_ELF, @@ -69,9 +81,9 @@ async fn main() -> Result<()> { } Network::OptimismDerived => { if let Some(composition_size) = cli.composition() { - rollups::compose_derived_rollup_blocks(cli, composition_size, &file_reference).await + rollups::compose_derived_rollup_blocks(cli, composition_size).await } else { - rollups::derive_rollup_blocks(cli, &file_reference).await + rollups::derive_rollup_blocks(cli).await } } } diff --git a/host/src/operations/chains.rs b/host/src/operations/chains.rs index ef5f38e0..18e737dd 100644 --- a/host/src/operations/chains.rs +++ b/host/src/operations/chains.rs @@ -17,6 +17,7 @@ use std::fmt::Debug; use anyhow::Context; use ethers_core::types::Transaction as EthersTransaction; use log::{info, warn}; +use risc0_zkvm::compute_image_id; use serde::{Deserialize, Serialize}; use zeth_lib::{ 
builder::BlockBuilderStrategy, @@ -28,12 +29,11 @@ use zeth_lib::{ use crate::{ cli::Cli, - operations::{execute, maybe_prove}, + operations::{execute, maybe_prove, verify_bonsai_receipt}, }; pub async fn build_chain_blocks( cli: Cli, - file_reference: &String, rpc_url: Option, chain_spec: ChainSpec, guest_elf: &[u8], @@ -92,11 +92,11 @@ where Cli::Run(run_args) => { execute( &input, - run_args.exec_args.local_exec, + run_args.exec_args.execution_po2, run_args.exec_args.profile, guest_elf, &compressed_output, - file_reference, + &cli.execution_label(), ); } Cli::Prove(..) => { @@ -105,118 +105,23 @@ where &input, guest_elf, &compressed_output, - vec![], - file_reference, - None, - ); + Default::default(), + ) + .await; } - Cli::Verify(..) => { - unimplemented!() + Cli::Verify(verify_args) => { + verify_bonsai_receipt( + compute_image_id(guest_elf)?, + &compressed_output, + verify_args.bonsai_receipt_uuid.clone(), + 4, + ) + .await?; } Cli::OpInfo(..) => { unreachable!() } } - // let mut bonsai_session_uuid = args.verify_receipt_bonsai_uuid; - - // Run in Bonsai (if requested) - // if bonsai_session_uuid.is_none() && args.submit_to_bonsai { - // info!("Creating Bonsai client"); - // let client = bonsai_sdk::Client::from_env(risc0_zkvm::VERSION) - // .expect("Could not create Bonsai client"); - // - // // create the memoryImg, upload it and return the imageId - // info!("Uploading memory image"); - // let img_id = { - // let program = Program::load_elf(guest_elf, risc0_zkvm::GUEST_MAX_MEM as u32) - // .expect("Could not load ELF"); - // let image = MemoryImage::new(&program, risc0_zkvm::PAGE_SIZE as u32) - // .expect("Could not create memory image"); - // let image_id = hex::encode(image.compute_id()); - // let image = bincode::serialize(&image).expect("Failed to serialize memory - // img"); - // - // client - // .upload_img(&image_id, image) - // .expect("Could not upload ELF"); - // image_id - // }; - // - // // Prepare input data and upload it. 
- // info!("Uploading inputs"); - // let input_data = to_vec(&input).unwrap(); - // let input_data = bytemuck::cast_slice(&input_data).to_vec(); - // let input_id = client - // .upload_input(input_data) - // .expect("Could not upload inputs"); - // - // // Start a session running the prover - // info!("Starting session"); - // let session = client - // .create_session(img_id, input_id) - // .expect("Could not create Bonsai session"); - // - // println!("Bonsai session UUID: {}", session.uuid); - // bonsai_session_uuid = Some(session.uuid) - // } - - // Verify receipt from Bonsai (if requested) - // if let Some(session_uuid) = bonsai_session_uuid { - // let client = bonsai_sdk::Client::from_env(risc0_zkvm::VERSION) - // .expect("Could not create Bonsai client"); - // let session = bonsai_sdk::SessionId { uuid: session_uuid }; - // - // loop { - // let res = session - // .status(&client) - // .expect("Could not fetch Bonsai status"); - // if res.status == "RUNNING" { - // println!( - // "Current status: {} - state: {} - continue polling...", - // res.status, - // res.state.unwrap_or_default() - // ); - // tokio::time::sleep(std::time::Duration::from_secs(15)).await; - // continue; - // } - // if res.status == "SUCCEEDED" { - // // Download the receipt, containing the output - // let receipt_url = res - // .receipt_url - // .expect("API error, missing receipt on completed session"); - // - // let receipt_buf = client - // .download(&receipt_url) - // .expect("Could not download receipt"); - // let receipt: Receipt = - // bincode::deserialize(&receipt_buf).expect("Could not deserialize - // receipt"); receipt - // .verify(guest_id) - // .expect("Receipt verification failed"); - // - // let expected_hash = preflight_data.header.hash(); - // let found_hash: BlockHash = receipt.journal.decode().unwrap(); - // - // if found_hash == expected_hash { - // info!("Block hash (from Bonsai): {}", found_hash); - // } else { - // error!( - // "Final block hash mismatch (from 
Bonsai) {} (expected {})", - // found_hash, expected_hash, - // ); - // } - // } else { - // panic!( - // "Workflow exited: {} - | err: {}", - // res.status, - // res.error_msg.unwrap_or_default() - // ); - // } - // - // break; - // } - // } - Ok(()) } diff --git a/host/src/operations/mod.rs b/host/src/operations/mod.rs index 96804455..9e1a69bd 100644 --- a/host/src/operations/mod.rs +++ b/host/src/operations/mod.rs @@ -18,67 +18,222 @@ pub mod rollups; use std::fmt::Debug; -use log::{error, info}; +use log::{debug, error, info, warn}; use risc0_zkvm::{ - default_prover, serde::to_vec, Assumption, ExecutorEnv, ExecutorImpl, FileSegmentRef, Receipt, - Session, + compute_image_id, default_prover, serde::to_vec, sha::Digest, Assumption, ExecutorEnv, + ExecutorImpl, FileSegmentRef, Receipt, Session, }; use serde::{de::DeserializeOwned, Serialize}; use tempfile::tempdir; +use zeth_primitives::keccak::keccak; -use crate::{cli::Cli, save_receipt}; +use crate::{cli::Cli, load_receipt, save_receipt}; -pub fn maybe_prove( - cli: &Cli, - input: &I, - elf: &[u8], +pub async fn verify_bonsai_receipt( + image_id: Digest, expected_output: &O, - assumptions: Vec, - file_reference: &String, - receipt_index: Option<&mut usize>, -) -> Option { - if let Cli::Prove(prove_args) = cli { - if prove_args.submit_to_bonsai { - unimplemented!() + uuid: String, + max_retries: usize, +) -> anyhow::Result<(String, Receipt)> { + info!("Tracking receipt uuid: {}", uuid); + let session = bonsai_sdk::alpha::SessionId { uuid }; + + loop { + let mut res = None; + for attempt in 1..=max_retries { + let client = bonsai_sdk::alpha_async::get_client_from_env(risc0_zkvm::VERSION).await?; + + match session.status(&client) { + Ok(response) => { + res = Some(response); + break; + } + Err(err) => { + if attempt == max_retries { + anyhow::bail!(err); + } + warn!( + "Attempt {}/{} for session status request: {:?}", + attempt, max_retries, err + ); + std::thread::sleep(std::time::Duration::from_secs(15)); + 
continue; + } + } } - // run prover - let receipt = prove( - prove_args.exec_args.local_exec, - to_vec(input).expect("Could not serialize composition prep input!"), - elf, - assumptions, - prove_args.exec_args.profile, - file_reference, - ); - // verify output - let output_guest: O = receipt.journal.decode().unwrap(); - if expected_output == &output_guest { - info!("Executor succeeded"); + + let res = res.unwrap(); + + if res.status == "RUNNING" { + info!( + "Current status: {} - state: {} - continue polling...", + res.status, + res.state.unwrap_or_default() + ); + std::thread::sleep(std::time::Duration::from_secs(15)); + } else if res.status == "SUCCEEDED" { + // Download the receipt, containing the output + let receipt_url = res + .receipt_url + .expect("API error, missing receipt on completed session"); + let client = bonsai_sdk::alpha_async::get_client_from_env(risc0_zkvm::VERSION).await?; + let receipt_buf = client.download(&receipt_url)?; + let receipt: Receipt = bincode::deserialize(&receipt_buf)?; + receipt + .verify(image_id) + .expect("Receipt verification failed"); + // verify output + let receipt_output: O = receipt.journal.decode().unwrap(); + if expected_output == &receipt_output { + info!("Receipt validated!"); + } else { + error!( + "Output mismatch! Receipt: {:?}, expected: {:?}", + receipt_output, expected_output, + ); + } + return Ok((session.uuid, receipt)); } else { - error!( - "Output mismatch! 
Executor: {:?}, expected: {:?}", - output_guest, expected_output, + panic!( + "Workflow exited: {} - | err: {}", + res.status, + res.error_msg.unwrap_or_default() ); } - // save receipt - save_receipt(file_reference, &receipt, receipt_index); - // return result - Some(receipt) + } +} + +pub async fn maybe_prove( + cli: &Cli, + input: &I, + elf: &[u8], + expected_output: &O, + assumptions: (Vec, Vec), +) -> Option<(String, Receipt)> { + let Cli::Prove(prove_args) = cli else { + return None; + }; + + let (assumption_instances, assumption_uuids) = assumptions; + let encoded_input = to_vec(input).expect("Could not serialize proving input!"); + + let encoded_output = + to_vec(expected_output).expect("Could not serialize expected proving output!"); + let computed_image_id = compute_image_id(elf).expect("Failed to compute elf image id!"); + + let receipt_label = format!( + "{}-{}", + hex::encode(computed_image_id), + hex::encode(keccak(bytemuck::cast_slice(&encoded_output))) + ); + + // get receipt + let (mut receipt_uuid, receipt, cached) = + if let Ok(Some(cached_data)) = load_receipt(&receipt_label) { + info!("Loaded locally cached receipt"); + (cached_data.0, cached_data.1, true) + } else if prove_args.submit_to_bonsai { + // query bonsai service until it works + loop { + if let Ok(remote_proof) = prove_bonsai( + encoded_input.clone(), + elf, + expected_output, + assumption_uuids.clone(), + ) + .await + { + break (remote_proof.0, remote_proof.1, false); + } + } + } else { + // run prover + ( + Default::default(), + prove_locally( + prove_args.exec_args.execution_po2, + encoded_input, + elf, + assumption_instances, + prove_args.exec_args.profile, + &cli.execution_label(), + ), + false, + ) + }; + + // verify output + let output_guest: O = receipt.journal.decode().unwrap(); + if expected_output == &output_guest { + info!("Prover succeeded"); } else { - None + error!( + "Output mismatch! 
Prover: {:?}, expected: {:?}", + output_guest, expected_output, + ); } + + // upload receipt to bonsai + if prove_args.submit_to_bonsai && receipt_uuid.is_empty() { + info!("Uploading cached receipt without UUID to Bonsai."); + receipt_uuid = upload_receipt(&receipt) + .await + .expect("Failed to upload cached receipt to Bonsai"); + } + + let result = (receipt_uuid, receipt); + + // save receipt + if !cached { + save_receipt(&receipt_label, &result); + } + + // return result + Some(result) +} + +pub async fn upload_receipt(receipt: &Receipt) -> anyhow::Result { + let client = bonsai_sdk::alpha_async::get_client_from_env(risc0_zkvm::VERSION).await?; + Ok(client.upload_receipt(bincode::serialize(receipt)?)?) +} + +pub async fn prove_bonsai( + encoded_input: Vec, + elf: &[u8], + expected_output: &O, + assumption_uuids: Vec, +) -> anyhow::Result<(String, Receipt)> { + info!("Proving on Bonsai"); + // Compute the image_id, then upload the ELF with the image_id as its key. + let image_id = risc0_zkvm::compute_image_id(elf)?; + let encoded_image_id = hex::encode(image_id); + // Prepare input data + let input_data = bytemuck::cast_slice(&encoded_input).to_vec(); + + let client = bonsai_sdk::alpha_async::get_client_from_env(risc0_zkvm::VERSION).await?; + client.upload_img(&encoded_image_id, elf.to_vec())?; + // upload input + let input_id = client.upload_input(input_data.clone())?; + + let session = client.create_session( + encoded_image_id.clone(), + input_id.clone(), + assumption_uuids.clone(), + )?; + + verify_bonsai_receipt(image_id, expected_output, session.uuid.clone(), 8).await } -pub fn prove( +pub fn prove_locally( segment_limit_po2: u32, encoded_input: Vec, elf: &[u8], assumptions: Vec, profile: bool, - file_reference: &String, + profile_reference: &String, ) -> Receipt { - info!("Proving with segment_limit_po2 = {:?}", segment_limit_po2); - info!( + debug!("Proving with segment_limit_po2 = {:?}", segment_limit_po2); + debug!( "Input size: {} words ( {} MB )", 
encoded_input.len(), encoded_input.len() * 4 / 1_000_000 @@ -94,7 +249,7 @@ pub fn prove( if profile { info!("Profiling enabled."); - env_builder.enable_profiler(format!("profile_{}.pb", file_reference)); + env_builder.enable_profiler(format!("profile_{}.pb", profile_reference)); } for assumption in assumptions { @@ -111,15 +266,15 @@ pub fn execute( profile: bool, elf: &[u8], expected_output: &O, - file_reference: &String, + profile_reference: &String, ) -> Session { - info!( + debug!( "Running in executor with segment_limit_po2 = {:?}", segment_limit_po2 ); let input = to_vec(input).expect("Could not serialize input!"); - info!( + debug!( "Input size: {} words ( {} MB )", input.len(), input.len() * 4 / 1_000_000 @@ -136,7 +291,7 @@ pub fn execute( if profile { info!("Profiling enabled."); - builder.enable_profiler(format!("profile_{}.pb", file_reference)); + builder.enable_profiler(format!("profile_{}.pb", profile_reference)); } let env = builder.build().unwrap(); diff --git a/host/src/operations/rollups.rs b/host/src/operations/rollups.rs index 4c181906..95784203 100644 --- a/host/src/operations/rollups.rs +++ b/host/src/operations/rollups.rs @@ -12,14 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::{ - borrow::BorrowMut, - collections::VecDeque, - sync::{Arc, Mutex}, -}; +use std::collections::VecDeque; use anyhow::Context; -use log::info; +use log::{info, trace}; use risc0_zkvm::Assumption; use zeth_guests::*; use zeth_lib::{ @@ -41,9 +37,12 @@ use zeth_primitives::{ transactions::optimism::OptimismTxEssence, }; -use crate::{cli::Cli, operations::maybe_prove}; +use crate::{ + cli::Cli, + operations::{maybe_prove, verify_bonsai_receipt}, +}; -pub async fn derive_rollup_blocks(cli: Cli, file_reference: &String) -> anyhow::Result<()> { +pub async fn derive_rollup_blocks(cli: Cli) -> anyhow::Result<()> { info!("Fetching data ..."); let core_args = cli.core_args().clone(); let op_builder_provider_factory = ProviderFactory::new( @@ -51,7 +50,6 @@ pub async fn derive_rollup_blocks(cli: Cli, file_reference: &String) -> anyhow:: Network::Optimism, core_args.op_rpc_url.clone(), ); - let receipt_index = Arc::new(Mutex::new(0usize)); info!("Running preflight"); let derive_input = DeriveInput { @@ -71,7 +69,6 @@ pub async fn derive_rollup_blocks(cli: Cli, file_reference: &String) -> anyhow:: DeriveMachine::new(&OPTIMISM_CHAIN_SPEC, derive_input, Some(factory_clone)) .expect("Could not create derive machine"); let mut op_block_inputs = vec![]; - let derive_output = derive_machine .derive(Some(&mut op_block_inputs)) .expect("could not derive"); @@ -79,8 +76,8 @@ pub async fn derive_rollup_blocks(cli: Cli, file_reference: &String) -> anyhow:: }) .await?; - let (assumptions, op_block_outputs) = - build_op_blocks(&cli, file_reference, receipt_index.clone(), op_block_inputs); + let (assumptions, bonsai_receipt_uuids, op_block_outputs) = + build_op_blocks(&cli, op_block_inputs).await; let derive_input_mem = DeriveInput { db: derive_machine.derive_input.db.get_mem_db(), @@ -120,129 +117,41 @@ pub async fn derive_rollup_blocks(cli: Cli, file_reference: &String) -> anyhow:: println!("Derived: {} {}", derived_block.number, derived_block.hash); } - maybe_prove( - &cli, - 
&derive_input_mem, - OP_DERIVE_ELF, - &derive_output, - assumptions, - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), - ); + match &cli { + Cli::Build(..) => {} + Cli::Run(..) => {} + Cli::Prove(..) => { + maybe_prove( + &cli, + &derive_input_mem, + OP_DERIVE_ELF, + &derive_output, + (assumptions, bonsai_receipt_uuids), + ) + .await; + } + Cli::Verify(verify_args) => { + verify_bonsai_receipt( + OP_DERIVE_ID.into(), + &derive_output, + verify_args.bonsai_receipt_uuid.clone(), + 4, + ) + .await?; + } + Cli::OpInfo(..) => { + unreachable!() + } + } - // let mut bonsai_session_uuid = args.verify_receipt_bonsai_uuid; - - // Run in Bonsai (if requested) - // if bonsai_session_uuid.is_none() && args.submit_to_bonsai { - // info!("Creating Bonsai client"); - // let client = bonsai_sdk::Client::from_env(risc0_zkvm::VERSION) - // .expect("Could not create Bonsai client"); - // - // // create the memoryImg, upload it and return the imageId - // info!("Uploading memory image"); - // let img_id = { - // let program = Program::load_elf(OP_DERIVE_ELF, risc0_zkvm::GUEST_MAX_MEM as - // u32) .expect("Could not load ELF"); - // let image = MemoryImage::new(&program, risc0_zkvm::PAGE_SIZE as u32) - // .expect("Could not create memory image"); - // let image_id = hex::encode(image.compute_id()); - // let image = bincode::serialize(&image).expect("Failed to serialize memory - // img"); - // - // client - // .upload_img(&image_id, image) - // .expect("Could not upload ELF"); - // image_id - // }; - // - // // Prepare input data and upload it. 
- // info!("Uploading inputs"); - // let input_data = to_vec(&derive_input).unwrap(); - // let input_data = bytemuck::cast_slice(&input_data).to_vec(); - // let input_id = client - // .upload_input(input_data) - // .expect("Could not upload inputs"); - // - // // Start a session running the prover - // info!("Starting session"); - // let session = client - // .create_session(img_id, input_id) - // .expect("Could not create Bonsai session"); - // - // println!("Bonsai session UUID: {}", session.uuid); - // bonsai_session_uuid = Some(session.uuid) - // } - - // Verify receipt from Bonsai (if requested) - // if let Some(session_uuid) = bonsai_session_uuid { - // let client = bonsai_sdk::Client::from_env(risc0_zkvm::VERSION) - // .expect("Could not create Bonsai client"); - // let session = bonsai_sdk::SessionId { uuid: session_uuid }; - // - // loop { - // let res = session - // .status(&client) - // .expect("Could not fetch Bonsai status"); - // if res.status == "RUNNING" { - // println!( - // "Current status: {} - state: {} - continue polling...", - // res.status, - // res.state.unwrap_or_default() - // ); - // tokio::time::sleep(std::time::Duration::from_secs(15)).await; - // continue; - // } - // if res.status == "SUCCEEDED" { - // // Download the receipt, containing the output - // let receipt_url = res - // .receipt_url - // .expect("API error, missing receipt on completed session"); - // - // let receipt_buf = client - // .download(&receipt_url) - // .expect("Could not download receipt"); - // let receipt: Receipt = - // bincode::deserialize(&receipt_buf).expect("Could not deserialize - // receipt"); receipt - // .verify(OP_DERIVE_ID) - // .expect("Receipt verification failed"); - // - // let bonsai_output: DeriveOutput = receipt.journal.decode().unwrap(); - // - // if output == bonsai_output { - // println!("Bonsai succeeded"); - // } else { - // error!( - // "Output mismatch! 
Bonsai: {:?}, expected: {:?}", - // bonsai_output, output, - // ); - // } - // } else { - // panic!( - // "Workflow exited: {} - | err: {}", - // res.status, - // res.error_msg.unwrap_or_default() - // ); - // } - // - // break; - // } - // - // info!("Bonsai request completed"); - // } Ok(()) } -pub async fn compose_derived_rollup_blocks( - cli: Cli, - composition_size: u64, - file_reference: &String, -) -> anyhow::Result<()> { +pub async fn compose_derived_rollup_blocks(cli: Cli, composition_size: u64) -> anyhow::Result<()> { let core_args = cli.core_args().clone(); // OP Composition info!("Fetching data ..."); let mut lift_queue = Vec::new(); - let receipt_index = Arc::new(Mutex::new(0usize)); let mut complete_eth_chain: Vec
= Vec::new(); for op_block_index in (0..core_args.block_count).step_by(composition_size as usize) { let db = RpcDb::new( @@ -308,8 +217,8 @@ pub async fn compose_derived_rollup_blocks( } eth_chain.push(eth_tail); - let (assumptions, op_block_outputs) = - build_op_blocks(&cli, file_reference, receipt_index.clone(), op_block_inputs); + let (assumptions, bonsai_receipt_uuids, op_block_outputs) = + build_op_blocks(&cli, op_block_inputs).await; let derive_input_mem = DeriveInput { db: derive_machine.derive_input.db.get_mem_db(), @@ -341,10 +250,9 @@ pub async fn compose_derived_rollup_blocks( &derive_input_mem, OP_DERIVE_ELF, &derive_output, - assumptions, - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), - ); + (assumptions, bonsai_receipt_uuids), + ) + .await; // Append derivation outputs to lift queue lift_queue.push((derive_output, receipt)); @@ -392,16 +300,16 @@ pub async fn compose_derived_rollup_blocks( &prep_compose_input, OP_COMPOSE_ELF, &prep_compose_output, - vec![], - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), - ); + Default::default(), + ) + .await; // Lift + info!("Lifting {} proofs...", lift_queue.len()); let mut join_queue = VecDeque::new(); for (derive_output, derive_receipt) in lift_queue { let eth_tail_hash = derive_output.eth_tail.hash.0; - info!("Lifting ... {:?}", &derive_output); + trace!("Lifting ... {:?}", &derive_output); let lift_compose_input = ComposeInput { block_image_id: OP_BLOCK_ID, derive_image_id: OP_DERIVE_ID, @@ -416,18 +324,17 @@ pub async fn compose_derived_rollup_blocks( .clone() .process() .expect("Lift composition failed."); - info!("Lifted ... {:?}", &lift_compose_output); + trace!("Lifted ... 
{:?}", &lift_compose_output); - let lift_compose_receipt = if let Some(receipt) = derive_receipt { + let lift_compose_receipt = if let Some((receipt_uuid, receipt)) = derive_receipt { maybe_prove( &cli, &lift_compose_input, OP_COMPOSE_ELF, &lift_compose_output, - vec![receipt.into()], - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), + (vec![receipt.into()], vec![receipt_uuid]), ) + .await } else { None }; @@ -436,12 +343,13 @@ pub async fn compose_derived_rollup_blocks( } // Join + info!("Composing {} proofs...", join_queue.len()); while join_queue.len() > 1 { // Pop left output let (left, left_receipt) = join_queue.pop_front().unwrap(); // Only peek at right output let (right, _right_receipt) = join_queue.front().unwrap(); - info!("Joining"); + trace!("Joining"); let ComposeOutputOperation::AGGREGATE { op_tail: left_op_tail, .. @@ -458,9 +366,10 @@ pub async fn compose_derived_rollup_blocks( }; // Push dangling workloads (odd block count) to next round if left_op_tail != right_op_head { - info!( + trace!( "Skipping dangling workload: {} - {}", - left_op_tail.number, right_op_head.number + left_op_tail.number, + right_op_head.number ); join_queue.push_back((left, left_receipt)); continue; @@ -474,26 +383,31 @@ pub async fn compose_derived_rollup_blocks( operation: ComposeInputOperation::JOIN { left, right }, eth_chain_merkle_root: eth_chain_root, }; - info!("Joining ..."); + trace!("Joining ..."); let join_compose_output = join_compose_input .clone() .process() .expect("Join composition failed."); - let join_compose_receipt = - if let (Some(left_receipt), Some(right_receipt)) = (left_receipt, right_receipt) { - maybe_prove( - &cli, - &join_compose_input, - OP_COMPOSE_ELF, - &join_compose_output, + let join_compose_receipt = if let ( + Some((left_receipt_uuid, left_receipt)), + Some((right_receipt_uuid, right_receipt)), + ) = (left_receipt, right_receipt) + { + maybe_prove( + &cli, + &join_compose_input, + OP_COMPOSE_ELF, + &join_compose_output, 
+ ( vec![left_receipt.into(), right_receipt.into()], - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), - ) - } else { - None - }; + vec![left_receipt_uuid, right_receipt_uuid], + ), + ) + .await + } else { + None + }; // Send workload to next round join_queue.push_back((join_compose_output, join_compose_receipt)); @@ -517,59 +431,58 @@ pub async fn compose_derived_rollup_blocks( .process() .expect("Finish composition failed."); - let op_compose_receipt = if let (Some(prep_receipt), Some(aggregate_receipt)) = - (prep_compose_receipt, aggregate_receipt) + if let ( + Some((prep_receipt_uuid, prep_receipt)), + Some((aggregate_receipt_uuid, aggregate_receipt)), + ) = (prep_compose_receipt, aggregate_receipt) { maybe_prove( &cli, &finish_compose_input, OP_COMPOSE_ELF, &finish_compose_output, - vec![prep_receipt.into(), aggregate_receipt.into()], - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), + ( + vec![prep_receipt.into(), aggregate_receipt.into()], + vec![prep_receipt_uuid, aggregate_receipt_uuid], + ), + ) + .await; + } else if let Cli::Verify(verify_args) = cli { + verify_bonsai_receipt( + OP_COMPOSE_ID.into(), + &finish_compose_output, + verify_args.bonsai_receipt_uuid.clone(), + 4, ) + .await?; } else { - None + info!("Preflight successful!"); }; - dbg!(&finish_compose_output); - - if let Some(final_receipt) = op_compose_receipt { - final_receipt - .verify(OP_COMPOSE_ID) - .expect("Failed to verify final receipt"); - info!("Verified final receipt!"); - } + trace!("Final composition output: {:?}", &finish_compose_output); Ok(()) } -fn build_op_blocks( +async fn build_op_blocks( cli: &Cli, - file_reference: &String, - receipt_index: Arc>, op_block_inputs: Vec>, -) -> (Vec, Vec) { +) -> (Vec, Vec, Vec) { let mut assumptions: Vec = vec![]; + let mut bonsai_uuids = vec![]; let mut op_block_outputs = vec![]; for input in op_block_inputs { let output = OptimismStrategy::build_from(&OP_MAINNET_CHAIN_SPEC, input.clone()) 
.expect("Failed to build op block") .with_state_hashed(); - if let Some(receipt) = maybe_prove( - cli, - &input, - OP_BLOCK_ELF, - &output, - vec![], - file_reference, - Some(receipt_index.lock().unwrap().borrow_mut()), - ) { + if let Some((bonsai_receipt_uuid, receipt)) = + maybe_prove(cli, &input, OP_BLOCK_ELF, &output, Default::default()).await + { assumptions.push(receipt.into()); + bonsai_uuids.push(bonsai_receipt_uuid); } op_block_outputs.push(output); } - (assumptions, op_block_outputs) + (assumptions, bonsai_uuids, op_block_outputs) } diff --git a/host/testdata/derivation/ethereum/18090206.json.gz b/host/testdata/derivation/ethereum/18090206.json.gz index da043287..33322084 100644 Binary files a/host/testdata/derivation/ethereum/18090206.json.gz and b/host/testdata/derivation/ethereum/18090206.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090207.json.gz b/host/testdata/derivation/ethereum/18090207.json.gz index d6b69373..c3635dba 100644 Binary files a/host/testdata/derivation/ethereum/18090207.json.gz and b/host/testdata/derivation/ethereum/18090207.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090208.json.gz b/host/testdata/derivation/ethereum/18090208.json.gz index 1e886d15..80bd7e9f 100644 Binary files a/host/testdata/derivation/ethereum/18090208.json.gz and b/host/testdata/derivation/ethereum/18090208.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090209.json.gz b/host/testdata/derivation/ethereum/18090209.json.gz index 88cd5f04..d7c49ae0 100644 Binary files a/host/testdata/derivation/ethereum/18090209.json.gz and b/host/testdata/derivation/ethereum/18090209.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090210.json.gz b/host/testdata/derivation/ethereum/18090210.json.gz index d39fe8bc..d60a44cd 100644 Binary files a/host/testdata/derivation/ethereum/18090210.json.gz and b/host/testdata/derivation/ethereum/18090210.json.gz differ diff --git 
a/host/testdata/derivation/ethereum/18090211.json.gz b/host/testdata/derivation/ethereum/18090211.json.gz index 5cd54573..10e0f035 100644 Binary files a/host/testdata/derivation/ethereum/18090211.json.gz and b/host/testdata/derivation/ethereum/18090211.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090212.json.gz b/host/testdata/derivation/ethereum/18090212.json.gz index d46eca5d..6c720764 100644 Binary files a/host/testdata/derivation/ethereum/18090212.json.gz and b/host/testdata/derivation/ethereum/18090212.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090213.json.gz b/host/testdata/derivation/ethereum/18090213.json.gz index de28d0c1..b953031d 100644 Binary files a/host/testdata/derivation/ethereum/18090213.json.gz and b/host/testdata/derivation/ethereum/18090213.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090214.json.gz b/host/testdata/derivation/ethereum/18090214.json.gz index b4f6249b..c77cb194 100644 Binary files a/host/testdata/derivation/ethereum/18090214.json.gz and b/host/testdata/derivation/ethereum/18090214.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090215.json.gz b/host/testdata/derivation/ethereum/18090215.json.gz index 72ead9d2..fff3b013 100644 Binary files a/host/testdata/derivation/ethereum/18090215.json.gz and b/host/testdata/derivation/ethereum/18090215.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090216.json.gz b/host/testdata/derivation/ethereum/18090216.json.gz index c25ee505..0ea65cd8 100644 Binary files a/host/testdata/derivation/ethereum/18090216.json.gz and b/host/testdata/derivation/ethereum/18090216.json.gz differ diff --git a/host/testdata/derivation/ethereum/18090217.json.gz b/host/testdata/derivation/ethereum/18090217.json.gz index 7b03884b..67abe012 100644 Binary files a/host/testdata/derivation/ethereum/18090217.json.gz and b/host/testdata/derivation/ethereum/18090217.json.gz differ diff --git 
a/host/testdata/derivation/ethereum/18090218.json.gz b/host/testdata/derivation/ethereum/18090218.json.gz index 899286e0..cc0eb42b 100644 Binary files a/host/testdata/derivation/ethereum/18090218.json.gz and b/host/testdata/derivation/ethereum/18090218.json.gz differ diff --git a/lib/src/builder/execute/ethereum.rs b/lib/src/builder/execute/ethereum.rs index 36edcce1..faf495b6 100644 --- a/lib/src/builder/execute/ethereum.rs +++ b/lib/src/builder/execute/ethereum.rs @@ -16,7 +16,7 @@ use core::{fmt::Debug, mem::take}; use anyhow::{anyhow, bail, Context}; #[cfg(not(target_os = "zkvm"))] -use log::debug; +use log::trace; use revm::{ interpreter::Host, primitives::{Account, Address, ResultAndState, SpecId, TransactTo, TxEnv}, @@ -67,7 +67,6 @@ impl TxExecStrategy for EthTxExecStrategy { #[cfg(not(target_os = "zkvm"))] { use chrono::{TimeZone, Utc}; - use log::info; let dt = Utc .timestamp_opt( block_builder @@ -80,24 +79,24 @@ impl TxExecStrategy for EthTxExecStrategy { ) .unwrap(); - info!("Block no. {}", header.number); - info!(" EVM spec ID: {:?}", spec_id); - info!(" Timestamp: {}", dt); - info!( + trace!("Block no. {}", header.number); + trace!(" EVM spec ID: {:?}", spec_id); + trace!(" Timestamp: {}", dt); + trace!( " Transactions: {}", block_builder.input.state_input.transactions.len() ); - info!( + trace!( " Withdrawals: {}", block_builder.input.state_input.withdrawals.len() ); - info!( + trace!( " Fee Recipient: {:?}", block_builder.input.state_input.beneficiary ); - info!(" Gas limit: {}", block_builder.input.state_input.gas_limit); - info!(" Base fee per gas: {}", header.base_fee_per_gas); - info!( + trace!(" Gas limit: {}", block_builder.input.state_input.gas_limit); + trace!(" Base fee per gas: {}", header.base_fee_per_gas); + trace!( " Extra data: {:?}", block_builder.input.state_input.extra_data ); @@ -144,10 +143,10 @@ impl TxExecStrategy for EthTxExecStrategy { #[cfg(not(target_os = "zkvm"))] { let tx_hash = tx.hash(); - debug!("Tx no. 
{} (hash: {})", tx_no, tx_hash); - debug!(" Type: {}", tx.essence.tx_type()); - debug!(" Fr: {:?}", tx_from); - debug!(" To: {:?}", tx.essence.to().unwrap_or_default()); + trace!("Tx no. {} (hash: {})", tx_no, tx_hash); + trace!(" Type: {}", tx.essence.tx_type()); + trace!(" Fr: {:?}", tx_from); + trace!(" To: {:?}", tx.essence.to().unwrap_or_default()); } // verify transaction gas @@ -167,7 +166,7 @@ impl TxExecStrategy for EthTxExecStrategy { cumulative_gas_used = cumulative_gas_used.checked_add(gas_used).unwrap(); #[cfg(not(target_os = "zkvm"))] - debug!(" Ok: {:?}", result); + trace!(" Ok: {:?}", result); // create the receipt from the EVM result let receipt = Receipt::new( @@ -194,7 +193,7 @@ impl TxExecStrategy for EthTxExecStrategy { for (address, account) in &state { if account.is_touched() { // log account - debug!( + trace!( " State {:?} (is_selfdestructed={}, is_loaded_as_not_existing={}, is_created={}, is_empty={})", address, account.is_selfdestructed(), @@ -203,17 +202,18 @@ impl TxExecStrategy for EthTxExecStrategy { account.is_empty(), ); // log balance changes - debug!( + trace!( " After balance: {} (Nonce: {})", - account.info.balance, account.info.nonce + account.info.balance, + account.info.nonce ); // log state changes for (addr, slot) in &account.storage { if slot.is_changed() { - debug!(" Storage address: {:?}", addr); - debug!(" Before: {:?}", slot.original_value()); - debug!(" After: {:?}", slot.present_value()); + trace!(" Storage address: {:?}", addr); + trace!(" Before: {:?}", slot.original_value()); + trace!(" After: {:?}", slot.present_value()); } } } @@ -235,9 +235,9 @@ impl TxExecStrategy for EthTxExecStrategy { #[cfg(not(target_os = "zkvm"))] { - debug!("Withdrawal no. {}", withdrawal.index); - debug!(" Recipient: {:?}", withdrawal.address); - debug!(" Value: {}", amount_wei); + trace!("Withdrawal no. 
{}", withdrawal.index); + trace!(" Recipient: {:?}", withdrawal.address); + trace!(" Value: {}", amount_wei); } // Credit withdrawal amount increase_account_balance(&mut evm.context.evm.db, withdrawal.address, amount_wei)?; diff --git a/lib/src/builder/execute/optimism.rs b/lib/src/builder/execute/optimism.rs index 9bbc7564..c5f4e029 100644 --- a/lib/src/builder/execute/optimism.rs +++ b/lib/src/builder/execute/optimism.rs @@ -16,7 +16,7 @@ use core::{fmt::Debug, mem::take}; use anyhow::{anyhow, bail, Context, Result}; #[cfg(not(target_os = "zkvm"))] -use log::debug; +use log::trace; use revm::{ interpreter::Host, optimism, @@ -70,7 +70,6 @@ impl TxExecStrategy for OpTxExecStrategy { #[cfg(not(target_os = "zkvm"))] { use chrono::{TimeZone, Utc}; - use log::info; let dt = Utc .timestamp_opt( block_builder @@ -83,20 +82,20 @@ impl TxExecStrategy for OpTxExecStrategy { ) .unwrap(); - info!("Block no. {}", header.number); - info!(" EVM spec ID: {:?}", spec_id); - info!(" Timestamp: {}", dt); - info!( + trace!("Block no. {}", header.number); + trace!(" EVM spec ID: {:?}", spec_id); + trace!(" Timestamp: {}", dt); + trace!( " Transactions: {}", block_builder.input.state_input.transactions.len() ); - info!( + trace!( " Fee Recipient: {:?}", block_builder.input.state_input.beneficiary ); - info!(" Gas limit: {}", block_builder.input.state_input.gas_limit); - info!(" Base fee per gas: {}", header.base_fee_per_gas); - info!( + trace!(" Gas limit: {}", block_builder.input.state_input.gas_limit); + trace!(" Base fee per gas: {}", header.base_fee_per_gas); + trace!( " Extra data: {:?}", block_builder.input.state_input.extra_data ); @@ -143,10 +142,10 @@ impl TxExecStrategy for OpTxExecStrategy { #[cfg(not(target_os = "zkvm"))] { let tx_hash = tx.hash(); - debug!("Tx no. {} (hash: {})", tx_no, tx_hash); - debug!(" Type: {}", tx.essence.tx_type()); - debug!(" Fr: {:?}", tx_from); - debug!(" To: {:?}", tx.essence.to().unwrap_or_default()); + trace!("Tx no. 
{} (hash: {})", tx_no, tx_hash); + trace!(" Type: {}", tx.essence.tx_type()); + trace!(" Fr: {:?}", tx_from); + trace!(" To: {:?}", tx.essence.to().unwrap_or_default()); } // verify transaction gas @@ -160,9 +159,9 @@ impl TxExecStrategy for OpTxExecStrategy { OptimismTxEssence::OptimismDeposited(deposit) => { #[cfg(not(target_os = "zkvm"))] { - debug!(" Source: {:?}", &deposit.source_hash); - debug!(" Mint: {:?}", &deposit.mint); - debug!(" System Tx: {:?}", deposit.is_system_tx); + trace!(" Source: {:?}", &deposit.source_hash); + trace!(" Mint: {:?}", &deposit.mint); + trace!(" System Tx: {:?}", deposit.is_system_tx); } // Initialize tx environment @@ -182,7 +181,7 @@ impl TxExecStrategy for OpTxExecStrategy { cumulative_gas_used = cumulative_gas_used.checked_add(gas_used).unwrap(); #[cfg(not(target_os = "zkvm"))] - debug!(" Ok: {:?}", result); + trace!(" Ok: {:?}", result); // create the receipt from the EVM result let receipt = Receipt::new( @@ -197,7 +196,7 @@ impl TxExecStrategy for OpTxExecStrategy { for (address, account) in &state { if account.is_touched() { // log account - debug!( + trace!( " State {:?} (is_selfdestructed={}, is_loaded_as_not_existing={}, is_created={})", address, account.is_selfdestructed(), @@ -205,17 +204,18 @@ impl TxExecStrategy for OpTxExecStrategy { account.is_created() ); // log balance changes - debug!( + trace!( " After balance: {} (Nonce: {})", - account.info.balance, account.info.nonce + account.info.balance, + account.info.nonce ); // log state changes for (addr, slot) in &account.storage { if slot.is_changed() { - debug!(" Storage address: {:?}", addr); - debug!(" Before: {:?}", slot.original_value()); - debug!(" After: {:?}", slot.present_value()); + trace!(" Storage address: {:?}", addr); + trace!(" Before: {:?}", slot.original_value()); + trace!(" After: {:?}", slot.present_value()); } } } diff --git a/lib/src/host/preflight.rs b/lib/src/host/preflight.rs index 8fc79d17..d47a937b 100644 --- a/lib/src/host/preflight.rs 
+++ b/lib/src/host/preflight.rs @@ -23,7 +23,7 @@ use ethers_core::types::{ Block as EthersBlock, EIP1186ProofResponse, Transaction as EthersTransaction, }; use hashbrown::{HashMap, HashSet}; -use log::info; +use log::{debug, info}; use zeth_primitives::{ block::Header, ethers::{from_ethers_h160, from_ethers_h256, from_ethers_u256}, @@ -95,7 +95,7 @@ where block_no: block_no - 1, })?; - info!( + debug!( "Initial block: {:?} ({:?})", parent_block.number.unwrap(), parent_block.hash.unwrap() @@ -105,12 +105,12 @@ where // Fetch the target block let block = provider.get_full_block(&BlockQuery { block_no })?; - info!( + debug!( "Final block number: {:?} ({:?})", block.number.unwrap(), block.hash.unwrap() ); - info!("Transaction count: {:?}", block.transactions.len()); + debug!("Transaction count: {:?}", block.transactions.len()); // Create the provider DB let provider_db = ProviderDb::new(provider, parent_header.number); @@ -248,11 +248,11 @@ impl TryFrom> for BlockBuildInput { data.proofs, )?; - info!( + debug!( "The partial state trie consists of {} nodes", state_trie.size() ); - info!( + debug!( "The partial storage tries consist of {} nodes", storage.values().map(|(n, _)| n.size()).sum::() ); diff --git a/lib/src/host/provider/rpc_provider.rs b/lib/src/host/provider/rpc_provider.rs index 2644e1f2..6d9bc5eb 100644 --- a/lib/src/host/provider/rpc_provider.rs +++ b/lib/src/host/provider/rpc_provider.rs @@ -17,7 +17,7 @@ use ethers_core::types::{ Block, Bytes, EIP1186ProofResponse, Transaction, TransactionReceipt, H256, U256, }; use ethers_providers::{Http, Middleware, RetryClient}; -use log::info; +use log::debug; use super::{AccountQuery, BlockQuery, ProofQuery, Provider, StorageQuery}; @@ -45,11 +45,11 @@ impl Provider for RpcProvider { } fn get_full_block(&mut self, query: &BlockQuery) -> Result> { - info!("Querying RPC for full block: {:?}", query); + debug!("Querying RPC for full block: {:?}", query); let response = self .tokio_handle - .block_on(async { 
self.http_client.get_block_with_txs(query.block_no).await })?; + .block_on(self.http_client.get_block_with_txs(query.block_no))?; match response { Some(out) => Ok(out), @@ -58,11 +58,11 @@ impl Provider for RpcProvider { } fn get_partial_block(&mut self, query: &BlockQuery) -> Result> { - info!("Querying RPC for partial block: {:?}", query); + debug!("Querying RPC for partial block: {:?}", query); let response = self .tokio_handle - .block_on(async { self.http_client.get_block(query.block_no).await })?; + .block_on(self.http_client.get_block(query.block_no))?; match response { Some(out) => Ok(out), @@ -71,75 +71,68 @@ impl Provider for RpcProvider { } fn get_block_receipts(&mut self, query: &BlockQuery) -> Result> { - info!("Querying RPC for block receipts: {:?}", query); + debug!("Querying RPC for block receipts: {:?}", query); let response = self .tokio_handle - .block_on(async { self.http_client.get_block_receipts(query.block_no).await })?; + .block_on(self.http_client.get_block_receipts(query.block_no))?; Ok(response) } fn get_proof(&mut self, query: &ProofQuery) -> Result { - info!("Querying RPC for inclusion proof: {:?}", query); + debug!("Querying RPC for inclusion proof: {:?}", query); - let out = self.tokio_handle.block_on(async { - self.http_client - .get_proof( - query.address, - query.indices.iter().cloned().collect(), - Some(query.block_no.into()), - ) - .await - })?; + let out = self.tokio_handle.block_on(self.http_client.get_proof( + query.address, + query.indices.iter().cloned().collect(), + Some(query.block_no.into()), + ))?; Ok(out) } fn get_transaction_count(&mut self, query: &AccountQuery) -> Result { - info!("Querying RPC for transaction count: {:?}", query); + debug!("Querying RPC for transaction count: {:?}", query); - let out = self.tokio_handle.block_on(async { + let out = self.tokio_handle.block_on( self.http_client - .get_transaction_count(query.address, Some(query.block_no.into())) - .await - })?; + .get_transaction_count(query.address, 
Some(query.block_no.into())), + )?; Ok(out) } fn get_balance(&mut self, query: &AccountQuery) -> Result { - info!("Querying RPC for balance: {:?}", query); + debug!("Querying RPC for balance: {:?}", query); - let out = self.tokio_handle.block_on(async { + let out = self.tokio_handle.block_on( self.http_client - .get_balance(query.address, Some(query.block_no.into())) - .await - })?; + .get_balance(query.address, Some(query.block_no.into())), + )?; Ok(out) } fn get_code(&mut self, query: &AccountQuery) -> Result { - info!("Querying RPC for code: {:?}", query); + debug!("Querying RPC for code: {:?}", query); - let out = self.tokio_handle.block_on(async { + let out = self.tokio_handle.block_on( self.http_client - .get_code(query.address, Some(query.block_no.into())) - .await - })?; + .get_code(query.address, Some(query.block_no.into())), + )?; Ok(out) } fn get_storage(&mut self, query: &StorageQuery) -> Result { - info!("Querying RPC for storage: {:?}", query); + debug!("Querying RPC for storage: {:?}", query); - let out = self.tokio_handle.block_on(async { - self.http_client - .get_storage_at(query.address, query.index, Some(query.block_no.into())) - .await - })?; + let out = self.tokio_handle.block_on(self.http_client.get_storage_at( + query.address, + query.index, + Some(query.block_no.into()), + ))?; Ok(out) } diff --git a/lib/src/optimism/batcher.rs b/lib/src/optimism/batcher.rs index a882b08a..96da4fa6 100644 --- a/lib/src/optimism/batcher.rs +++ b/lib/src/optimism/batcher.rs @@ -204,7 +204,7 @@ impl Batcher { while let Some(batches) = self.batcher_channel.read_batches() { batches.into_iter().for_each(|batch| { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::trace!( "received batch: timestamp={}, parent_hash={}, epoch={}", batch.essence.timestamp, batch.essence.parent_hash, @@ -316,7 +316,7 @@ impl Batcher { match batch.essence.timestamp.cmp(&next_timestamp) { Ordering::Greater => { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::trace!( "Future 
batch: {} = batch.timestamp > next_timestamp = {}", &batch.essence.timestamp, &next_timestamp @@ -325,7 +325,7 @@ impl Batcher { } Ordering::Less => { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::trace!( "Batch too old: {} = batch.timestamp < next_timestamp = {}", &batch.essence.timestamp, &next_timestamp @@ -339,7 +339,7 @@ impl Batcher { // "batch.parent_hash != safe_l2_head.hash -> drop" if batch.essence.parent_hash != safe_l2_head.hash { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Incorrect parent hash: {} != {}", batch.essence.parent_hash, safe_l2_head.hash @@ -351,7 +351,7 @@ impl Batcher { // "batch.epoch_num + sequence_window_size < inclusion_block_number -> drop" if batch.essence.epoch_num + self.config.seq_window_size < batch.inclusion_block_number { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Batch is not timely: {} + {} < {}", batch.essence.epoch_num, self.config.seq_window_size, @@ -364,7 +364,7 @@ impl Batcher { // "batch.epoch_num < epoch.number -> drop" if batch.essence.epoch_num < epoch.number { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Batch epoch number is too low: {} < {}", batch.essence.epoch_num, epoch.number @@ -389,7 +389,7 @@ impl Batcher { // From the spec: // "batch.epoch_num > epoch.number+1 -> drop" #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Batch epoch number is too large: {} > {}", batch.essence.epoch_num, epoch.number + 1 @@ -401,7 +401,7 @@ impl Batcher { // "batch.epoch_hash != batch_origin.hash -> drop" if batch.essence.epoch_hash != batch_origin.hash { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Epoch hash mismatch: {} != {}", batch.essence.epoch_hash, batch_origin.hash @@ -413,7 +413,7 @@ impl Batcher { // "batch.timestamp < batch_origin.time -> drop" if batch.essence.timestamp < batch_origin.timestamp { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::warn!( "Batch violates timestamp rule: {} < {}", 
batch.essence.timestamp, batch_origin.timestamp @@ -437,7 +437,7 @@ impl Batcher { // "len(batch.transactions) > 0: -> drop" if !batch.essence.transactions.is_empty() { #[cfg(not(target_os = "zkvm"))] - log::debug!("Sequencer drift detected for non-empty batch; drop."); + log::warn!("Sequencer drift detected for non-empty batch; drop."); return BatchStatus::Drop; } @@ -451,7 +451,7 @@ impl Batcher { // "If batch.timestamp >= next_epoch.time -> drop" if batch.essence.timestamp >= next_epoch.timestamp { #[cfg(not(target_os = "zkvm"))] - log::debug!("Sequencer drift detected; drop; batch timestamp is too far into the future. {} >= {}", batch.essence.timestamp, next_epoch.timestamp); + log::warn!("Sequencer drift detected; drop; batch timestamp is too far into the future. {} >= {}", batch.essence.timestamp, next_epoch.timestamp); return BatchStatus::Drop; } } else { @@ -472,7 +472,7 @@ impl Batcher { for tx in &batch.essence.transactions { if matches!(tx.first(), None | Some(&OPTIMISM_DEPOSITED_TX_TYPE)) { #[cfg(not(target_os = "zkvm"))] - log::debug!("Batch contains empty or invalid transaction"); + log::warn!("Batch contains empty or invalid transaction"); return BatchStatus::Drop; } } diff --git a/lib/src/optimism/batcher_channel.rs b/lib/src/optimism/batcher_channel.rs index a3b9fb12..3054ffdf 100644 --- a/lib/src/optimism/batcher_channel.rs +++ b/lib/src/optimism/batcher_channel.rs @@ -74,7 +74,7 @@ impl BatcherChannels { } #[cfg(not(target_os = "zkvm"))] - log::debug!("received batcher tx: {}", tx.hash()); + log::trace!("received batcher tx: {}", tx.hash()); // From the spec: // "If any one frame fails to parse, the all frames in the transaction are rejected." 
@@ -93,7 +93,7 @@ impl BatcherChannels { // load received frames into the channel bank for frame in frames { #[cfg(not(target_os = "zkvm"))] - log::debug!( + log::trace!( "received frame: channel_id={}, frame_number={}, is_last={}", frame.channel_id, frame.number, @@ -117,7 +117,7 @@ impl BatcherChannels { while matches!(self.channels.front(), Some(channel) if channel.is_ready()) { let channel = self.channels.pop_front().unwrap(); #[cfg(not(target_os = "zkvm"))] - log::debug!("received channel: {}", channel.id); + log::trace!("received channel: {}", channel.id); self.batches.push_back(channel.read_batches(block_number)); } diff --git a/lib/src/optimism/mod.rs b/lib/src/optimism/mod.rs index 77658180..677b8472 100644 --- a/lib/src/optimism/mod.rs +++ b/lib/src/optimism/mod.rs @@ -140,7 +140,7 @@ impl DeriveMachine { let op_head_block_hash = op_head.block_header.hash(); #[cfg(not(target_os = "zkvm"))] - log::info!( + log::debug!( "Fetched Op head (block no {}) {}", derive_input.op_head_block_no, op_head_block_hash @@ -177,7 +177,7 @@ impl DeriveMachine { "Ethereum head block hash mismatch" ); #[cfg(not(target_os = "zkvm"))] - log::info!( + log::debug!( "Fetched Eth head (block no {}) {}", eth_block_no, set_l1_block_values.hash @@ -243,7 +243,7 @@ impl DeriveMachine { while self.op_head_block_header.number < target_block_no { #[cfg(not(target_os = "zkvm"))] - log::info!( + log::trace!( "op_block_no = {}, eth_block_no = {}", self.op_head_block_header.number, self.op_batcher.state.current_l1_block_number @@ -269,7 +269,7 @@ impl DeriveMachine { // Process the batch #[cfg(not(target_os = "zkvm"))] - log::info!( + log::debug!( "Read batch for Op block {}: timestamp={}, epoch={}, tx count={}, parent hash={:?}", self.op_head_block_header.number + 1, op_batch.0.timestamp, @@ -482,6 +482,8 @@ impl DeriveMachine { BlockBuildOutput::FAILURE { state_input_hash: bad_input_hash, } => { + #[cfg(not(target_os = "zkvm"))] + log::warn!("Failed to build block from batch"); ensure!( 
new_op_head_input.state_input.hash() == bad_input_hash, "Invalid input partial hash" diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index e2d98406..534315d2 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -25,6 +25,9 @@ sha2 = { version = "=0.10.6", default-features = false } sha3 = "0.10" thiserror = "1.0" +[target.'cfg(not(target_os = "zkvm"))'.dependencies] +log = "0.4" + [dev-dependencies] bincode = "1.3" hex-literal = "0.4" diff --git a/testing/ef-tests/testguest/Cargo.lock b/testing/ef-tests/testguest/Cargo.lock index c4ee9e04..a1217cc6 100644 --- a/testing/ef-tests/testguest/Cargo.lock +++ b/testing/ef-tests/testguest/Cargo.lock @@ -3997,6 +3997,7 @@ dependencies = [ "bytes", "ethers-core", "k256", + "log", "revm-primitives", "rlp", "serde",