diff --git a/Cargo.lock b/Cargo.lock index 7506201fcf..767f3f055a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -61,7 +61,7 @@ dependencies = [ "log", "num-bigint", "once_cell", - "rand", + "rand 0.8.5", "revm-precompile 9.2.0", "revm-primitives 7.1.0", "serde", @@ -154,7 +154,7 @@ dependencies = [ "k256", "keccak-asm", "proptest", - "rand", + "rand 0.8.5", "ruint", "serde", "tiny-keccak", @@ -180,7 +180,7 @@ dependencies = [ "keccak-asm", "paste", "proptest", - "rand", + "rand 0.8.5", "ruint", "rustc-hash 2.0.0", "serde", @@ -398,7 +398,7 @@ checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" dependencies = [ "colored", "num-traits", - "rand", + "rand 0.8.5", ] [[package]] @@ -408,7 +408,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", ] [[package]] @@ -640,7 +640,7 @@ dependencies = [ "group", "pairing", "pasta_curves", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -693,7 +693,7 @@ dependencies = [ "num", "poseidon-circuit", "pretty_assertions", - "rand", + "rand 0.8.5", "rayon", "revm-precompile 9.2.0", "serde", @@ -854,7 +854,7 @@ dependencies = [ "itertools 0.11.0", "log", "mock", - "rand", + "rand 0.8.5", "rand_chacha", "rand_xorshift", "tokio", @@ -952,7 +952,7 @@ dependencies = [ "hmac", "once_cell", "pbkdf2 0.12.2", - "rand", + "rand 0.8.5", "sha2", "thiserror", ] @@ -1146,7 +1146,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1407,7 +1407,7 @@ dependencies = [ "group", "pem-rfc7468", "pkcs8", - "rand_core", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -1437,7 +1437,7 @@ checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoder" version = "0.1.0" -source = "git+https://github.com/scroll-tech/da-codec.git?tag=v0.1.0#5a28b752d4504bf0966734fe4a6a5433981c74c2" +source = "git+https://github.com/scroll-tech/da-codec.git?tag=v0.1.2#8c5d2f0cd707153151a5154fef702204f6ca40b3" dependencies = [ "zstd 0.13.0", ] @@ -1462,7 +1462,7 @@ dependencies = [ "hex", "k256", "log", - "rand", + "rand 0.8.5", "rlp", "serde", "sha3 0.10.8", @@ -1521,7 +1521,7 @@ dependencies = [ "hex", "hmac", "pbkdf2 0.11.0", - "rand", + "rand 0.8.5", "scrypt", "serde", "serde_json", @@ -1706,7 +1706,7 @@ dependencies = [ "num_enum 0.6.1", "once_cell", "open-fastrlp", - "rand", + "rand 0.8.5", "rlp", "serde", "serde_json", @@ -1807,7 +1807,7 @@ dependencies = [ "eth-keystore", "ethers-core", "hex", - "rand", + "rand 0.8.5", "sha2", "thiserror", "tracing", @@ -1889,7 +1889,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "bitvec", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1900,7 +1900,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand", + "rand 0.8.5", "rustc-hex", "static_assertions", ] @@ -1952,6 +1952,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" 
+ [[package]] name = "funty" version = "2.0.0" @@ -2083,7 +2089,7 @@ dependencies = [ "eth-types", "halo2_proofs", "poseidon-base", - "rand", + "rand 0.8.5", "rand_xorshift", "sha3 0.10.8", "strum 0.25.0", @@ -2179,7 +2185,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -2229,9 +2235,9 @@ dependencies = [ "num-bigint", "num-integer", "num-traits", - "rand", + "rand 0.8.5", "rand_chacha", - "rand_core", + "rand_core 0.6.4", "serde", "serde_json", ] @@ -2244,7 +2250,7 @@ dependencies = [ "halo2_proofs", "lazy_static", "num-bigint", - "rand", + "rand 0.8.5", "serde", "serde_json", "strum 0.24.1", @@ -2266,7 +2272,7 @@ dependencies = [ "num-bigint", "num-traits", "poseidon-circuit", - "rand", + "rand 0.8.5", "rand_chacha", "serde", "serde_json", @@ -2287,7 +2293,7 @@ dependencies = [ "halo2_proofs", "halo2curves", "lazy_static", - "rand", + "rand 0.8.5", "subtle", "uint", ] @@ -2310,7 +2316,7 @@ dependencies = [ "num-integer", "poseidon", "rand_chacha", - "rand_core", + "rand_core 0.6.4", "rayon", "sha3 0.9.1", "subtle", @@ -2333,8 +2339,8 @@ dependencies = [ "pairing", "pasta_curves", "paste", - "rand", - "rand_core", + "rand 0.8.5", + "rand_core 0.6.4", "serde", "serde_arrays", "static_assertions", @@ -2663,7 +2669,7 @@ dependencies = [ "paste", "pretty_assertions", "rand_chacha", - "rand_core", + "rand_core 0.6.4", "rand_xorshift", "serde", "serde_json", @@ -2955,7 +2961,7 @@ dependencies = [ "halo2-gate-generator", "halo2_proofs", "num-bigint", - "rand", + "rand 0.8.5", "serde", "serde_json", "strum 0.25.0", @@ -2973,7 +2979,7 @@ dependencies = [ "external-tracer", "itertools 0.11.0", "log", - "rand", + "rand 0.8.5", "rand_chacha", ] @@ -3021,7 +3027,7 @@ checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", - "rand", + "rand 0.8.5", ] [[package]] @@ -3272,7 +3278,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -3286,7 +3292,7 @@ dependencies = [ "ff", "group", "lazy_static", - "rand", + "rand 0.8.5", "static_assertions", "subtle", ] @@ -3422,7 +3428,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared 0.11.2", - "rand", + "rand 0.8.5", ] [[package]] @@ -3539,7 +3545,7 @@ dependencies = [ "halo2_proofs", "log", "poseidon-base", - "rand", + "rand 0.8.5", "rand_xorshift", "thiserror", ] @@ -3661,7 +3667,7 @@ dependencies = [ "bitflags 2.6.0", "lazy_static", "num-traits", - "rand", + "rand 0.8.5", "rand_chacha", "rand_xorshift", "regex-syntax 0.8.4", @@ -3691,7 +3697,7 @@ dependencies = [ "log4rs", "mpt-zktrie", "num-bigint", - "rand", + "rand 0.8.5", "rand_xorshift", "revm 17.1.0", "serde", @@ -3701,6 +3707,8 @@ dependencies = [ "sha2", "snark-verifier", "snark-verifier-sdk", + "tempdir", + "thiserror", "zkevm-circuits", ] @@ -3734,6 +3742,19 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + [[package]] name = "rand" version = "0.8.5" @@ -3742,7 +3763,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3752,9 +3773,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", ] +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + [[package]] name = "rand_core" version = "0.6.4" @@ -3770,7 +3806,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3793,6 +3829,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + [[package]] name = "redox_syscall" version = "0.5.3" @@ -3848,6 +3893,15 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + [[package]] name = "reqwest" version = "0.11.27" @@ -4111,7 +4165,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "proptest", - "rand", + "rand 0.8.5", "rlp", "ruint-macro", "serde", @@ -4344,7 +4398,7 @@ version = "0.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113" dependencies = [ - "rand", + "rand 0.8.5", "secp256k1-sys 0.10.0", ] @@ -4567,7 +4621,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4606,7 +4660,7 @@ dependencies = [ "num-integer", "num-traits", "poseidon", - "rand", + "rand 0.8.5", "revm 2.3.1", "rlp", "rustc-hash 1.1.0", @@ -4630,7 +4684,7 @@ dependencies = [ "num-bigint", "num-integer", "num-traits", - "rand", + "rand 0.8.5", "rand_chacha", "serde", "serde_json", @@ -4777,7 +4831,7 @@ dependencies = [ "byteorder", "crunchy", "lazy_static", - "rand", + "rand 0.8.5", "rustc-hex", ] @@ -4862,6 +4916,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tempdir" +version = "0.3.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + [[package]] name = "tempfile" version = "3.12.0" @@ -4919,7 +4983,7 @@ dependencies = [ "mock", "prettytable-rs", "prover", - "rand", + "rand 0.8.5", "rand_chacha", "rayon", "regex", @@ -5201,7 +5265,7 @@ dependencies = [ "http", "httparse", "log", - "rand", + "rand 0.8.5", "rustls", "sha1", "thiserror", @@ -5824,7 +5888,7 @@ dependencies = [ "paste", "poseidon-circuit", "pretty_assertions", - "rand", + "rand 0.8.5", "rand_chacha", "rand_xorshift", "rayon", diff --git a/aggregator/Cargo.toml b/aggregator/Cargo.toml index 08b9cb7b62..3cb4cee33e 100644 --- a/aggregator/Cargo.toml +++ b/aggregator/Cargo.toml @@ -38,7 +38,7 @@ num-bigint.workspace = true # da-compression bitstream-io = "2.2.0" -zstd-encoder = { package = "encoder", git = "https://github.com/scroll-tech/da-codec.git", tag = "v0.1.0" } +zstd-encoder = { package = "encoder", git = "https://github.com/scroll-tech/da-codec.git", tag = "v0.1.2" } [dev-dependencies] diff --git a/aggregator/src/blob_consistency.rs b/aggregator/src/blob_consistency.rs index b49f676011..86eabd90dc 100644 --- a/aggregator/src/blob_consistency.rs +++ b/aggregator/src/blob_consistency.rs @@ -19,9 +19,9 @@ mod eip4844; cfg_if! { if #[cfg(feature = "da-avail")] { // const DATA_AVAILABILITY: DataAvailability = DataAvailability::Avail; - pub use avail::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH}; + pub use avail::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH, get_blob_bytes}; } else { // const DATA_AVAILABILITY: DatayAvailability = DataAvailability::Eip4844; - pub use eip4844::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH}; + pub use eip4844::{BlobConsistencyConfig, BlobConsistencyWitness, BLOB_WIDTH, get_blob_bytes}; } } diff --git a/aggregator/src/blob_consistency/avail.rs b/aggregator/src/blob_consistency/avail.rs index 7c7474c8be..88b93eada7 100644 --- a/aggregator/src/blob_consistency/avail.rs +++ b/aggregator/src/blob_consistency/avail.rs @@ -98,3 +98,8 @@ pub struct AssignedBarycentricEvaluationConfig { /// 32 Assigned cells representing the LE-bytes of evaluation y. pub(crate) y_le: Vec>, } + +/// Get the blob data bytes that will be populated in BlobDataConfig. +pub fn get_blob_bytes(_batch_bytes: &[u8]) -> Vec { + unimplemented!("trick for linting"); +} diff --git a/aggregator/src/blob_consistency/eip4844.rs b/aggregator/src/blob_consistency/eip4844.rs index 8958f0ffd5..3a2b81781b 100644 --- a/aggregator/src/blob_consistency/eip4844.rs +++ b/aggregator/src/blob_consistency/eip4844.rs @@ -84,7 +84,6 @@ fn kzg_to_versioned_hash(commitment: &c_kzg::KzgCommitment) -> H256 { H256::from_slice(&res[..]) } -#[cfg(test)] /// Get the blob data bytes that will be populated in BlobDataConfig. 
pub fn get_blob_bytes(batch_bytes: &[u8]) -> Vec<u8> { let mut blob_bytes = crate::witgen::zstd_encode(batch_bytes); diff --git a/aggregator/src/lib.rs b/aggregator/src/lib.rs index 684a33c973..4ce52cd83a 100644 --- a/aggregator/src/lib.rs +++ b/aggregator/src/lib.rs @@ -27,6 +27,7 @@ mod tests; pub use self::core::extract_proof_and_instances_with_pairing_check; pub use aggregation::*; pub use batch::{BatchHash, BatchHeader}; +pub use blob_consistency::get_blob_bytes; pub use chunk::ChunkInfo; pub use compression::*; pub use constants::MAX_AGG_SNARKS; diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 30ddae897b..d5914e99f8 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -34,9 +34,13 @@ serde.workspace = true serde_derive = "1.0" serde_json = { workspace = true, features = ["unbounded_depth"] } serde_stacker.workspace = true +thiserror = "1.0" sha2 ="0.10.2" revm = { version = "17.1.0", default-features = false, features = ["std"] } +[dev-dependencies] +tempdir = "0.3" + [features] default = ["scroll"] parallel_syn = ["halo2_proofs/parallel_syn", "zkevm-circuits/parallel_syn"] diff --git a/prover/src/aggregator.rs b/prover/src/aggregator.rs deleted file mode 100644 index 177c313c1a..0000000000 --- a/prover/src/aggregator.rs +++ /dev/null @@ -1,6 +0,0 @@ -mod prover; -mod verifier; - -pub use self::prover::{check_chunk_hashes, Prover}; -pub use aggregator::{BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; -pub use verifier::Verifier; diff --git a/prover/src/aggregator/error.rs b/prover/src/aggregator/error.rs new file mode 100644 index 0000000000..808f9ce60d --- /dev/null +++ b/prover/src/aggregator/error.rs @@ -0,0 +1,43 @@ +/// Errors encountered in the proof generation pipeline for batch and bundle proving. +#[derive(thiserror::Error, Debug)] +pub enum BatchProverError { + /// Represents a mismatch in the verifying key at the specified proof layer. + #[error("verifying key mismatch: layer={0}, expected={1}, found={2}")] + VerifyingKeyMismatch(crate::config::LayerId, String, String), + /// Verifying key for the specified layer was not found in the prover. + #[error("verifying key not found: layer={0}, expected={1}")] + VerifyingKeyNotFound(crate::config::LayerId, String), + /// Sanity check failure indicating that the [`Snark`][snark_verifier_sdk::Snark] + /// [`protocol`][snark_verifier::Protocol] did not match the expected protocols. + #[error("SNARK protocol mismatch: index={0}, expected={1}, found={2}")] + ChunkProtocolMismatch(usize, String, String), + /// Indicates that after generating an EVM verifier contract, the proof itself could not be + /// verified successfully, implying that this sanity check failed. + #[error("EVM verifier contract could not verify proof")] + SanityEVMVerifier, + /// Error indicating that the verification of batch proof failed. + #[error("proof verification failure")] + Verification, + /// Error indicating that the verifier contract's deployment code is not found. + #[error("EVM verifier deployment code not found!")] + VerifierCodeMissing, + /// Error indicating that in the final [`BundleProof`][crate::BundleProofV2] the number of + /// instances found does not match the number of instances expected. + #[error("number of instances in bundle proof mismatch! expected={0}, got={1}")] + PublicInputsMismatch(usize, usize), + /// This variant represents other errors.
+ #[error("custom: {0}")] + Custom(String), +} + +impl From for BatchProverError { + fn from(value: String) -> Self { + Self::Custom(value) + } +} + +impl From for BatchProverError { + fn from(value: anyhow::Error) -> Self { + Self::Custom(value.to_string()) + } +} diff --git a/prover/src/aggregator/mod.rs b/prover/src/aggregator/mod.rs new file mode 100644 index 0000000000..6351df6be3 --- /dev/null +++ b/prover/src/aggregator/mod.rs @@ -0,0 +1,20 @@ +mod error; +pub use error::BatchProverError; + +mod prover; +pub use prover::{check_chunk_hashes, Prover}; + +mod recursion; +pub use recursion::RecursionTask; + +mod verifier; +pub use verifier::Verifier; + +/// Re-export some types from the [`aggregator`] crate. +pub use aggregator::{get_blob_bytes, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; + +/// Alias for convenience. +pub type BatchProver<'a> = Prover<'a>; + +/// Alias for convenience. +pub type BatchVerifier<'a> = Verifier<'a>; diff --git a/prover/src/aggregator/prover.rs b/prover/src/aggregator/prover.rs index 8c4f9356ce..bd8235ffad 100644 --- a/prover/src/aggregator/prover.rs +++ b/prover/src/aggregator/prover.rs @@ -1,49 +1,82 @@ -use std::{collections::BTreeMap, env, iter::repeat}; +use std::{env, path::PathBuf}; use aggregator::{decode_bytes, BatchData, BatchHash, BatchHeader, ChunkInfo, MAX_AGG_SNARKS}; -use anyhow::{bail, Result}; use eth_types::H256; -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; use sha2::{Digest, Sha256}; use snark_verifier_sdk::Snark; use crate::{ + aggregator::BatchProverError, common, config::LayerId, consts::{ BATCH_KECCAK_ROW, BATCH_VK_FILENAME, BUNDLE_VK_FILENAME, FD_HALO2_CHUNK_PROTOCOL, FD_SP1_CHUNK_PROTOCOL, }, - io::{force_to_read, try_to_read}, - proof::BundleProof, types::BundleProvingTask, - BatchProof, BatchProvingTask, ChunkKind, ChunkProof, + utils::{force_read, try_read}, + BatchProofV2, BatchProofV2Metadata, BatchProvingTask, BundleProofV2, ChunkKind, ChunkProof, + ChunkProofV2, ParamsMap, ProverError, }; +/// Prover capable of generating [`BatchProof`] and [`BundleProof`]. #[derive(Debug)] pub struct Prover<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulating the common prover. pub prover_impl: common::Prover<'params>, - pub halo2_protocol: Vec, - pub sp1_protocol: Vec, + /// The SNARK [`protocol`][snark_verifier::Protocol] for the halo2-based route, i.e. where + /// the inner SNARK is generated using the [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit]. + halo2_protocol: Vec, + /// The SNARK [`protocol`][snark_verifier::Protocol] for the sp1-based route, i.e. where the + /// inner proof is an Sp1 compressed proof, later SNARKified using a halo2-backend. + sp1_protocol: Vec, + /// The verifying key for [`Layer-4`][LayerId::Layer4] in the proof generation pipeline, i.e. + /// the [`CompressionCircuit`][aggregator::CompressionCircuit] SNARK on top of the + /// [`BatchCircuit`][aggregator::BatchCircuit] SNARK. + /// + /// This is an optional field, as it is generated on-the-fly for dev-mode, while the verifying + /// key is expected in production environments. + /// + /// The verifying key is specified in its raw byte-encoded format. raw_vk_batch: Option>, + /// The verifying key for [`Layer-6`][LayerId::Layer6] in the proof generation pipeline, i.e. + /// the [`CompressionCircuit`][aggregator::CompressionCircuit] SNARK on top of the + /// [`RecursionCircuit`][aggregator::RecursionCircuit] SNARK. 
+ /// + /// This is an optional field, as it is generated on-the-fly for dev-mode, while the verifying + /// key is expected in production environments. + /// + /// The verifying key is specified in its raw byte-encoded format. raw_vk_bundle: Option<Vec<u8>>, } impl<'params> Prover<'params> { - pub fn from_params_and_assets( - params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, - assets_dir: &str, - ) -> Self { + /// Construct batch prover given a map of degree to KZG setup parameters and a path to the + /// assets directory. + /// + /// Panics if the SNARK [`protocols`][snark_verifier::Protocol] for both [`chunk proof variants`][crate::proof::ChunkKind] + /// are not found in the assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Set the number of rows in the keccak-circuit's config. The value is eventually read + // to configure the keccak config at runtime. log::debug!("set env KECCAK_ROWS={}", BATCH_KECCAK_ROW.to_string()); env::set_var("KECCAK_ROWS", BATCH_KECCAK_ROW.to_string()); + // Construct the inner common prover. let prover_impl = common::Prover::from_params_map(params_map); - let halo2_protocol = force_to_read(assets_dir, &FD_HALO2_CHUNK_PROTOCOL); - let sp1_protocol = force_to_read(assets_dir, &FD_SP1_CHUNK_PROTOCOL); - let raw_vk_batch = try_to_read(assets_dir, &BATCH_VK_FILENAME); - let raw_vk_bundle = try_to_read(assets_dir, &BUNDLE_VK_FILENAME); + // The SNARK protocols for both variants of the Layer-2 SNARK must be available in the + // assets directory before setting up the batch prover. The SNARK protocols are + // specifically for the halo2-route and sp1-route of generating chunk proofs. + let halo2_protocol = + force_read(PathBuf::from(assets_dir).join(FD_HALO2_CHUNK_PROTOCOL.clone())); + let sp1_protocol = + force_read(PathBuf::from(assets_dir).join(FD_SP1_CHUNK_PROTOCOL.clone())); + + // Try to read the verifying key for both Layer-4 and Layer-6 compression circuits. + let raw_vk_batch = try_read(PathBuf::from(assets_dir).join(BATCH_VK_FILENAME.clone())); + let raw_vk_bundle = try_read(PathBuf::from(assets_dir).join(BUNDLE_VK_FILENAME.clone())); + if raw_vk_batch.is_none() { log::warn!( "batch-prover: {} doesn't exist in {}", @@ -68,120 +101,199 @@ impl<'params> Prover<'params> { } } - // Return true if chunk proofs are valid (same protocol), false otherwise. - pub fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProof]) -> bool { - chunk_proofs.iter().enumerate().all(|(i, proof)| { - let protocol_expected = match proof.chunk_kind { - ChunkKind::Halo2 => &self.halo2_protocol, - ChunkKind::Sp1 => &self.sp1_protocol, - }; - let result = &proof.protocol == protocol_expected; - if !result { - log::error!( - "Non-match protocol of chunk-proof index-{}: expected = {:x}, actual = {:x}", - i, - Sha256::digest(protocol_expected), - Sha256::digest(&proof.protocol), - ); - } - - result - }) - } - + /// Returns the optional verifying key for [`Layer-4`][LayerId::Layer4] in byte-encoded form. pub fn get_batch_vk(&self) -> Option<Vec<u8>> { self.prover_impl .raw_vk(LayerId::Layer4.id()) .or_else(|| self.raw_vk_batch.clone()) } + /// Returns the optional verifying key for [`Layer-6`][LayerId::Layer6] in byte-encoded form. pub fn get_bundle_vk(&self) -> Option<Vec<u8>> { self.prover_impl .raw_vk(LayerId::Layer6.id()) .or_else(|| self.raw_vk_bundle.clone()) } - // Return the batch proof for a BatchProvingTask. - // TODO: should we rename the method name to `load_or_gen_batch_proof`? + /// Generate [`BatchProof`] given a [`BatchProvingTask`].
+ /// + /// The [`Layer-2`][LayerId::Layer2] SNARKs representing chunk proofs are aggregated using the + /// [`Layer-3`][LayerId::Layer3] [`BatchCircuit`][aggregator::BatchCircuit] and this SNARK is + /// then compressed using the [`Layer-4`][LayerId::Layer4] + /// [`CompressionCircuit`][aggregator::CompressionCircuit]. + /// + /// Returns early if a batch proof with a matching proof identifier is found on disk in the + /// provided output directory. pub fn gen_batch_proof( &mut self, batch: BatchProvingTask, name: Option<&str>, output_dir: Option<&str>, - ) -> Result<BatchProof> { + ) -> Result<BatchProofV2, ProverError> { + // Denotes the identifier for this batch proving task. Eventually a generated proof is + // cached to disk using this identifier. let name = name.map_or_else(|| batch.identifier(), |name| name.to_string()); - log::info!("gen_batch_proof with identifier {name}"); + // Return early if the batch proof was found on disk. if let Some(output_dir) = output_dir { - if let Ok(batch_proof) = BatchProof::from_json_file(output_dir, &name) { - log::info!("batch proof loaded from {output_dir}"); + if let Ok(batch_proof) = BatchProofV2::from_json(output_dir, &name) { return Ok(batch_proof); } } + // Load from disk or generate the layer-3 SNARK using the batch circuit. let (layer3_snark, batch_hash) = - self.load_or_gen_last_agg_snark::<MAX_AGG_SNARKS>(&name, batch, output_dir)?; - - // Load or generate batch compression thin proof (layer-4). - let layer4_snark = self.prover_impl.load_or_gen_comp_snark( - &name, - LayerId::Layer4.id(), - true, - LayerId::Layer4.degree(), - layer3_snark, - output_dir, - )?; + self.load_or_gen_last_agg_snark::<MAX_AGG_SNARKS>(batch, &name, output_dir)?; + + // Load from disk or generate the layer-4 SNARK using the thin compression circuit. + let layer4_snark = self + .prover_impl + .load_or_gen_comp_snark( + &name, + LayerId::Layer4.id(), + true, + LayerId::Layer4.degree(), + layer3_snark, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; log::info!("Got batch compression thin proof (layer-4): {name}"); - self.check_batch_vk(); + // Sanity check on the layer-4 verifying key. + self.check_batch_vk()?; + // Get the proving key for layer-4. let pk = self.prover_impl.pk(LayerId::Layer4.id()); - let batch_proof = BatchProof::new(layer4_snark, pk, batch_hash)?; + + // Build a wrapper around the layer-4 SNARK, aka batch proof. + let batch_proof_metadata = BatchProofV2Metadata::new(&layer4_snark, batch_hash)?; + let batch_proof = BatchProofV2::new(layer4_snark, pk, batch_proof_metadata)?; + + // If an output directory was provided, write the generated batch proof and layer-4 + // verifying key to disk. if let Some(output_dir) = output_dir { - batch_proof.dump_vk(output_dir, "agg")?; batch_proof.dump(output_dir, &name)?; - log::debug!("batch proof dumped to {output_dir}"); } Ok(batch_proof) } - // Generate layer3 snark. - // Then it could be used to generate a layer4 proof. - pub fn load_or_gen_last_agg_snark<const N_SNARKS: usize>( + /// Generate [`BundleProof`] given a [`BundleProvingTask`]. + /// + /// The bundle proving task consists of a list of [`Layer-4`][LayerId::Layer4] + /// [`BatchProofs`][BatchProof] representing the batches being bundled. + /// + /// The [`RecursionCircuit`][aggregator::RecursionCircuit] recursively proves the correctness + /// of all those batch proofs.
+ pub fn gen_bundle_proof( &mut self, - name: &str, - batch: BatchProvingTask, + bundle: BundleProvingTask, + name: Option<&str>, output_dir: Option<&str>, - ) -> Result<(Snark, H256)> { - let real_chunk_count = batch.chunk_proofs.len(); - assert!((1..=MAX_AGG_SNARKS).contains(&real_chunk_count)); + ) -> Result<BundleProofV2, ProverError> { + // Denotes the identifier for this bundle proving task. Eventually a generated proof is + // written to disk using this name. + let name = name.map_or_else(|| bundle.identifier(), |name| name.to_string()); + + // Collect the layer-4 SNARKs from the batch proofs. + let bundle_snarks = bundle + .batch_proofs + .iter() + .map(Snark::try_from) + .collect::<Result<Vec<_>, _>>()?; + + // Load from disk or generate a layer-5 Recursive Circuit SNARK. + let layer5_snark = self + .prover_impl + .load_or_gen_recursion_snark( + &name, + LayerId::Layer5.id(), + LayerId::Layer5.degree(), + &bundle_snarks, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + + // Load from disk or generate a layer-6 Compression Circuit SNARK. Since we use a Keccak + // hasher for the proof transcript at layer-6, the output proof is EVM-verifiable. + let layer6_proof = self + .prover_impl + .load_or_gen_comp_evm_proof( + &name, + LayerId::Layer6.id(), + true, + LayerId::Layer6.degree(), + layer5_snark, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + + // Sanity check for the layer-6 verifying key. + self.check_bundle_vk()?; + + // Wrap the layer-6 proof into the wrapper Bundle Proof. + let bundle_proof = BundleProofV2::new_from_raw( + &layer6_proof.proof.proof, + &layer6_proof.proof.instances, + &layer6_proof.proof.vk, + )?; - if !self.check_protocol_of_chunks(&batch.chunk_proofs) { - bail!("non-match-chunk-protocol: {name}"); + // If an output directory was provided, write the bundle proof to disk. + if let Some(output_dir) = output_dir { + bundle_proof.dump(output_dir, "recursion")?; } - let mut chunk_hashes: Vec<_> = batch + + Ok(bundle_proof) + } + + /// Generate the [`Layer-3`][LayerId::Layer3] SNARK using the [`BatchCircuit`][aggregator::BatchCircuit]. + /// + /// Returns early if the SNARK was located on disk. + fn load_or_gen_last_agg_snark<const N_SNARKS: usize>( + &mut self, + batch: BatchProvingTask, + name: &str, + output_dir: Option<&str>, + ) -> Result<(Snark, H256), ProverError> { + // Early return with an error if the number of SNARKs to aggregate is not within limits. + let num_chunks = batch.chunk_proofs.len(); + if !(1..=MAX_AGG_SNARKS).contains(&num_chunks) { + return Err(BatchProverError::Custom(format!( + "1 <= num_chunks <= MAX_AGG_SNARKS, found={num_chunks}" + )) + .into()); + } + + // Sanity check on the chunk proofs' SNARK protocols. + self.check_protocol_of_chunks(&batch.chunk_proofs)?; + + // Split chunk info and snarks from the batch proving task. + let mut chunk_infos = batch .chunk_proofs .iter() - .map(|p| p.chunk_info.clone()) - .collect(); - let mut layer2_snarks: Vec<_> = batch + .map(|proof| proof.inner.chunk_info().clone()) + .collect::<Vec<_>>(); + let mut layer2_snarks = batch .chunk_proofs - .into_iter() - .map(|p| p.to_snark()) - .collect(); - - if real_chunk_count < MAX_AGG_SNARKS { - let padding_snark = layer2_snarks.last().unwrap().clone(); - let mut padding_chunk_hash = chunk_hashes.last().unwrap().clone(); - padding_chunk_hash.is_padding = true; - - // Extend to MAX_AGG_SNARKS for both chunk hashes and layer-2 snarks.
- chunk_hashes.extend(repeat(padding_chunk_hash).take(MAX_AGG_SNARKS - real_chunk_count)); - layer2_snarks.extend(repeat(padding_snark).take(MAX_AGG_SNARKS - real_chunk_count)); + .iter() + .map(Snark::try_from) + .collect::<Result<Vec<_>, ProverError>>()?; + + // Pad the SNARKs with the last SNARK until we have MAX_AGG_SNARKS number of SNARKs. + if num_chunks < MAX_AGG_SNARKS { + let padding_chunk_info = { + let mut last_chunk = chunk_infos.last().expect("num_chunks > 0").clone(); + last_chunk.is_padding = true; + last_chunk + }; + let padding_snark = layer2_snarks.last().expect("num_chunks > 0").clone(); + + // Extend to MAX_AGG_SNARKS for both chunk infos and layer-2 snarks. + chunk_infos.resize(MAX_AGG_SNARKS, padding_chunk_info); + layer2_snarks.resize(MAX_AGG_SNARKS, padding_snark); } - // Load or generate aggregation snark (layer-3). + // Reconstruct the batch header. let batch_header = BatchHeader::construct_from_chunks( batch.batch_header.version, batch.batch_header.batch_index, @@ -189,162 +301,187 @@ batch.batch_header.total_l1_message_popped, batch.batch_header.parent_batch_hash, batch.batch_header.last_block_timestamp, - &chunk_hashes, + &chunk_infos, &batch.blob_bytes, ); + let batch_hash = batch_header.batch_hash(); - // sanity check between: - // - BatchHeader supplied from infra - // - BatchHeader re-constructed by circuits + // Sanity checks between the Batch Header supplied vs reconstructed. // - // for the fields data_hash, z, y, blob_versioned_hash. - assert_eq!( - batch_header.data_hash, batch.batch_header.data_hash, - "BatchHeader(sanity) mismatch data_hash expected={}, got={}", - batch.batch_header.data_hash, batch_header.data_hash - ); - assert_eq!( - batch_header.blob_data_proof[0], batch.batch_header.blob_data_proof[0], - "BatchHeader(sanity) mismatch blob data proof (z) expected={}, got={}", - batch_header.blob_data_proof[0], batch.batch_header.blob_data_proof[0], - ); - assert_eq!( - batch_header.blob_data_proof[1], batch.batch_header.blob_data_proof[1], - "BatchHeader(sanity) mismatch blob data proof (y) expected={}, got={}", - batch_header.blob_data_proof[1], batch.batch_header.blob_data_proof[1], - ); - assert_eq!( - batch_header.blob_versioned_hash, batch.batch_header.blob_versioned_hash, - "BatchHeader(sanity) mismatch blob versioned hash expected={}, got={}", - batch_header.blob_versioned_hash, batch.batch_header.blob_versioned_hash, - ); + // Batch's data_hash field must match. + if batch_header.data_hash != batch.batch_header.data_hash { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) data_hash mismatch! expected={}, got={}", + batch.batch_header.data_hash, batch_header.data_hash + )) + .into()); + } + // Batch's random challenge point (z) must match. + if batch_header.blob_data_proof[0] != batch.batch_header.blob_data_proof[0] { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) random challenge (z) mismatch! expected={}, got={}", + batch.batch_header.blob_data_proof[0], batch_header.blob_data_proof[0], + )) + .into()); + } + // Batch's evaluation at z, i.e. y, must match. + if batch_header.blob_data_proof[1] != batch.batch_header.blob_data_proof[1] { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) evaluation (y) mismatch! expected={}, got={}", + batch.batch_header.blob_data_proof[1], batch_header.blob_data_proof[1], + )) + .into()); + } + // The versioned hash of the blob that encodes the batch must match.
+ if batch_header.blob_versioned_hash != batch.batch_header.blob_versioned_hash { + return Err(BatchProverError::Custom(format!( + "BatchHeader(sanity) blob versioned_hash mismatch! expected={}, got={}", + batch.batch_header.blob_versioned_hash, batch_header.blob_versioned_hash, + )) + .into()); + } - let batch_hash = batch_header.batch_hash(); + // Build relevant types that are used for batch circuit witness assignments. let batch_info: BatchHash<N_SNARKS> = - BatchHash::construct(&chunk_hashes, batch_header, &batch.blob_bytes); + BatchHash::construct(&chunk_infos, batch_header, &batch.blob_bytes); let batch_data: BatchData<N_SNARKS> = BatchData::from(&batch_info); - // sanity check: - // - conditionally decoded blob should match batch data. + // Sanity check: validate that the conditionally decoded blob matches the batch data. let batch_bytes = batch_data.get_batch_data_bytes(); - let decoded_blob_bytes = decode_bytes(&batch.blob_bytes)?; - assert_eq!( - batch_bytes, decoded_blob_bytes, - "BatchProvingTask(sanity) mismatch batch bytes and decoded blob bytes", - ); + let decoded_blob_bytes = + decode_bytes(&batch.blob_bytes).map_err(|e| BatchProverError::Custom(e.to_string()))?; + if batch_bytes != decoded_blob_bytes { + return Err(BatchProverError::Custom(format!( + "BatchProvingTask(sanity) decoded blob bytes do not match batch bytes! len(expected)={}, len(got)={}", + decoded_blob_bytes.len(), + batch_bytes.len(), + )).into()); + } - let layer3_snark = self.prover_impl.load_or_gen_agg_snark( - name, - LayerId::Layer3.id(), - LayerId::Layer3.degree(), - batch_info, - &self.halo2_protocol, - &self.sp1_protocol, - &layer2_snarks, - output_dir, - )?; - log::info!("Got aggregation snark (layer-3): {name}"); + // Load from disk or generate the layer-3 SNARK using the batch circuit. + let layer3_snark = self + .prover_impl + .load_or_gen_agg_snark( + name, + LayerId::Layer3.id(), + LayerId::Layer3.degree(), + batch_info, + &self.halo2_protocol, + &self.sp1_protocol, + &layer2_snarks, + output_dir, + ) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; Ok((layer3_snark, batch_hash)) } - // Given a bundle proving task that consists of a list of batch proofs for all intermediate - // batches, bundles them into a single bundle proof using the RecursionCircuit, effectively - // proving the validity of all those batches. - pub fn gen_bundle_proof( - &mut self, - bundle: BundleProvingTask, - name: Option<&str>, - output_dir: Option<&str>, - ) -> Result<BundleProof> { - let name = name.map_or_else(|| bundle.identifier(), |name| name.to_string()); - - let bundle_snarks = bundle - .batch_proofs - .iter() - .map(|proof| proof.into()) - .collect::<Vec<_>>(); - - let layer5_snark = self.prover_impl.load_or_gen_recursion_snark( - &name, - LayerId::Layer5.id(), - LayerId::Layer5.degree(), - &bundle_snarks, - output_dir, - )?; - - let layer6_evm_proof = self.prover_impl.load_or_gen_comp_evm_proof( - &name, - LayerId::Layer6.id(), - true, - LayerId::Layer6.degree(), - layer5_snark, - output_dir, - )?; - - self.check_bundle_vk(); - - let bundle_proof: BundleProof = layer6_evm_proof.proof.into(); if let Some(output_dir) = output_dir { bundle_proof.dump(output_dir, "recursion")?; } - Ok(bundle_proof) + /// Sanity check: validate that the SNARK [`protocol`][snark_verifier::Protocol] of each SNARK + /// being aggregated by the [`BatchCircuit`][aggregator::BatchCircuit] matches the expected SNARK + /// protocol, conditional on the chunk proof generation route utilised, i.e. halo2 or sp1.
+ fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProofV2]) -> Result<(), ProverError> { + for (i, proof) in chunk_proofs.iter().enumerate() { + let expected = match proof.inner.chunk_kind() { + ChunkKind::Halo2 => &self.halo2_protocol, + ChunkKind::Sp1 => &self.sp1_protocol, + }; + if proof.inner.protocol().ne(expected) { + let expected_digest = format!("{:x}", Sha256::digest(expected)); + let found_digest = format!("{:x}", Sha256::digest(proof.inner.protocol())); + log::error!( + "BatchProver: SNARK protocol mismatch! index={}, expected={}, found={}", + i, + expected_digest, + found_digest, + ); + return Err(BatchProverError::ChunkProtocolMismatch( + i, + expected_digest, + found_digest, + ) + .into()); + } } + + Ok(()) } - /// Check vk generated is same with vk loaded from assets - fn check_batch_vk(&self) { - if self.raw_vk_batch.is_some() { - let gen_vk = self - .prover_impl - .raw_vk(LayerId::Layer4.id()) - .unwrap_or_default(); - if gen_vk.is_empty() { - log::warn!("no gen_vk found, skip check_vk"); - return; - } - let init_vk = self.raw_vk_batch.clone().unwrap_or_default(); - if gen_vk != init_vk { - log::error!( - "batch-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}", - base64::encode(gen_vk), - base64::encode(init_vk), - ); + /// Sanity check for the [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] used to generate + /// Layer-4 SNARK that is wrapped inside the [`BatchProof`]. The prover generated VK is + /// expected to match the VK used to initialise the prover. + fn check_batch_vk(&self) -> Result<(), ProverError> { + let layer = LayerId::Layer4; + if let Some(expected_vk) = self.raw_vk_batch.as_ref() { + let base64_exp_vk = base64::encode(expected_vk); + if let Some(generated_vk) = self.prover_impl.raw_vk(layer.id()).as_ref() { + let base64_gen_vk = base64::encode(generated_vk); + if generated_vk.ne(expected_vk) { + log::error!( + "BatchProver: {:?} VK mismatch! found={}, expected={}", + layer, + base64_gen_vk, + base64_exp_vk, + ); + return Err(BatchProverError::VerifyingKeyMismatch( + layer, + base64_gen_vk, + base64_exp_vk, + ) + .into()); + } + } else { + return Err(BatchProverError::VerifyingKeyNotFound(layer, base64_exp_vk).into()); + } } + + Ok(()) } - /// Check vk generated is same with vk loaded from assets - fn check_bundle_vk(&self) { - if self.raw_vk_bundle.is_some() { - let gen_vk = self - .prover_impl - .raw_vk(LayerId::Layer6.id()) - .unwrap_or_default(); - if gen_vk.is_empty() { - log::warn!("no gen_vk found, skip check_vk"); - return; - } - let init_vk = self.raw_vk_bundle.clone().unwrap_or_default(); - if gen_vk != init_vk { - log::error!( - "bundle-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}", - base64::encode(gen_vk), - base64::encode(init_vk), - ); + /// Sanity check for the [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] used to generate + /// Layer-6 SNARK that is wrapped inside the [`BundleProof`]. The prover generated VK is + /// expected to match the VK used to initialise the prover. + fn check_bundle_vk(&self) -> Result<(), ProverError> { + let layer = LayerId::Layer6; + if let Some(expected_vk) = self.raw_vk_bundle.as_ref() { + let base64_exp_vk = base64::encode(expected_vk); + if let Some(generated_vk) = self.prover_impl.raw_vk(layer.id()).as_ref() { + let base64_gen_vk = base64::encode(generated_vk); + if generated_vk.ne(expected_vk) { + log::error!( + "BatchProver: {:?} VK mismatch!
found={}, expected={}", + layer, + base64_gen_vk, + base64_exp_vk, + ); + return Err(BatchProverError::VerifyingKeyMismatch( + layer, + base64_gen_vk, + base64_exp_vk, + ) + .into()); + } + } else { + return Err(BatchProverError::VerifyingKeyNotFound(layer, base64_exp_vk).into()); } } + + Ok(()) } } pub fn check_chunk_hashes( name: &str, chunk_hashes_proofs: &[(ChunkInfo, ChunkProof)], -) -> Result<()> { +) -> anyhow::Result<()> { for (idx, (in_arg, chunk_proof)) in chunk_hashes_proofs.iter().enumerate() { let in_proof = &chunk_proof.chunk_info; - crate::proof::compare_chunk_info(&format!("{name} chunk num {idx}"), in_arg, in_proof)?; + if let Err(e) = + crate::proof::compare_chunk_info(&format!("{name} chunk num {idx}"), in_arg, in_proof) + { + anyhow::bail!(e); + } } + Ok(()) } diff --git a/prover/src/recursion.rs b/prover/src/aggregator/recursion.rs similarity index 91% rename from prover/src/recursion.rs rename to prover/src/aggregator/recursion.rs index 8e1694a56e..897a3125d5 100644 --- a/prover/src/recursion.rs +++ b/prover/src/aggregator/recursion.rs @@ -1,6 +1,5 @@ -use halo2_proofs::halo2curves::bn256::Fr; - use aggregator::{BatchCircuit, StateTransition}; +use halo2_proofs::halo2curves::bn256::Fr; use snark_verifier_sdk::Snark; /// 4 fields for 2 hashes (Hi, Lo) @@ -19,10 +18,7 @@ const NUM_INIT_INSTANCES: usize = ST_INSTANCE; #[derive(Clone, Debug)] pub struct RecursionTask<'a, const N_SNARK: usize> { - /// The [`snarks`][snark] from the [`BatchCircuit`][batch_circuit]. - /// - /// [snark]: snark_verifier_sdk::Snark - /// [batch_circuit]: aggregator::BatchCircuit + /// The [`snarks`][Snark] from the [`BatchCircuit`][aggregator::BatchCircuit]. snarks: &'a [Snark], } diff --git a/prover/src/aggregator/verifier.rs b/prover/src/aggregator/verifier.rs index 0b3bc252f1..04746d9cea 100644 --- a/prover/src/aggregator/verifier.rs +++ b/prover/src/aggregator/verifier.rs @@ -1,28 +1,35 @@ -use crate::{ - common, - config::{LAYER4_CONFIG_PATH, LAYER4_DEGREE}, - consts::{batch_vk_filename, DEPLOYMENT_CODE_FILENAME}, - io::{force_to_read, try_to_read}, - proof::BundleProof, -}; +use std::{env, path::PathBuf}; + use aggregator::CompressionCircuit; use halo2_proofs::{ halo2curves::bn256::{Bn256, G1Affine}, plonk::VerifyingKey, poly::kzg::commitment::ParamsKZG, }; -use snark_verifier_sdk::verify_evm_calldata; -use snark_verifier_sdk::Snark; -use std::{collections::BTreeMap, env}; +use crate::{ + common, + config::{LAYER4_CONFIG_PATH, LAYER4_DEGREE}, + consts::{batch_vk_filename, DEPLOYMENT_CODE_FILENAME}, + utils::{deploy_and_call, force_read, try_read}, + BatchProofV2, BatchProverError, BundleProofV2, ParamsMap, ProverError, +}; + +/// Verifier capable of verifying both [`BatchProof`][crate::BatchProof] and [`BundleProof`]. #[derive(Debug)] pub struct Verifier<'params> { - // Make it public for testing with inner functions (unnecessary for FFI). + /// Encapsulate the common verifier. pub inner: common::Verifier<'params, CompressionCircuit>, + /// The EVM deployment code for the verifier contract. + /// + /// This field is optional as it is not set in dev-mode. It is expected in production + /// environments where we already have the verifier contract's deployment code available. In + /// dev-mode or E2E testing, we generate the deployment code on-the-fly. deployment_code: Option>, } impl<'params> Verifier<'params> { + /// Construct a new batch verifier. 
pub fn new( params: &'params ParamsKZG<Bn256>, vk: VerifyingKey<G1Affine>, @@ -36,33 +43,55 @@ impl<'params> Verifier<'params> { } } - pub fn from_params_and_assets( - params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, - assets_dir: &str, - ) -> Self { - let raw_vk = force_to_read(assets_dir, &batch_vk_filename()); - let deployment_code = try_to_read(assets_dir, &DEPLOYMENT_CODE_FILENAME); + /// Instantiate a new batch verifier given a map of degree to KZG setup parameters and a + /// directory to find assets. + /// + /// Panics if the verifying key is not found in the assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Read verifying key in the assets directory. + let path = PathBuf::from(assets_dir).join(batch_vk_filename()); + let raw_vk = force_read(&path); + + // Try to read the bytecode to deploy the verifier contract. + let path = PathBuf::from(assets_dir).join(DEPLOYMENT_CODE_FILENAME.clone()); + let deployment_code = try_read(&path); + // The Layer-4 compression circuit is configured with the shape as per + // [`LAYER4_CONFIG_PATH`]. env::set_var("COMPRESSION_CONFIG", &*LAYER4_CONFIG_PATH); - let params = params_map.get(&*LAYER4_DEGREE).expect("should be loaded"); - let inner = common::Verifier::from_params(params, &raw_vk); + + let params = params_map + .get(&*LAYER4_DEGREE) + .unwrap_or_else(|| panic!("KZG params don't contain degree={:?}", LAYER4_DEGREE)); Self { - inner, + inner: common::Verifier::from_params(params, &raw_vk), deployment_code, } } - pub fn verify_batch_proof(&self, snark: impl Into<Snark>) -> bool { - self.inner.verify_snark(snark.into()) + /// Verify a [`Layer-4`][crate::config::LayerId::Layer4] [`CompressionCircuit`] [`Snark`]. + pub fn verify_batch_proof(&self, batch_proof: &BatchProofV2) -> Result<(), ProverError> { + let snark = batch_proof.try_into()?; + if self.inner.verify_snark(snark) { + Ok(()) + } else { + Err(BatchProverError::Verification.into()) + } } - pub fn verify_bundle_proof(&self, bundle_proof: BundleProof) -> bool { - if let Some(deployment_code) = self.deployment_code.clone() { - verify_evm_calldata(deployment_code, bundle_proof.calldata()) + /// Verify a [`Layer-6`][crate::config::LayerId::Layer6] EVM-verifiable + /// [`Proof`][crate::proof::EvmProof], aka [`BundleProof`]. + /// + /// Returns an error if the verifier contract's deployment bytecode is not set. Otherwise + /// deploys the contract and verifies the proof utilising an [`EVM Executor`][revm]. + pub fn verify_bundle_proof(&self, bundle_proof: &BundleProofV2) -> Result<(), ProverError> { + if let Some(code) = self.deployment_code.as_ref() { + deploy_and_call(code.to_vec(), bundle_proof.calldata()) + .map_err(|e| BatchProverError::Custom(e.to_string()))?; + Ok(()) } else { - log::warn!("No deployment_code found for EVM verifier"); - false + Err(BatchProverError::VerifierCodeMissing.into()) } } } diff --git a/prover/src/common.rs b/prover/src/common.rs deleted file mode 100644 index 5499b16b5b..0000000000 --- a/prover/src/common.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod prover; -mod verifier; - -pub use self::{prover::Prover, verifier::Verifier}; -pub use aggregator::{ChunkInfo, CompressionCircuit}; diff --git a/prover/src/common/mod.rs b/prover/src/common/mod.rs new file mode 100644 index 0000000000..00b9f04627 --- /dev/null +++ b/prover/src/common/mod.rs @@ -0,0 +1,8 @@ +mod prover; +pub use prover::Prover; + +mod verifier; +pub use verifier::Verifier; + +// Re-export from the aggregator crate.
+pub use aggregator::{ChunkInfo, CompressionCircuit}; diff --git a/prover/src/common/prover/aggregation.rs b/prover/src/common/prover/aggregation.rs index d17e838a94..d458554f5e 100644 --- a/prover/src/common/prover/aggregation.rs +++ b/prover/src/common/prover/aggregation.rs @@ -1,19 +1,61 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - io::{load_snark, write_snark}, - utils::gen_rng, -}; +use std::{env, path::Path}; + use aggregator::{BatchCircuit, BatchHash}; use anyhow::{anyhow, Result}; use halo2_proofs::halo2curves::bn256::G1Affine; use rand::Rng; use snark_verifier_sdk::Snark; -use std::env; -impl<'params> Prover<'params> { +use crate::{ + config::layer_config_path, + utils::{gen_rng, read_json_deep, write_json}, +}; + +impl<'params> super::Prover<'params> { + #[allow(clippy::too_many_arguments)] + pub fn load_or_gen_agg_snark<const N_SNARKS: usize>( + &mut self, + name: &str, + id: &str, + degree: u32, + batch_info: BatchHash<N_SNARKS>, + halo2_protocol: &[u8], + sp1_protocol: &[u8], + previous_snarks: &[Snark], + output_dir: Option<&str>, + ) -> Result<Snark> { + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("aggregation_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } + + // Generate the layer-3 SNARK. + let rng = gen_rng(); + let snark = self.gen_agg_snark( + id, + degree, + rng, + batch_info, + halo2_protocol, + sp1_protocol, + previous_snarks, + )?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("aggregation_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) + } + #[allow(clippy::too_many_arguments)] - pub fn gen_agg_snark<const N_SNARKS: usize>( + fn gen_agg_snark<const N_SNARKS: usize>( &mut self, id: &str, degree: u32, @@ -42,45 +84,4 @@ self.gen_snark(id, degree, &mut rng, circuit, "gen_agg_snark") } - - #[allow(clippy::too_many_arguments)] - pub fn load_or_gen_agg_snark<const N_SNARKS: usize>( - &mut self, - name: &str, - id: &str, - degree: u32, - batch_info: BatchHash<N_SNARKS>, - halo2_protocol: &[u8], - sp1_protocol: &[u8], - previous_snarks: &[Snark], - output_dir: Option<&str>, - ) -> Result<Snark> { - let file_path = format!( - "{}/aggregation_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_agg_snark( - id, - degree, - rng, - batch_info, - halo2_protocol, - sp1_protocol, - previous_snarks, - ); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } - } } diff --git a/prover/src/common/prover/compression.rs b/prover/src/common/prover/compression.rs index 80078f84d1..3614b066a8 100644 --- a/prover/src/common/prover/compression.rs +++ b/prover/src/common/prover/compression.rs @@ -1,59 +1,60 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - io::{load_snark, write_snark}, - utils::gen_rng, -}; +use std::{env, path::Path}; + use aggregator::CompressionCircuit; use anyhow::{anyhow, Result}; use rand::Rng; use snark_verifier_sdk::Snark; -use std::env; -impl<'params> Prover<'params> { - pub fn gen_comp_snark( +use crate::{ + config::layer_config_path, + utils::{gen_rng, read_json_deep, write_json}, +}; + +impl<'params> super::Prover<'params> { + pub fn
load_or_gen_comp_snark( &mut self, + name: &str, id: &str, has_accumulator: bool, degree: u32, - mut rng: impl Rng + Send, prev_snark: Snark, + output_dir: Option<&str>, ) -> Result<Snark> { - env::set_var("COMPRESSION_CONFIG", layer_config_path(id)); + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("compression_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } - let circuit = - CompressionCircuit::new(self.params(degree), prev_snark, has_accumulator, &mut rng) - .map_err(|err| anyhow!("Failed to construct compression circuit: {err:?}"))?; - self.gen_snark(id, degree, &mut rng, circuit, "gen_comp_snark") + // Generate the compression SNARK. + let rng = gen_rng(); + let snark = self.gen_comp_snark(id, has_accumulator, degree, rng, prev_snark)?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("compression_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) } - pub fn load_or_gen_comp_snark( + fn gen_comp_snark( &mut self, - name: &str, id: &str, has_accumulator: bool, degree: u32, + mut rng: impl Rng + Send, prev_snark: Snark, - output_dir: Option<&str>, ) -> Result<Snark> { - let file_path = format!( - "{}/compression_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_comp_snark(id, has_accumulator, degree, rng, prev_snark); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } + env::set_var("COMPRESSION_CONFIG", layer_config_path(id)); + + let circuit = + CompressionCircuit::new(self.params(degree), prev_snark, has_accumulator, &mut rng) + .map_err(|err| anyhow!("Failed to construct compression circuit: {err:?}"))?; + self.gen_snark(id, degree, &mut rng, circuit, "gen_comp_snark") } } diff --git a/prover/src/common/prover/evm.rs b/prover/src/common/prover/evm.rs index c019898092..13330ddcc4 100644 --- a/prover/src/common/prover/evm.rs +++ b/prover/src/common/prover/evm.rs @@ -1,9 +1,3 @@ -use super::Prover; -use crate::{ - config::layer_config_path, - utils::{gen_rng, read_env_var}, - EvmProof, -}; use aggregator::CompressionCircuit; use anyhow::{anyhow, Result}; use halo2_proofs::halo2curves::bn256::Fr; @@ -11,7 +5,13 @@ use rand::Rng; use snark_verifier_sdk::{gen_evm_proof_shplonk, CircuitExt, Snark}; use std::env; -impl<'params> Prover<'params> { +use crate::{ + config::layer_config_path, + utils::{gen_evm_verifier, gen_rng, read_env_var}, + EvmProof, +}; + +impl<'params> super::Prover<'params> { pub fn load_or_gen_comp_evm_proof( &mut self, name: &str, @@ -68,7 +68,7 @@ let evm_proof = EvmProof::new(proof, &instances, num_instance, Some(pk))?; if read_env_var("SCROLL_PROVER_DUMP_YUL", false) { - crate::evm::gen_evm_verifier::<CompressionCircuit>(params, pk.get_vk(), &evm_proof, output_dir); + gen_evm_verifier::<CompressionCircuit>(params, pk.get_vk(), &evm_proof, output_dir)?; } Ok(evm_proof) diff --git a/prover/src/common/prover/inner.rs b/prover/src/common/prover/inner.rs index ae87ea5746..44e706600d 100644 --- a/prover/src/common/prover/inner.rs +++ b/prover/src/common/prover/inner.rs @@ -1,17 +1,47 @@ -use super::Prover; -use crate::{ -
config::INNER_DEGREE, - io::{load_snark, write_snark}, - utils::{gen_rng, metric_of_witness_block}, - zkevm::circuit::{SuperCircuit, TargetCircuit}, -}; +use std::path::Path; + use anyhow::Result; use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, Snark}; use zkevm_circuits::evm_circuit::witness::Block; -impl<'params> Prover<'params> { - pub fn gen_inner_snark<C: TargetCircuit>( +use crate::{ + config::INNER_DEGREE, + utils::{gen_rng, metric_of_witness_block, read_json_deep, write_json}, + zkevm::circuit::{SuperCircuit, TargetCircuit}, +}; + +impl<'params> super::Prover<'params> { + pub fn load_or_gen_inner_snark<C: TargetCircuit>( + &mut self, + name: &str, + id: &str, + witness_block: &Block, + output_dir: Option<&str>, + ) -> Result<Snark> { + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("inner_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } + + // Generate the inner SNARK. + let rng = gen_rng(); + let snark = self.gen_inner_snark::<C>(id, rng, witness_block)?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("inner_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) + } + + fn gen_inner_snark<C: TargetCircuit>( &mut self, id: &str, mut rng: impl Rng + Send, @@ -37,32 +67,4 @@ Ok(snark) } - - pub fn load_or_gen_inner_snark<C: TargetCircuit>( - &mut self, - name: &str, - id: &str, - witness_block: &Block, - output_dir: Option<&str>, - ) -> Result<Snark> { - let file_path = format!( - "{}/inner_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_inner_snark::<C>(id, rng, witness_block); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } - } } diff --git a/prover/src/common/prover.rs b/prover/src/common/prover/mod.rs similarity index 86% rename from prover/src/common/prover.rs rename to prover/src/common/prover/mod.rs index b3df0582e9..a478cab1db 100644 --- a/prover/src/common/prover.rs +++ b/prover/src/common/prover/mod.rs @@ -1,10 +1,15 @@ -use crate::utils::{load_params, param_path_for_degree}; +use std::collections::{BTreeMap, BTreeSet, HashMap}; + use halo2_proofs::{ - halo2curves::bn256::{Bn256, G1Affine}, + halo2curves::bn256::G1Affine, plonk::ProvingKey, poly::{commitment::Params, kzg::commitment::ParamsKZG}, }; -use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use crate::{ + utils::{load_params, param_path_for_degree}, + ParamsMap, +}; mod aggregation; mod chunk; @@ -18,20 +23,20 @@ mod utils; #[derive(Debug)] pub struct Prover<'params> { // degree -> params (use BTreeMap to find proper degree for params downsize) - pub params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, + pub params_map: &'params ParamsMap, // Cached id -> pk pk_map: HashMap<String, ProvingKey<G1Affine>>, } impl<'params> Prover<'params> { - pub fn from_params_map(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>) -> Self { + pub fn from_params_map(params_map: &'params ParamsMap) -> Self { Self { params_map, pk_map: HashMap::new(), } } - pub fn load_params_map(params_dir: &str, degrees: &[u32]) -> BTreeMap<u32, ParamsKZG<Bn256>> { + pub fn load_params_map(params_dir: &str, degrees: &[u32]) -> ParamsMap { let degrees = BTreeSet::from_iter(degrees); let max_degree =
**degrees.last().unwrap(); diff --git a/prover/src/common/prover/recursion.rs b/prover/src/common/prover/recursion.rs index 168b5641ca..2fa6bb13b0 100644 --- a/prover/src/common/prover/recursion.rs +++ b/prover/src/common/prover/recursion.rs @@ -1,4 +1,4 @@ -use std::env; +use std::{env, path::Path}; use aggregator::{initial_recursion_snark, RecursionCircuit, StateTransition, MAX_AGG_SNARKS}; use anyhow::Result; @@ -6,16 +6,43 @@ use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, Snark}; use crate::{ + aggregator::RecursionTask, config::layer_config_path, - io::{load_snark, write_snark}, - recursion::RecursionTask, - utils::gen_rng, + utils::{gen_rng, read_json_deep, write_json}, }; -use super::Prover; +impl<'params> super::Prover<'params> { + pub fn load_or_gen_recursion_snark( + &mut self, + name: &str, + id: &str, + degree: u32, + batch_snarks: &[Snark], + output_dir: Option<&str>, + ) -> Result { + // If an output directory is provided and we are successfully able to locate a SNARK with + // the same identifier on disk, return early. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("recursion_snark_{}_{}.json", id, name)); + if let Ok(snark) = read_json_deep(&path) { + return Ok(snark); + } + } + + // Generate the layer-5 recursion SNARK. + let rng = gen_rng(); + let snark = self.gen_recursion_snark(id, degree, rng, batch_snarks)?; + + // Write to disk if an output directory is provided. + if let Some(dir) = output_dir { + let path = Path::new(dir).join(format!("recursion_snark_{}_{}.json", id, name)); + write_json(&path, &snark)?; + } + + Ok(snark) + } -impl<'params> Prover<'params> { - pub fn gen_recursion_snark( + fn gen_recursion_snark( &mut self, id: &str, degree: u32, @@ -78,33 +105,4 @@ impl<'params> Prover<'params> { Ok(cur_snark) } - - pub fn load_or_gen_recursion_snark( - &mut self, - name: &str, - id: &str, - degree: u32, - batch_snarks: &[Snark], - output_dir: Option<&str>, - ) -> Result { - let file_path = format!( - "{}/recursion_snark_{}_{}.json", - output_dir.unwrap_or_default(), - id, - name - ); - - match output_dir.and_then(|_| load_snark(&file_path).ok().flatten()) { - Some(snark) => Ok(snark), - None => { - let rng = gen_rng(); - let result = self.gen_recursion_snark(id, degree, rng, batch_snarks); - if let (Some(_), Ok(snark)) = (output_dir, &result) { - write_snark(&file_path, snark); - } - - result - } - } - } } diff --git a/prover/src/common/prover/utils.rs b/prover/src/common/prover/utils.rs index 8bf8363cb3..f2b529248d 100644 --- a/prover/src/common/prover/utils.rs +++ b/prover/src/common/prover/utils.rs @@ -1,5 +1,3 @@ -use super::Prover; -use crate::io::serialize_vk; use anyhow::Result; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, @@ -9,7 +7,9 @@ use halo2_proofs::{ use rand::Rng; use snark_verifier_sdk::{gen_snark_shplonk, CircuitExt, Snark}; -impl<'params> Prover<'params> { +use crate::utils::serialize_vk; + +impl<'params> super::Prover<'params> { pub fn gen_snark>( &mut self, id: &str, diff --git a/prover/src/common/verifier/evm.rs b/prover/src/common/verifier/evm.rs index 783eea2d00..82ece6996e 100644 --- a/prover/src/common/verifier/evm.rs +++ b/prover/src/common/verifier/evm.rs @@ -1,10 +1,12 @@ -use super::Verifier; -use crate::EvmProof; use halo2_proofs::halo2curves::bn256::Fr; use snark_verifier_sdk::CircuitExt; -impl<'params, C: CircuitExt> Verifier<'params, C> { - pub fn gen_evm_verifier(&self, evm_proof: &EvmProof, output_dir: Option<&str>) { - crate::evm::gen_evm_verifier::(self.params, 
&self.vk, evm_proof, output_dir) +impl<'params, C: CircuitExt> super::Verifier<'params, C> { + pub fn gen_evm_verifier( + &self, + evm_proof: &crate::EvmProof, + output_dir: Option<&str>, + ) -> Result<(), crate::ProverError> { + crate::gen_evm_verifier::(self.params, &self.vk, evm_proof, output_dir) } } diff --git a/prover/src/common/verifier.rs b/prover/src/common/verifier/mod.rs similarity index 96% rename from prover/src/common/verifier.rs rename to prover/src/common/verifier/mod.rs index 5474811ecd..cec86f33cd 100644 --- a/prover/src/common/verifier.rs +++ b/prover/src/common/verifier/mod.rs @@ -1,4 +1,3 @@ -use crate::io::deserialize_vk; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::VerifyingKey, @@ -7,6 +6,8 @@ use halo2_proofs::{ use snark_verifier_sdk::{verify_snark_shplonk, CircuitExt, Snark}; use std::marker::PhantomData; +use crate::utils::deserialize_vk; + mod evm; mod utils; diff --git a/prover/src/common/verifier/utils.rs b/prover/src/common/verifier/utils.rs index b5883feec1..44c639901a 100644 --- a/prover/src/common/verifier/utils.rs +++ b/prover/src/common/verifier/utils.rs @@ -1,4 +1,3 @@ -use super::Verifier; use halo2_proofs::{ halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::VerifyingKey, @@ -6,7 +5,7 @@ use halo2_proofs::{ }; use snark_verifier_sdk::CircuitExt; -impl<'params, C: CircuitExt> Verifier<'params, C> { +impl<'params, C: CircuitExt> super::Verifier<'params, C> { pub fn params(&self) -> &ParamsKZG { self.params } diff --git a/prover/src/config.rs b/prover/src/config.rs index 8db379416f..895773e2cd 100644 --- a/prover/src/config.rs +++ b/prover/src/config.rs @@ -1,34 +1,79 @@ +use std::{ + collections::HashSet, + fmt, + fs::File, + path::{Path, PathBuf}, + sync::LazyLock, +}; + use crate::utils::read_env_var; -use aggregator::ConfigParams; -use std::{collections::HashSet, fmt, fs::File, path::Path, sync::LazyLock}; +/// Degree (k) used for the inner circuit, i.e. +/// [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit]. pub static INNER_DEGREE: LazyLock = LazyLock::new(|| read_env_var("SCROLL_PROVER_INNER_DEGREE", 20)); -pub static ASSETS_DIR: LazyLock = - LazyLock::new(|| read_env_var("SCROLL_PROVER_ASSETS_DIR", "configs".to_string())); +/// Name of the directory to find asset files on disk. +pub static ASSETS_DIR: LazyLock = + LazyLock::new(|| read_env_var("SCROLL_PROVER_ASSETS_DIR", PathBuf::from("configs"))); -pub static LAYER1_CONFIG_PATH: LazyLock = +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-1`][LayerId::Layer1] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER1_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer1.config")); -pub static LAYER2_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-2`][LayerId::Layer2] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER2_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer2.config")); -pub static LAYER3_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-3`][LayerId::Layer3] [`Circuit`][halo2_proofs::plonk::Circuit]. 
+pub static LAYER3_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer3.config")); -pub static LAYER4_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-4`][LayerId::Layer4] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER4_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer4.config")); -pub static LAYER5_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-5`][LayerId::Layer5] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER5_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer5.config")); -pub static LAYER6_CONFIG_PATH: LazyLock = + +/// The path to the [`Config Parameters`][aggregator::ConfigParams] JSON file that define the shape +/// of the [`Layer-6`][LayerId::Layer6] [`Circuit`][halo2_proofs::plonk::Circuit]. +pub static LAYER6_CONFIG_PATH: LazyLock = LazyLock::new(|| asset_file_path("layer6.config")); -pub static LAYER1_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER1_CONFIG_PATH)); -pub static LAYER2_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER2_CONFIG_PATH)); -pub static LAYER3_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER3_CONFIG_PATH)); -pub static LAYER4_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER4_CONFIG_PATH)); -pub static LAYER5_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER5_CONFIG_PATH)); -pub static LAYER6_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&LAYER6_CONFIG_PATH)); +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-1`][LayerId::Layer1]. +pub static LAYER1_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER1_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-2`][LayerId::Layer2]. +pub static LAYER2_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER2_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-3`][LayerId::Layer3]. +pub static LAYER3_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER3_CONFIG_PATH)); -pub static ZKEVM_DEGREES: LazyLock> = LazyLock::new(|| { +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-4`][LayerId::Layer4]. +pub static LAYER4_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER4_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-5`][LayerId::Layer5]. +pub static LAYER5_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER5_CONFIG_PATH)); + +/// The degree (k) for the halo2 [`Circuit`][halo2_proofs::plonk::Circuit] at +/// [`Layer-6`][LayerId::Layer6]. +pub static LAYER6_DEGREE: LazyLock = LazyLock::new(|| layer_degree(&*LAYER6_CONFIG_PATH)); + +/// The list of degrees for Inner, Layer-1 and Layer-2, i.e. the proof generation [`layers`][LayerId] +/// covered by the [`ChunkProver`][crate::ChunkProver]. +pub static CHUNK_PROVER_DEGREES: LazyLock> = LazyLock::new(|| { Vec::from_iter(HashSet::from([ *INNER_DEGREE, *LAYER1_DEGREE, @@ -36,7 +81,9 @@ pub static ZKEVM_DEGREES: LazyLock> = LazyLock::new(|| { ])) }); -pub static AGG_DEGREES: LazyLock> = LazyLock::new(|| { +/// The list of degrees for Layer-3, Layer-4, Layer-5 and Layer-6, i.e. the proof generation [`layers`][LayerId] +/// covered by the [`BatchProver`][crate::BatchProver]. 
+pub static BATCH_PROVER_DEGREES: LazyLock> = LazyLock::new(|| { Vec::from_iter(HashSet::from([ *LAYER3_DEGREE, *LAYER4_DEGREE, @@ -45,6 +92,7 @@ pub static AGG_DEGREES: LazyLock> = LazyLock::new(|| { ])) }); +/// The various proof layers in the proof generation pipeline. #[derive(Clone, Copy, Debug)] pub enum LayerId { /// Super (inner) circuit layer @@ -70,6 +118,7 @@ impl fmt::Display for LayerId { } impl LayerId { + /// Returns the identifier by layer. pub fn id(&self) -> &str { match self { Self::Inner => "inner", @@ -82,6 +131,7 @@ impl LayerId { } } + /// The degree (k) for the [`Circuit`][halo2_proofs::plonk::Circuit] by layer. pub fn degree(&self) -> u32 { match self { Self::Inner => *INNER_DEGREE, @@ -94,43 +144,53 @@ impl LayerId { } } - pub fn config_path(&self) -> &str { + /// The path to the [`Config Parameters`][aggregator::ConfigParams] used to configure the shape + /// of the [`Circuit`][halo2_proofs::plonk::Circuit]. + pub fn config_path(&self) -> PathBuf { match self { - Self::Layer1 => &LAYER1_CONFIG_PATH, - Self::Layer2 => &LAYER2_CONFIG_PATH, - Self::Layer3 => &LAYER3_CONFIG_PATH, - Self::Layer4 => &LAYER4_CONFIG_PATH, - Self::Layer5 => &LAYER5_CONFIG_PATH, - Self::Layer6 => &LAYER6_CONFIG_PATH, + Self::Layer1 => LAYER1_CONFIG_PATH.to_path_buf(), + Self::Layer2 => LAYER2_CONFIG_PATH.to_path_buf(), + Self::Layer3 => LAYER3_CONFIG_PATH.to_path_buf(), + Self::Layer4 => LAYER4_CONFIG_PATH.to_path_buf(), + Self::Layer5 => LAYER5_CONFIG_PATH.to_path_buf(), + Self::Layer6 => LAYER6_CONFIG_PATH.to_path_buf(), Self::Inner => unreachable!("No config file for super (inner) circuit"), } } -} -pub fn asset_file_path(filename: &str) -> String { - Path::new(&*ASSETS_DIR) - .join(filename) - .to_string_lossy() - .into_owned() + /// Whether or not the [`Snark`][snark_verifier_sdk::Snark] generated at this layer has an + /// accumulator. + /// + /// Every SNARK layer on top of the [`innermost layer`][LayerId::Inner] has an accumulator. + pub fn accumulator(&self) -> bool { + !matches!(self, Self::Inner) + } } -pub fn layer_config_path(id: &str) -> &str { +/// Returns the path to the [`Config Parameters`][aggregator::ConfigParams] that configure the +/// shape of the [`Circuit`][halo2_proofs::plonk::Circuit] given the [`id`][LayerId::id] of the +/// layer. 
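// The config statics above all follow the same recipe: a LazyLock initialized from an
// environment variable with a sane default. A sketch of a read_env_var-style helper
// under that assumption (the crate's actual signature may differ):
use std::{str::FromStr, sync::LazyLock};

fn read_env_var<T: FromStr>(key: &str, default: T) -> T {
    std::env::var(key)
        .ok()
        .and_then(|raw| raw.parse::<T>().ok())
        .unwrap_or(default)
}

// For example, the inner circuit degree, overridable at runtime:
static EXAMPLE_INNER_DEGREE: LazyLock<u32> =
    LazyLock::new(|| read_env_var("SCROLL_PROVER_INNER_DEGREE", 20));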
+pub fn layer_config_path(id: &str) -> PathBuf { match id { - "layer1" => &LAYER1_CONFIG_PATH, - "layer2" => &LAYER2_CONFIG_PATH, - "layer3" => &LAYER3_CONFIG_PATH, - "layer4" => &LAYER4_CONFIG_PATH, - "layer5" => &LAYER5_CONFIG_PATH, - "layer6" => &LAYER6_CONFIG_PATH, + "layer1" => LAYER1_CONFIG_PATH.to_path_buf(), + "layer2" => LAYER2_CONFIG_PATH.to_path_buf(), + "layer3" => LAYER3_CONFIG_PATH.to_path_buf(), + "layer4" => LAYER4_CONFIG_PATH.to_path_buf(), + "layer5" => LAYER5_CONFIG_PATH.to_path_buf(), + "layer6" => LAYER6_CONFIG_PATH.to_path_buf(), _ => panic!("Wrong id-{id} to get layer config path"), } } -fn layer_degree(config_file: &str) -> u32 { - let f = File::open(config_file).unwrap_or_else(|_| panic!("Failed to open {config_file}")); +fn asset_file_path(filename: &str) -> PathBuf { + ASSETS_DIR.join(filename) +} + +fn layer_degree + fmt::Debug>(path: P) -> u32 { + let f = File::open(&path).unwrap_or_else(|_| panic!("Failed to open {path:?}")); - let params: ConfigParams = - serde_json::from_reader(f).unwrap_or_else(|_| panic!("Failed to parse {config_file}")); + let params = serde_json::from_reader::<_, aggregator::ConfigParams>(f) + .unwrap_or_else(|_| panic!("Failed to parse {path:?}")); params.degree } diff --git a/prover/src/consts.rs b/prover/src/consts.rs index 19b1800ddb..e9c7af048a 100644 --- a/prover/src/consts.rs +++ b/prover/src/consts.rs @@ -1,6 +1,7 @@ -use crate::utils::read_env_var; use std::sync::LazyLock; +use crate::utils::read_env_var; + // TODO: is it a good design to use LazyLock? Why not read env var each time? pub fn bundle_vk_filename() -> String { diff --git a/prover/src/error.rs b/prover/src/error.rs new file mode 100644 index 0000000000..5be7545621 --- /dev/null +++ b/prover/src/error.rs @@ -0,0 +1,47 @@ +use std::path::PathBuf; + +use crate::{BatchProverError, ChunkProverError}; + +/// Represents error variants possibly encountered during the proof generation process. +#[derive(Debug, thiserror::Error)] +pub enum ProverError { + /// Error occurred while doing i/o operations. + #[error(transparent)] + Io(#[from] std::io::Error), + /// Error encountered while reading from or writing to files. + #[error("error during read/write! path={path}, e={source}")] + IoReadWrite { + /// The path we tried to read from or write to. + path: PathBuf, + /// The source error. + source: std::io::Error, + }, + /// Error occurred while doing serde operations. + #[error(transparent)] + Serde(#[from] serde_json::Error), + /// Error encountered during JSON serde. + #[error("error during read/write json! path={path}, e={source}")] + JsonReadWrite { + /// The path of the file we tried to serialize/deserialize. + path: PathBuf, + /// The source error. + source: serde_json::Error, + }, + /// Error encountered while reading variable from the process environment. + #[error("error while reading env var! key={key}, e={source}")] + EnvVar { + /// The key tried to be read. + key: String, + /// The source error. + source: std::env::VarError, + }, + /// Error propagated in the [`ChunkProver`][crate::ChunkProver] pipeline. + #[error(transparent)] + ChunkProverError(#[from] ChunkProverError), + /// Error propagated from the [`BatchProver`][crate::BatchProver] pipeline. + #[error(transparent)] + BatchProverError(#[from] BatchProverError), + /// Other errors. 
+ #[error("custom error: {0}")] + Custom(String), +} diff --git a/prover/src/inner.rs b/prover/src/inner.rs deleted file mode 100644 index 7b8f21b530..0000000000 --- a/prover/src/inner.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod prover; -mod verifier; - -pub use self::prover::Prover; -pub use verifier::Verifier; diff --git a/prover/src/inner/prover.rs b/prover/src/inner/prover.rs deleted file mode 100644 index 2d9471df18..0000000000 --- a/prover/src/inner/prover.rs +++ /dev/null @@ -1,69 +0,0 @@ -use crate::{ - common, - config::INNER_DEGREE, - io::serialize_vk, - utils::{chunk_trace_to_witness_block, gen_rng}, - zkevm::circuit::TargetCircuit, - Proof, -}; -use anyhow::Result; -use eth_types::l2_types::BlockTrace; -use snark_verifier_sdk::Snark; -use std::marker::PhantomData; - -mod mock; - -#[derive(Debug)] -pub struct Prover<'params, C: TargetCircuit> { - // Make it public for testing with inner functions (unnecessary for FFI). - pub prover_impl: common::Prover<'params>, - phantom: PhantomData, -} - -impl<'params, C: TargetCircuit> From> for Prover<'params, C> { - fn from(prover_impl: common::Prover<'params>) -> Self { - Self { - prover_impl, - phantom: PhantomData, - } - } -} - -impl<'params, C: TargetCircuit> Prover<'params, C> { - pub fn degrees() -> Vec { - vec![*INNER_DEGREE] - } - - pub fn gen_inner_snark(&mut self, id: &str, block_traces: Vec) -> Result { - assert!(!block_traces.is_empty()); - let rng = gen_rng(); - let witness_block = chunk_trace_to_witness_block(block_traces)?; - self.prover_impl - .gen_inner_snark::(id, rng, &witness_block) - } - - pub fn load_or_gen_inner_proof( - &mut self, - name: &str, - id: &str, - block_traces: Vec, - output_dir: Option<&str>, - ) -> Result { - let filename = format!("{id}_{name}"); - match output_dir.and_then(|output_dir| Proof::from_json_file(output_dir, &filename).ok()) { - Some(proof) => Ok(proof), - None => { - let result = self.gen_inner_snark(id, block_traces).map(|snark| { - let raw_vk = serialize_vk(self.prover_impl.pk(id).unwrap().get_vk()); - Proof::from_snark(snark, raw_vk) - }); - - if let (Some(output_dir), Ok(proof)) = (output_dir, &result) { - proof.dump(output_dir, &filename)?; - } - - result - } - } - } -} diff --git a/prover/src/inner/prover/mock.rs b/prover/src/inner/prover/mock.rs deleted file mode 100644 index e023b06f4f..0000000000 --- a/prover/src/inner/prover/mock.rs +++ /dev/null @@ -1,43 +0,0 @@ -use super::Prover; -use crate::{ - config::INNER_DEGREE, - utils::metric_of_witness_block, - zkevm::circuit::{block_traces_to_witness_block, TargetCircuit}, -}; -use anyhow::bail; -use eth_types::l2_types::BlockTrace; -use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr}; -use snark_verifier_sdk::CircuitExt; -use zkevm_circuits::witness::Block; - -impl<'params, C: TargetCircuit> Prover<'params, C> { - pub fn mock_prove_target_circuit(block_trace: BlockTrace) -> anyhow::Result<()> { - Self::mock_prove_target_circuit_chunk(vec![block_trace]) - } - - pub fn mock_prove_target_circuit_chunk(block_traces: Vec) -> anyhow::Result<()> { - let witness_block = block_traces_to_witness_block(block_traces)?; - Self::mock_prove_witness_block(&witness_block) - } - - pub fn mock_prove_witness_block(witness_block: &Block) -> anyhow::Result<()> { - log::info!( - "mock proving chunk, chunk metric {:?}", - metric_of_witness_block(witness_block) - ); - let circuit = C::from_witness_block(witness_block)?; - let prover = MockProver::::run(*INNER_DEGREE, &circuit, circuit.instances())?; - if let Err(errs) = prover.verify_par() { - 
log::error!("err num: {}", errs.len()); - for err in &errs { - log::error!("{}", err); - } - bail!("{:#?}", errs); - } - log::info!( - "mock prove done. chunk metric: {:?}", - metric_of_witness_block(witness_block), - ); - Ok(()) - } -} diff --git a/prover/src/inner/verifier.rs b/prover/src/inner/verifier.rs deleted file mode 100644 index 0b3a5be138..0000000000 --- a/prover/src/inner/verifier.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::collections::BTreeMap; - -use crate::{common, config::INNER_DEGREE, io::deserialize_vk, zkevm::circuit::TargetCircuit}; -use halo2_proofs::{halo2curves::bn256::Bn256, plonk::keygen_vk, poly::kzg::commitment::ParamsKZG}; -use snark_verifier_sdk::Snark; - -#[derive(Debug)] -pub struct Verifier<'params, C: TargetCircuit> { - // Make it public for testing with inner functions (unnecessary for FFI). - pub inner: common::Verifier<'params, C::Inner>, -} - -impl<'params, C: TargetCircuit> From> for Verifier<'params, C> { - fn from(inner: common::Verifier<'params, C::Inner>) -> Self { - Self { inner } - } -} - -impl<'params, C: TargetCircuit> Verifier<'params, C> { - pub fn from_params_map( - params_map: &'params BTreeMap>, - raw_vk: Option<&[u8]>, - ) -> Self { - let params = params_map.get(&*INNER_DEGREE).expect("should be loaded"); - - let vk = raw_vk.map_or_else( - || { - let dummy_circuit = C::dummy_inner_circuit().expect("gen dummy circuit"); - keygen_vk(params, &dummy_circuit).unwrap() - }, - deserialize_vk::, - ); - - let verifier = common::Verifier::new(params, vk); - verifier.into() - } - - pub fn verify_inner_snark(&self, snark: Snark) -> bool { - self.inner.verify_snark(snark) - } -} diff --git a/prover/src/io.rs b/prover/src/io.rs deleted file mode 100644 index 644e1ac0aa..0000000000 --- a/prover/src/io.rs +++ /dev/null @@ -1,164 +0,0 @@ -use anyhow; -use halo2_proofs::{ - halo2curves::bn256::{Fr, G1Affine}, - plonk::{Circuit, VerifyingKey}, - SerdeFormat, -}; -use serde::de::Deserialize; -use snark_verifier::util::arithmetic::PrimeField; -use snark_verifier_sdk::Snark; -use std::io::BufReader; -use std::{ - fs::File, - io::{Cursor, Read, Write}, - path::{Path, PathBuf}, -}; - -pub fn from_json_file<'de, P, T>(filename: P) -> anyhow::Result -where - P: AsRef, - T: Deserialize<'de>, -{ - let file_path = filename.as_ref(); - if !file_path.exists() { - anyhow::bail!("File {:?} doesn't exist", file_path); - } - - let fd = File::open(file_path)?; - let mut deserializer = serde_json::Deserializer::from_reader(BufReader::new(fd)); - deserializer.disable_recursion_limit(); - let deserializer = serde_stacker::Deserializer::new(&mut deserializer); - - Ok(serde::Deserialize::deserialize(deserializer)?) -} - -pub fn serialize_fr(f: &Fr) -> Vec { - f.to_bytes().to_vec() -} - -pub fn deserialize_fr(buf: Vec) -> Fr { - Fr::from_repr(buf.try_into().unwrap()).unwrap() -} -pub fn serialize_fr_vec(v: &[Fr]) -> Vec> { - v.iter().map(serialize_fr).collect() -} -pub fn deserialize_fr_vec(l2_buf: Vec>) -> Vec { - l2_buf.into_iter().map(deserialize_fr).collect() -} - -pub fn serialize_fr_matrix(m: &[Vec]) -> Vec>> { - m.iter().map(|v| serialize_fr_vec(v.as_slice())).collect() -} - -pub fn deserialize_fr_matrix(l3_buf: Vec>>) -> Vec> { - l3_buf.into_iter().map(deserialize_fr_vec).collect() -} - -pub fn serialize_instance(instance: &[Vec]) -> Vec { - let instances_for_serde = serialize_fr_matrix(instance); - - serde_json::to_vec(&instances_for_serde).unwrap() -} - -pub fn read_all
<P>
(filename: P) -> Vec -where - P: AsRef, -{ - let mut buf = vec![]; - let mut fd = std::fs::File::open(filename).unwrap(); - fd.read_to_end(&mut buf).unwrap(); - buf -} - -pub fn read_file(folder: &mut PathBuf, filename: &str) -> Vec { - let mut buf = vec![]; - - folder.push(filename); - let mut fd = std::fs::File::open(folder.as_path()).unwrap(); - folder.pop(); - - fd.read_to_end(&mut buf).unwrap(); - buf -} - -pub fn try_to_read(dir: &str, filename: &str) -> Option> { - let mut path = PathBuf::from(dir); - path.push(filename); - - if path.exists() { - Some(read_all(path)) - } else { - None - } -} - -pub fn force_to_read(dir: &str, filename: &str) -> Vec { - try_to_read(dir, filename).unwrap_or_else(|| panic!("File {filename} must exist in {dir}")) -} - -pub fn write_file(folder: &mut PathBuf, filename: &str, buf: &[u8]) { - folder.push(filename); - let mut fd = std::fs::File::create(folder.as_path()).unwrap(); - folder.pop(); - - fd.write_all(buf).unwrap(); -} - -pub fn serialize_vk(vk: &VerifyingKey) -> Vec { - let mut result = Vec::::new(); - vk.write(&mut result, SerdeFormat::Processed).unwrap(); - result -} - -pub fn deserialize_vk>(raw_vk: &[u8]) -> VerifyingKey { - VerifyingKey::::read::<_, C>(&mut Cursor::new(raw_vk), SerdeFormat::Processed, ()) - .unwrap_or_else(|_| panic!("failed to deserialize vk with len {}", raw_vk.len())) -} - -pub fn write_snark(file_path: &str, snark: &Snark) { - log::debug!("write_snark to {file_path}"); - let mut fd = std::fs::File::create(file_path).unwrap(); - serde_json::to_writer(&mut fd, snark).unwrap(); - log::debug!("write_snark to {file_path} done"); -} - -pub fn load_snark(file_path: &str) -> anyhow::Result> { - if !Path::new(file_path).exists() { - return Ok(None); - } - - let fd = File::open(file_path)?; - let mut deserializer = serde_json::Deserializer::from_reader(fd); - deserializer.disable_recursion_limit(); - let deserializer = serde_stacker::Deserializer::new(&mut deserializer); - let snark = serde::Deserialize::deserialize(deserializer)?; - Ok(Some(snark)) -} - -pub fn load_instances(buf: &[u8]) -> Vec>> { - let instances: Vec>>> = serde_json::from_reader(buf).unwrap(); - instances - .into_iter() - .map(|l1| { - l1.into_iter() - .map(|l2| { - l2.into_iter() - .map(|buf| Fr::from_bytes(&buf.try_into().unwrap()).unwrap()) - .collect() - }) - .collect() - }) - .collect() -} - -#[ignore] -#[test] -fn test_block_trace_convert() { - let trace_v1: eth_types::l2_types::BlockTrace = - from_json_file("src/testdata/trace_v1_5224657.json").expect("should load"); - let trace_v2: eth_types::l2_types::BlockTraceV2 = trace_v1.into(); - let mut fd = std::fs::File::create("src/testdata/trace_v2_5224657.json").unwrap(); - serde_json::to_writer_pretty(&mut fd, &trace_v2).unwrap(); - // then we can use this command to compare the traces: - // vimdiff <(jq -S "del(.executionResults)|del(.txStorageTraces)" src/testdata/trace_v1_5224657.json) <(jq -S . src/testdata/trace_v2_5224657.json) -} diff --git a/prover/src/lib.rs b/prover/src/lib.rs index cfcd316b59..48293e5c55 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -1,32 +1,90 @@ -/// Meaning of each circuit: -/// inner: first layer EVM super circuit -/// layer1: compression circuit of "inner" -/// layer2: comppresion circuit of "layer1" -/// layer3: batch circuit. Proving many "layer2" circuits, plus blob/kzg handling. -/// layer4: compression circuit of "layer3". Final layer circuit currently. -/// -// TODO: don't always use "pub mod". 
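// The removed io helpers disabled serde_json's recursion limit and routed
// deserialization through serde_stacker, because deeply nested SNARK protocols overflow
// the default limit. The new utils::read_json_deep presumably preserves that behavior;
// a sketch under that assumption:
use std::{fs::File, io::BufReader, path::Path};

fn read_json_deep<P, T>(path: P) -> anyhow::Result<T>
where
    P: AsRef<Path>,
    T: serde::de::DeserializeOwned,
{
    let fd = File::open(path.as_ref())?;
    let mut deserializer = serde_json::Deserializer::from_reader(BufReader::new(fd));
    // Lift the recursion limit and grow the stack on demand via serde_stacker instead.
    deserializer.disable_recursion_limit();
    let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
    Ok(serde::Deserialize::deserialize(deserializer)?)
}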
-// We need to define which types and methods should be public carefully. -pub mod aggregator; -pub mod common; -pub mod config; -pub mod consts; -mod evm; -pub mod inner; -pub mod io; -pub mod proof; -pub mod recursion; -pub mod test; -pub mod types; -pub mod utils; -pub mod zkevm; - -pub use aggregator::{check_chunk_hashes, BatchData, BatchHash, BatchHeader, MAX_AGG_SNARKS}; -pub use common::{ChunkInfo, CompressionCircuit}; +//! This crate exposes an interface to setup provers and verifiers for Scroll's proof generation +//! pipeline. +//! +//! Scroll's proof generation pipeline implements a [layered][config::LayerId] approach where +//! [`SNARK(s)`][snark_verifier_sdk::Snark] from a layer is (are) used in the subsequent layer. +//! +//! A high-level abstraction has been implemented with the notion of: +//! - Block: Consists of a list of txs +//! - Chunk: Composed of a list of contiguous Blocks +//! - Batch: Composed of a list of contiguous Chunks +//! - Bundle: Composed of a list of contiguous Batches +//! +//! The proof generation pipeline starts at the `Chunk` level where the inner proof can be +//! generated either via the halo2-route or the sp1-route, aka [`ChunkKind`]. +//! +//! The pipeline for the halo2-route is described below: +//! 1. [`Inner`][config::LayerId::Inner] layer: SNARK generated by halo2-based +//! [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit] +//! 2. [`Layer-1`][config::LayerId::Layer1]: Inner SNARK compressed by the [`CompressionCircuit`] +//! configured with a wide shape, i.e. higher advices for a lower degree +//! 3. [`Layer-2`][config::LayerId::Layer2]: Layer-1 SNARK compressed by the `CompressionCircuit` +//! with a thin shape, i.e. higher degree for lower advices +//! +//! The pipeline for the sp1-route is described below: +//! 1. [`Inner`][config::LayerId::Inner] layer: Sp1 compressed proof generated via the Sp1 Prover. +//! 2. [`Layer-1`][config::LayerId::Layer1]: Inner STARK is SNARKified using a halo2-backend. +//! 3. [`Layer-2`][config::LayerId::Layer2]: Layer-1 SNARK compressed by the `CompressionCircuit` +//! with a thin shape, i.e. higher degree for lower advices +//! +//! For both of the above described branches, we continue the pipeline with: +//! 4. [`Layer-3`][config::LayerId::Layer3]: List of Layer-2 SNARKs aggregated using the +//! [`BatchCircuit`] +//! 5. [`Layer-4`][config::LayerId::Layer4]: Layer-3 SNARK compressed by the `CompressionCircuit` +//! 6. [`Layer-5`][config::LayerId::Layer5]: Layer-4 SNARKs are recursively aggregated using the +//! [`RecursionCircuit`] +//! 7. [`Layer-6`][config::LayerId::Layer6]: Layer-5 SNARK is compressed by the +//! `CompressionCircuit` with a thin shape, while using Keccak hasher as the transcript digest +//! to allow verification of Layer-6 proof in EVM. 
+ +mod aggregator; +pub use aggregator::{ + check_chunk_hashes, get_blob_bytes, BatchData, BatchHash, BatchHeader, BatchProver, + BatchProverError, BatchVerifier, RecursionTask, MAX_AGG_SNARKS, +}; + +mod common; +pub use common::{ChunkInfo, CompressionCircuit, Prover, Verifier}; + +mod config; +pub use config::*; + +mod consts; +pub use consts::*; + +mod error; +pub use error::*; + +mod proof; +pub use proof::*; + +mod test; +pub use test::{batch_prove, bundle_prove, chunk_prove, inner_prove}; + +mod types; +pub use types::{BatchProvingTask, BundleProvingTask, ChunkProvingTask, WitnessBlock}; + +mod utils; +pub use utils::*; + +mod zkevm; +pub use zkevm::{ + circuit::calculate_row_usage_of_witness_block, circuit::chunk_trace_to_witness_block, + ChunkProver, ChunkProverError, ChunkVerifier, CircuitCapacityChecker, RowUsage, + SubCircuitRowUsage, +}; + +/// Re-export the eth-types crate. pub use eth_types; -pub use eth_types::l2_types::BlockTrace; -pub use evm::deploy_and_call; -pub use proof::{BatchProof, BundleProof, ChunkKind, ChunkProof, EvmProof, Proof}; + +/// Re-export some types from snark-verifier-sdk. pub use snark_verifier_sdk::{CircuitExt, Snark}; -pub use types::{BatchProvingTask, BundleProvingTask, ChunkProvingTask, WitnessBlock}; + +/// Re-export the zkevm-circuits crate. pub use zkevm_circuits; + +use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::collections::BTreeMap; + +/// Alias for convenience. +pub type ParamsMap = BTreeMap>; diff --git a/prover/src/proof/batch.rs b/prover/src/proof/batch.rs index e0f4aec4f8..bceacf5dc7 100644 --- a/prover/src/proof/batch.rs +++ b/prover/src/proof/batch.rs @@ -1,18 +1,22 @@ -use super::{dump_as_json, dump_vk, from_json_file, Proof}; -use crate::types::base64; +use std::path::Path; + use anyhow::Result; -use eth_types::H256; +use eth_types::{base64, H256}; use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey}; use serde_derive::{Deserialize, Serialize}; use snark_verifier::Protocol; use snark_verifier_sdk::Snark; +use crate::utils::read_json_deep; + +use super::{dump_as_json, dump_vk, InnerProof}; + #[derive(Clone, Debug, Deserialize, Serialize)] pub struct BatchProof { #[serde(with = "base64")] pub protocol: Vec, #[serde(flatten)] - proof: Proof, + proof: InnerProof, pub batch_hash: H256, } @@ -32,7 +36,7 @@ impl From<&BatchProof> for Snark { impl BatchProof { pub fn new(snark: Snark, pk: Option<&ProvingKey>, batch_hash: H256) -> Result { let protocol = serde_json::to_vec(&snark.protocol)?; - let proof = Proof::new(snark.proof, &snark.instances, pk); + let proof = InnerProof::new(snark.proof, &snark.instances, pk); Ok(Self { protocol, @@ -42,7 +46,8 @@ impl BatchProof { } pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + let file_path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&file_path)?) 
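// Spelled out, ParamsMap is BTreeMap<u32, ParamsKZG<Bn256>>. A BTreeMap (rather than a
// HashMap) keeps degrees ordered, so a circuit needing degree k can locate the smallest
// loaded SRS with degree >= k and downsize it. A sketch of that lookup (helper name
// hypothetical):
use std::collections::BTreeMap;

fn smallest_params_at_least<V>(params_map: &BTreeMap<u32, V>, k: u32) -> Option<(u32, &V)> {
    // range(k..) iterates keys in ascending order starting from k.
    params_map.range(k..).next().map(|(deg, params)| (*deg, params))
}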
} pub fn dump_vk(&self, dir: &str, name: &str) -> Result<()> { @@ -50,7 +55,7 @@ impl BatchProof { if self.proof.vk.is_empty() { log::warn!("batch proof vk is empty, skip dumping"); } else { - dump_vk(dir, &filename, &self.proof.vk) + dump_vk(dir, &filename, &self.proof.vk)?; } Ok(()) } diff --git a/prover/src/proof/bundle.rs b/prover/src/proof/bundle.rs index 52fa290db9..599edb0e17 100644 --- a/prover/src/proof/bundle.rs +++ b/prover/src/proof/bundle.rs @@ -1,8 +1,10 @@ -use super::{dump_as_json, dump_data, dump_vk, serialize_instance}; -use crate::{utils::short_git_version, Proof}; use anyhow::Result; use serde_derive::{Deserialize, Serialize}; +use crate::utils::short_git_version; + +use super::{dump_as_json, dump_data, dump_vk, serialize_instance, InnerProof}; + // 3 limbs per field element, 4 field elements const ACC_LEN: usize = 12; @@ -15,7 +17,6 @@ const ACC_LEN: usize = 12; // - chain id // - (hi, lo) pending withdraw root // - bundle count - const PI_LEN: usize = 13; const ACC_BYTES: usize = ACC_LEN * 32; @@ -24,11 +25,11 @@ const PI_BYTES: usize = PI_LEN * 32; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct BundleProof { #[serde(flatten)] - on_chain_proof: Proof, + on_chain_proof: InnerProof, } -impl From for BundleProof { - fn from(proof: Proof) -> Self { +impl From for BundleProof { + fn from(proof: InnerProof) -> Self { let instances = proof.instances(); assert_eq!(instances.len(), 1); assert_eq!(instances[0].len(), ACC_LEN + PI_LEN); @@ -46,7 +47,7 @@ impl From for BundleProof { let instances = serialize_instance(&instances[0][ACC_LEN..]); Self { - on_chain_proof: Proof { + on_chain_proof: InnerProof { proof, instances, vk, @@ -76,21 +77,23 @@ impl BundleProof { dir, &format!("pi_{filename}.data"), &self.on_chain_proof.instances, - ); + )?; dump_data( dir, &format!("proof_{filename}.data"), &self.on_chain_proof.proof, - ); + )?; + + dump_vk(dir, &filename, &self.on_chain_proof.vk)?; - dump_vk(dir, &filename, &self.on_chain_proof.vk); + dump_as_json(dir, &filename, &self)?; - dump_as_json(dir, &filename, &self) + Ok(()) } // Recover a `Proof` which follows halo2 semantic of "proof" and "instance", // where "accumulators" are instance instead of proof, not like "onchain proof". - pub fn proof_to_verify(self) -> Proof { + pub fn proof_to_verify(self) -> InnerProof { // raw.proof is accumulator + proof assert!(self.on_chain_proof.proof.len() > ACC_BYTES); // raw.instances is PI @@ -103,9 +106,9 @@ impl BundleProof { instances.extend(self.on_chain_proof.instances); let vk = self.on_chain_proof.vk; - let git_version = Some(short_git_version()); + let git_version = short_git_version(); - Proof { + InnerProof { proof, instances, vk, diff --git a/prover/src/proof/chunk.rs b/prover/src/proof/chunk.rs index 4aefdb4f58..b7f3e117db 100644 --- a/prover/src/proof/chunk.rs +++ b/prover/src/proof/chunk.rs @@ -1,14 +1,18 @@ -use super::{dump_as_json, dump_data, dump_vk, from_json_file, Proof}; -use crate::{types::base64, zkevm::SubCircuitRowUsage}; +use std::path::Path; + use aggregator::ChunkInfo; -use anyhow::{bail, Result}; +use eth_types::base64; use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey}; use serde_derive::{Deserialize, Serialize}; use snark_verifier::Protocol; use snark_verifier_sdk::Snark; +use crate::{utils::read_json_deep, zkevm::SubCircuitRowUsage}; + +use super::{dump_as_json, dump_data, dump_vk, InnerProof}; + /// The innermost SNARK belongs to the following variants. 
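// Putting numbers on the bundle constants above: the accumulator occupies 12 field
// elements and the public input 13, at 32 bytes per element, so ACC_BYTES = 384 and
// PI_BYTES = 416. A sketch of the split that proof_to_verify performs when recovering
// halo2 semantics from an on-chain proof:
const ACC_BYTES_SKETCH: usize = 12 * 32; // 384: KZG accumulator prefix
const PI_BYTES_SKETCH: usize = 13 * 32; // 416: public input bytes

fn recover_halo2_semantics(on_chain_proof: &[u8], on_chain_pi: &[u8]) -> (Vec<u8>, Vec<u8>) {
    assert!(on_chain_proof.len() > ACC_BYTES_SKETCH);
    assert_eq!(on_chain_pi.len(), PI_BYTES_SKETCH);
    // On-chain, the proof bytes lead with the accumulator; under halo2 semantics the
    // accumulator is part of the instances instead.
    let (accumulator, proof) = on_chain_proof.split_at(ACC_BYTES_SKETCH);
    let mut instances = accumulator.to_vec();
    instances.extend_from_slice(on_chain_pi);
    (proof.to_vec(), instances)
}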
-#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq)] pub enum ChunkKind { /// halo2-based SuperCircuit. Halo2, @@ -27,7 +31,7 @@ pub struct ChunkProof { #[serde(with = "base64")] pub protocol: Vec, #[serde(flatten)] - pub proof: Proof, + pub proof: InnerProof, pub chunk_info: ChunkInfo, pub chunk_kind: ChunkKind, #[serde(default)] @@ -37,33 +41,34 @@ pub struct ChunkProof { macro_rules! compare_field { ($desc:expr, $field:ident, $lhs:ident, $rhs:ident) => { if $lhs.$field != $rhs.$field { - bail!( + return Err(format!( "{} chunk different {}: {} != {}", $desc, stringify!($field), $lhs.$field, $rhs.$field - ); + )); } }; } /// Check chunk info is consistent with chunk info embedded inside proof -pub fn compare_chunk_info(name: &str, lhs: &ChunkInfo, rhs: &ChunkInfo) -> Result<()> { +pub fn compare_chunk_info(name: &str, lhs: &ChunkInfo, rhs: &ChunkInfo) -> Result<(), String> { compare_field!(name, chain_id, lhs, rhs); compare_field!(name, prev_state_root, lhs, rhs); compare_field!(name, post_state_root, lhs, rhs); compare_field!(name, withdraw_root, lhs, rhs); compare_field!(name, data_hash, lhs, rhs); if lhs.tx_bytes != rhs.tx_bytes { - bail!( + return Err(format!( "{} chunk different {}: {} != {}", name, "tx_bytes", hex::encode(&lhs.tx_bytes), hex::encode(&rhs.tx_bytes) - ); + )); } + Ok(()) } @@ -74,9 +79,9 @@ impl ChunkProof { chunk_info: ChunkInfo, chunk_kind: ChunkKind, row_usages: Vec, - ) -> Result { + ) -> anyhow::Result { let protocol = serde_json::to_vec(&snark.protocol)?; - let proof = Proof::new(snark.proof, &snark.instances, pk); + let proof = InnerProof::new(snark.proof, &snark.instances, pk); Ok(Self { protocol, @@ -87,26 +92,29 @@ impl ChunkProof { }) } - pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + pub fn from_json_file(dir: &str, name: &str) -> anyhow::Result { + let path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&path)?) } - pub fn dump(&self, dir: &str, name: &str) -> Result<()> { + pub fn dump(&self, dir: &str, name: &str) -> anyhow::Result<()> { let filename = dump_filename(name); // Dump vk and protocol. 
- dump_vk(dir, &filename, &self.proof.vk); - dump_data(dir, &format!("chunk_{filename}.protocol"), &self.protocol); - dump_as_json(dir, &filename, &self) + dump_vk(dir, &filename, &self.proof.vk)?; + dump_data(dir, &format!("chunk_{filename}.protocol"), &self.protocol)?; + dump_as_json(dir, &filename, &self)?; + + Ok(()) } - pub fn to_snark(self) -> Snark { + pub fn to_snark(&self) -> Snark { let instances = self.proof.instances(); let protocol = serde_json::from_slice::>(&self.protocol).unwrap(); Snark { protocol, - proof: self.proof.proof, + proof: self.proof.proof.clone(), instances, } } diff --git a/prover/src/proof/evm.rs b/prover/src/proof/evm.rs index 92f17204ec..0b4fc04131 100644 --- a/prover/src/proof/evm.rs +++ b/prover/src/proof/evm.rs @@ -1,4 +1,5 @@ -use super::{dump_as_json, dump_vk, from_json_file, Proof}; +use std::path::Path; + use anyhow::Result; use halo2_proofs::{ halo2curves::bn256::{Fr, G1Affine}, @@ -6,9 +7,13 @@ use halo2_proofs::{ }; use serde_derive::{Deserialize, Serialize}; +use crate::utils::read_json_deep; + +use super::{dump_as_json, dump_vk, InnerProof}; + #[derive(Clone, Debug, Deserialize, Serialize)] pub struct EvmProof { - pub proof: Proof, + pub proof: InnerProof, pub num_instance: Vec, } @@ -19,7 +24,7 @@ impl EvmProof { num_instance: Vec, pk: Option<&ProvingKey>, ) -> Result { - let proof = Proof::new(proof, instances, pk); + let proof = InnerProof::new(proof, instances, pk); Ok(Self { proof, @@ -28,14 +33,17 @@ impl EvmProof { } pub fn from_json_file(dir: &str, name: &str) -> Result { - from_json_file(dir, &dump_filename(name)) + let path = Path::new(dir).join(dump_filename(name)); + Ok(read_json_deep(&path)?) } pub fn dump(&self, dir: &str, name: &str) -> Result<()> { let filename = dump_filename(name); - dump_vk(dir, &filename, &self.proof.vk); - dump_as_json(dir, &filename, &self) + dump_vk(dir, &filename, &self.proof.vk)?; + dump_as_json(dir, &filename, &self)?; + + Ok(()) } } diff --git a/prover/src/proof.rs b/prover/src/proof/mod.rs similarity index 63% rename from prover/src/proof.rs rename to prover/src/proof/mod.rs index 74e640468c..4ec4c1d2b0 100644 --- a/prover/src/proof.rs +++ b/prover/src/proof/mod.rs @@ -1,43 +1,60 @@ -use crate::{ - io::{deserialize_fr, deserialize_vk, serialize_fr, serialize_vk, write_file}, - types::base64, - utils::short_git_version, -}; +use std::{fs::File, path::Path}; + use anyhow::Result; +use eth_types::base64; use halo2_proofs::{ halo2curves::bn256::{Fr, G1Affine}, plonk::{Circuit, ProvingKey, VerifyingKey}, }; use serde_derive::{Deserialize, Serialize}; use snark_verifier_sdk::Snark; -use std::{fs::File, path::PathBuf}; -mod batch; -mod bundle; -mod chunk; -mod evm; +use crate::utils::{ + deploy_and_call, deserialize_fr, deserialize_vk, serialize_fr, serialize_vk, short_git_version, + write, +}; +mod batch; pub use batch::BatchProof; + +mod bundle; pub use bundle::BundleProof; + +mod chunk; pub use chunk::{compare_chunk_info, ChunkKind, ChunkProof}; + +mod evm; pub use evm::EvmProof; +mod proof_v2; +pub use proof_v2::*; + +/// Proof extracted from [`Snark`]. #[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct Proof { +pub struct InnerProof { + /// The raw bytes of the proof in the [`Snark`]. + /// + /// Serialized using base64 format in order to not bloat the JSON-encoded proof dump. #[serde(with = "base64")] - proof: Vec, + pub proof: Vec, + /// The public values, aka instances of this [`Snark`]. 
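// The #[serde(with = "base64")] attribute on the proof fields expects a module exposing
// serialize/deserialize functions; eth_types::base64 is assumed to follow the standard
// with-module shape sketched here (base64 crate 0.21+ API), keeping raw proof bytes
// compact in JSON dumps:
mod base64_sketch {
    use base64::prelude::*;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    pub fn serialize<S: Serializer>(bytes: &Vec<u8>, serializer: S) -> Result<S::Ok, S::Error> {
        // Encode raw bytes as a base64 string field.
        BASE64_STANDARD.encode(bytes).serialize(serializer)
    }

    pub fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Vec<u8>, D::Error> {
        // Decode the base64 string field back into raw bytes.
        let encoded = String::deserialize(deserializer)?;
        BASE64_STANDARD.decode(encoded).map_err(serde::de::Error::custom)
    }
}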
#[serde(with = "base64")] - instances: Vec, + pub instances: Vec, + /// The raw bytes of the [`VerifyingKey`] of the [`Circuit`] used to generate the [`Snark`]. #[serde(with = "base64")] - vk: Vec, - pub git_version: Option, + pub vk: Vec, + /// The git ref of the codebase. + /// + /// Generally useful for debug reasons to know the exact commit using which this proof was + /// generated. + pub git_version: String, } -impl Proof { +impl InnerProof { pub fn new(proof: Vec, instances: &[Vec], pk: Option<&ProvingKey>) -> Self { let instances = serialize_instances(instances); let vk = pk.map_or_else(Vec::new, |pk| serialize_vk(pk.get_vk())); - let git_version = Some(short_git_version()); + let git_version = short_git_version(); Self { proof, @@ -47,16 +64,12 @@ impl Proof { } } - pub fn from_json_file(dir: &str, filename: &str) -> Result { - from_json_file(dir, filename) - } - pub fn from_snark(snark: Snark, vk: Vec) -> Self { let proof = snark.proof; let instances = serialize_instances(&snark.instances); - let git_version = Some(short_git_version()); + let git_version = short_git_version(); - Proof { + Self { proof, instances, vk, @@ -65,17 +78,17 @@ impl Proof { } pub fn dump(&self, dir: &str, filename: &str) -> Result<()> { - dump_vk(dir, filename, &self.vk); + dump_vk(dir, filename, &self.vk)?; + dump_as_json(dir, filename, &self)?; - dump_as_json(dir, filename, &self) + Ok(()) } pub fn evm_verify(&self, deployment_code: Vec) -> bool { let instances = self.instances(); let proof = self.proof().to_vec(); let calldata = snark_verifier::loader::evm::encode_calldata(&instances, &proof); - let result = crate::evm::deploy_and_call(deployment_code, calldata); - result.is_ok() + deploy_and_call(deployment_code, calldata).is_ok() } pub fn instances(&self) -> Vec> { @@ -101,25 +114,20 @@ impl Proof { } } -pub fn dump_as_json(dir: &str, filename: &str, proof: &P) -> Result<()> { - // Write full proof as json. +pub fn dump_as_json(dir: &str, filename: &str, proof: &T) -> Result<()> { let mut fd = File::create(dump_proof_path(dir, filename))?; serde_json::to_writer(&mut fd, proof)?; Ok(()) } -pub fn dump_data(dir: &str, filename: &str, data: &[u8]) { - write_file(&mut PathBuf::from(dir), filename, data); -} - -pub fn dump_vk(dir: &str, filename: &str, raw_vk: &[u8]) { - dump_data(dir, &format!("vk_{filename}.vkey"), raw_vk); +pub fn dump_data(dir: &str, filename: &str, data: &[u8]) -> Result<()> { + let path = Path::new(dir).join(filename); + Ok(write(&path, data)?) } -pub fn from_json_file<'de, P: serde::Deserialize<'de>>(dir: &str, filename: &str) -> Result
<P>
{ - let file_path = dump_proof_path(dir, filename); - crate::io::from_json_file(file_path) +pub fn dump_vk(dir: &str, filename: &str, raw_vk: &[u8]) -> Result<()> { + dump_data(dir, &format!("vk_{filename}.vkey"), raw_vk) } fn dump_proof_path(dir: &str, filename: &str) -> String { diff --git a/prover/src/proof/proof_v2.rs b/prover/src/proof/proof_v2.rs new file mode 100644 index 0000000000..e54de279b6 --- /dev/null +++ b/prover/src/proof/proof_v2.rs @@ -0,0 +1,393 @@ +use std::path::{Path, PathBuf}; + +use aggregator::ChunkInfo; +use eth_types::{base64, H256}; +use halo2_proofs::{ + halo2curves::bn256::{Fr, G1Affine}, + plonk::ProvingKey, +}; +use serde_derive::{Deserialize, Serialize}; +use snark_verifier::Protocol; +use snark_verifier_sdk::Snark; + +use crate::{ + deserialize_fr, read_json_deep, serialize_vk, short_git_version, write, write_json, + zkevm::RowUsage, BatchProverError, ChunkKind, ProverError, +}; + +use super::serialize_instances; + +/// Proof generated at certain checkpoints in the proof generation pipeline. +/// +/// Variants of [`ProofV2`] are [`ChunkProofV2`], [`BatchProofV2`] and [`BundleProofV2`], that are +/// the output of proof generation at [`Layer-2`][crate::LayerId::Layer2], [`Layer-4`][crate::LayerId::Layer4] +/// and [`Layer-6`][crate::LayerId::Layer6] respectively. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ProofV2 { + /// The inner data that differs between chunk proofs, batch proofs and bundle proofs. + #[serde(flatten)] + pub inner: Inner, + /// The raw bytes of the proof in the [`Snark`]. + /// + /// Serialized using base64 format in order to not bloat the JSON-encoded proof dump. + #[serde(with = "base64")] + pub proof: Vec, + /// The public values, aka instances of this [`Snark`]. + #[serde(with = "base64")] + pub instances: Vec, + /// The raw bytes of the [`VerifyingKey`] of the [`Circuit`] used to generate the [`Snark`]. + #[serde(with = "base64")] + pub vk: Vec, + /// The git ref of the codebase. + /// + /// Generally useful for debug reasons to know the exact commit using which this proof was + /// generated. + pub git_version: String, +} + +impl TryFrom<&ProofV2> for Snark { + type Error = ProverError; + + fn try_from(value: &ProofV2) -> Result { + let protocol = value + .inner + .protocol() + .ok_or(ProverError::Custom(String::from( + "protocol either not found or cannot be deserialized successfully", + )))?; + + let instances = value.deserialize_instances(); + + let proof = value.proof.to_vec(); + + Ok(Self { + protocol, + proof, + instances, + }) + } +} + +impl ProofV2 { + /// Construct a new proof given the inner metadata, proving key and the + /// [`Snark`][snark_verifier_sdk::Snark]. + pub fn new( + snark: Snark, + proving_key: Option<&ProvingKey>, + inner: Inner, + ) -> Result { + let instances = serialize_instances(&snark.instances); + let vk = proving_key.map_or_else(Vec::new, |pk| serialize_vk(pk.get_vk())); + + Ok(Self { + inner, + proof: snark.proof, + instances, + vk, + git_version: short_git_version(), + }) + } + + /// Read and deserialize the proof. + pub fn from_json>(dir: P, suffix: &str) -> Result { + let path = Self::path_proof(dir, suffix); + read_json_deep(path) + } + + /// Serialize the proof and other peripheral data, before dumping in the provided directory. + pub fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + // Dump the verifying key. + write(Self::path_vk(&dir, suffix), &self.vk)?; + + // Dump the proof itself. 
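// Note the byte order in ProofV2::deserialize_instances (defined just below): each
// 32-byte chunk is reversed before deserialize_fr, i.e. the dumped instance bytes are
// big-endian while Fr::from_repr expects the little-endian representation. A minimal
// illustration of that conversion, assuming the big-endian dump convention:
use halo2_proofs::halo2curves::bn256::Fr;
use snark_verifier::util::arithmetic::PrimeField;

fn fr_from_be_bytes(be: &[u8; 32]) -> Fr {
    let mut le = *be;
    le.reverse(); // big-endian bytes on disk -> little-endian repr for Fr
    Option::from(Fr::from_repr(le)).expect("canonical field element")
}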
+ write_json(Self::path_proof(&dir, suffix), &self)?; + + // Dump any other data for the inner data. + self.inner.dump(&dir, suffix)?; + + Ok(()) + } + + /// Deserialize public values in the native scalar field. + fn deserialize_instances(&self) -> Vec> { + vec![self + .instances + .chunks(32) + .map(|bytes| deserialize_fr(bytes.iter().rev().cloned().collect())) + .collect::>()] + } + + /// Path to the JSON-encoded proof in the directory. + fn path_proof>(dir: P, suffix: &str) -> PathBuf { + Inner::path_proof(dir, suffix) + } + + /// Path to the encoded [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in the directory. + fn path_vk>(dir: P, suffix: &str) -> PathBuf { + Inner::path_vk(dir, suffix) + } +} + +pub trait Proof: Clone + std::fmt::Debug + serde::Serialize { + /// Name of the proof layer. + const NAME: &'static str; + + ///
<dir>
/proof_{NAME}_{suffix}.json + fn path_proof>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("proof_{}_{}.json", Self::NAME, suffix)) + } + + /// /vk_{NAME}_{suffix}.vkey + fn path_vk>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("vk_{}_{}.vkey", Self::NAME, suffix)) + } + + /// /protocol_{NAME}_{suffix}.protocol + fn path_protocol>(dir: P, suffix: &str) -> PathBuf { + dir.as_ref() + .join(format!("protocol_{}_{}.protocol", Self::NAME, suffix,)) + } + + /// Returns the SNARK protocol, if any in the metadata. + fn protocol(&self) -> Option>; + + /// Dump relevant fields from the proof metadata in the provided directory. + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError>; +} + +/// Alias for convenience. +pub type ChunkProofV2 = ProofV2; + +/// Alias for convenience. +pub type BatchProofV2 = ProofV2; + +/// Alias for convenience. +pub type BundleProofV2 = ProofV2; + +/// The number of scalar field elements used to encode the KZG accumulator. +/// +/// The accumulator is essentially an `(lhs, rhs)` pair of [`G1Affine`] points, where each +/// `G1Affine` point comprises of 2 base field elements `(x, y)`. But since each base field +/// element is split into 3 limbs each, where each limb is our native scalar [`Fr`], in total we +/// have 12 scalar field elements to represent this accumulator. +const ACCUMULATOR_LEN: usize = 12; + +/// Each scalar field [`Fr`] element is encoded using 32 bytes. +const ACCUMULATOR_BYTES: usize = ACCUMULATOR_LEN * 32; + +/// The public input (excluding the accumulator) for the outermost +/// [`Layer-6`][crate::LayerId::Layer6] circuit is basically the public input carried forward from +/// the `Layer-5` [`RecursionCircuit`][aggregator::RecursionCircuit]. +/// +/// They are the following: +/// - Fr: Preprocessed Digest +/// - Fr: Recursion Round +/// - (Fr, Fr): Pre State Root (finalized) +/// - (Fr, Fr): Pre Batch Hash (finalized) +/// - (Fr, Fr): Post State Root (pending finalization) +/// - (Fr, Fr): Post Batch Hash (pending finalization) +/// - Fr: Chain ID +/// - (Fr, Fr): Post Withdraw Root (pending finalization) +/// +/// In total these are 13 scalar field elements. +const PUBLIC_INPUT_LEN: usize = 13; + +/// Each scalar field [`Fr`] element is encoded using 32 bytes. +const PUBLIC_INPUT_BYTES: usize = PUBLIC_INPUT_LEN * 32; + +impl BundleProofV2 { + /// Construct a new proof given raw proof and instance values. Generally to be used in the case + /// of final EVM proof using the [`gen_evm_verifier`][snark_verifier_sdk::gen_evm_verifier] + /// method. + pub fn new_from_raw(proof: &[u8], instances: &[u8], vk: &[u8]) -> Result { + // Sanity check on the number of public input bytes. + let expected_len = ACCUMULATOR_BYTES + PUBLIC_INPUT_BYTES; + let got_len = instances.len(); + if got_len != expected_len { + return Err(BatchProverError::PublicInputsMismatch(expected_len, got_len).into()); + } + + Ok(Self { + inner: BundleProofV2Metadata, + proof: proof.to_vec(), + instances: instances.to_vec(), + vk: vk.to_vec(), + git_version: short_git_version(), + }) + } + + /// Encode the calldata for the proof verification transaction to be made on-chain. 
+ /// + /// [ public_input_bytes | accumulator_bytes | proof ] + pub fn calldata(&self) -> Vec { + std::iter::empty() + .chain(self.instances.iter()) + .chain(self.proof.iter()) + .cloned() + .collect::>() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ChunkProofV2Metadata { + /// The [`Protocol`][snark_verifier::Protocol] for the SNARK construction for the chunk proof. + #[serde(with = "base64")] + protocol: Vec, + /// The chunk proof can be for either the halo2 or sp1 routes. + chunk_kind: ChunkKind, + /// The EVM execution traces as a result of executing all txs in the chunk. + chunk_info: ChunkInfo, + /// Optional [Circuit-Capacity Checker][ccc] row usage statistics from the halo2-route. + /// + /// Is `None` for the sp1-route. + /// + /// [ccc]: crate::zkevm::CircuitCapacityChecker + row_usage: Option, +} + +impl ChunkProofV2Metadata { + /// Construct new chunk proof metadata. + pub fn new( + snark: &Snark, + chunk_kind: ChunkKind, + chunk_info: ChunkInfo, + row_usage: Option, + ) -> Result { + let protocol = serde_json::to_vec(&snark.protocol)?; + + Ok(Self { + protocol, + chunk_kind, + chunk_info, + row_usage, + }) + } + /// Get the chunk info embedded + pub fn chunk_info(&self) -> &ChunkInfo { + &self.chunk_info + } + /// Get the chunk kind + pub fn chunk_kind(&self) -> ChunkKind { + self.chunk_kind + } + /// Get the chunk protocol + pub fn protocol(&self) -> &Vec { + &self.protocol + } +} + +impl Proof for ChunkProofV2Metadata { + const NAME: &'static str = "chunk"; + + fn protocol(&self) -> Option> { + serde_json::from_slice(&self.protocol).ok() + } + + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + write(Self::path_protocol(&dir, suffix), &self.protocol)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct BatchProofV2Metadata { + /// The [`Protocol`][snark_verifier::Protocol] for the SNARK construction for the chunk proof. + #[serde(with = "base64")] + protocol: Vec, + /// The hash of [`BatchHeader`][aggregator::BatchHeader] of the batch. + pub batch_hash: H256, +} + +impl BatchProofV2Metadata { + /// Create new batch proof metadata. + pub fn new(snark: &Snark, batch_hash: H256) -> Result { + let protocol = serde_json::to_vec(&snark.protocol)?; + + Ok(Self { + protocol, + batch_hash, + }) + } +} + +impl Proof for BatchProofV2Metadata { + const NAME: &'static str = "batch"; + + fn protocol(&self) -> Option> { + serde_json::from_slice(&self.protocol).ok() + } + + fn dump>(&self, dir: P, suffix: &str) -> Result<(), ProverError> { + write(Self::path_protocol(&dir, suffix), &self.protocol)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, Default)] +pub struct BundleProofV2Metadata; + +impl Proof for BundleProofV2Metadata { + const NAME: &'static str = "bundle"; + + fn protocol(&self) -> Option> { + None + } + + fn dump>(&self, _dir: P, _suffix: &str) -> Result<(), ProverError> { + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use tempdir::TempDir; + + use crate::{deploy_and_call, read, read_json, BundleProofV2, EvmProof}; + + #[test] + fn bundle_proof_backwards_compat() -> anyhow::Result<()> { + // Read [`EvmProof`] from test data. + let evm_proof = read_json::<_, EvmProof>("test_data/evm-proof.json")?; + + // Build bundle proofs. 
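// In concrete numbers: new_from_raw above expects ACCUMULATOR_BYTES + PUBLIC_INPUT_BYTES
// = 12 * 32 + 13 * 32 = 800 bytes of instances, so calldata() emits an 800-byte fixed
// prefix followed by the variable-length proof. A sketch of that length sanity check:
const EXPECTED_INSTANCE_BYTES: usize = (12 + 13) * 32; // 800

fn check_instance_len(instances: &[u8]) -> Result<(), String> {
    let got = instances.len();
    if got != EXPECTED_INSTANCE_BYTES {
        return Err(format!(
            "public input bytes mismatch: expected={EXPECTED_INSTANCE_BYTES}, got={got}"
        ));
    }
    Ok(())
}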
+ let bundle_proof_v2 = BundleProofV2::new_from_raw( + &evm_proof.proof.proof, + &evm_proof.proof.instances, + &evm_proof.proof.vk, + )?; + let bundle_proof = crate::BundleProof::from(evm_proof.proof); + + assert_eq!(bundle_proof.calldata(), bundle_proof_v2.calldata()); + + Ok(()) + } + + #[test] + fn verify_bundle_proof() -> anyhow::Result<()> { + // Create a tmp test directory. + let dir = TempDir::new("proof_v2")?; + + // Read [`EvmProof`] from test data. + let evm_proof = read_json::<_, EvmProof>("test_data/evm-proof.json")?; + let verifier = read("test_data/evm-verifier.bin")?; + + // Build bundle proof v2. + let bundle_proof = BundleProofV2::new_from_raw( + &evm_proof.proof.proof, + &evm_proof.proof.instances, + &evm_proof.proof.vk, + )?; + + // Dump the bundle proof v2. + bundle_proof.dump(&dir, "suffix")?; + + // Verify the bundle proof v2 with EVM verifier contract. + assert!(deploy_and_call(verifier, bundle_proof.calldata()).is_ok()); + + Ok(dir.close()?) + } +} diff --git a/prover/src/test/batch.rs b/prover/src/test/batch.rs index 15ecf06248..74a949799d 100644 --- a/prover/src/test/batch.rs +++ b/prover/src/test/batch.rs @@ -1,22 +1,20 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::{ + path::PathBuf, + sync::{LazyLock, Mutex}, +}; use crate::{ aggregator::{Prover, Verifier}, - config::{LayerId, AGG_DEGREES}, + config::{LayerId, BATCH_PROVER_DEGREES}, consts::DEPLOYMENT_CODE_FILENAME, - io::force_to_read, types::BundleProvingTask, - utils::read_env_var, - BatchProvingTask, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + utils::{force_read, read_env_var}, + BatchProvingTask, ParamsMap, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); - crate::common::Prover::load_params_map(¶ms_dir, &AGG_DEGREES) + crate::common::Prover::load_params_map(¶ms_dir, &BATCH_PROVER_DEGREES) }); static BATCH_PROVER: LazyLock> = LazyLock::new(|| { @@ -42,7 +40,8 @@ pub fn batch_prove(test: &str, batch: BatchProvingTask) { let params = prover.prover_impl.params(LayerId::Layer4.degree()); - let deployment_code = force_to_read(&assets_dir, &DEPLOYMENT_CODE_FILENAME); + let path = PathBuf::from(assets_dir).join(DEPLOYMENT_CODE_FILENAME.clone()); + let deployment_code = force_read(&path); let pk = prover .prover_impl @@ -55,7 +54,7 @@ pub fn batch_prove(test: &str, batch: BatchProvingTask) { verifier }; let verified = verifier.verify_batch_proof(&proof); - assert!(verified, "{test}: failed to verify batch proof"); + assert!(verified.is_ok(), "{test}: failed to verify batch proof"); log::info!("{test}: batch-prove END"); } @@ -75,7 +74,8 @@ pub fn bundle_prove(test: &str, bundle: BundleProvingTask) { let params = prover.prover_impl.params(LayerId::Layer4.degree()); - let deployment_code = force_to_read(&assets_dir, &DEPLOYMENT_CODE_FILENAME); + let path = PathBuf::from(assets_dir).join(DEPLOYMENT_CODE_FILENAME.clone()); + let deployment_code = force_read(&path); let pk = prover .prover_impl @@ -88,8 +88,8 @@ pub fn bundle_prove(test: &str, bundle: BundleProvingTask) { verifier }; - let verified = verifier.verify_bundle_proof(proof); - assert!(verified, "{test}: failed to verify bundle proof"); + let verified = verifier.verify_bundle_proof(&proof); + assert!(verified.is_ok(), "{test}: failed to verify bundle proof"); log::info!("{test}: bundle-prove END"); } diff --git a/prover/src/test/chunk.rs 
b/prover/src/test/chunk.rs index 4d7c29d1e4..bf468a6987 100644 --- a/prover/src/test/chunk.rs +++ b/prover/src/test/chunk.rs @@ -1,19 +1,15 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::sync::{LazyLock, Mutex}; use crate::{ - config::ZKEVM_DEGREES, + config::CHUNK_PROVER_DEGREES, utils::read_env_var, zkevm::{Prover, Verifier}, - ChunkProof, ChunkProvingTask, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + ChunkProofV2, ChunkProvingTask, ParamsMap, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); - crate::common::Prover::load_params_map(¶ms_dir, &ZKEVM_DEGREES) + crate::common::Prover::load_params_map(¶ms_dir, &CHUNK_PROVER_DEGREES) }); static CHUNK_PROVER: LazyLock> = LazyLock::new(|| { @@ -24,13 +20,13 @@ static CHUNK_PROVER: LazyLock> = LazyLock::new(|| { Mutex::new(prover) }); -pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProof { +pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProofV2 { log::info!("{desc}: chunk-prove BEGIN"); let mut prover = CHUNK_PROVER.lock().expect("poisoned chunk-prover"); let proof = prover - .gen_chunk_proof(chunk, None, None, None) + .gen_halo2_chunk_proof(chunk, None, None, None) .unwrap_or_else(|err| panic!("{desc}: failed to generate chunk snark: {err}")); log::info!("{desc}: generated chunk proof"); @@ -41,8 +37,8 @@ pub fn chunk_prove(desc: &str, chunk: ChunkProvingTask) -> ChunkProof { verifier }; - let verified = verifier.verify_chunk_proof(proof.clone()); - assert!(verified, "{desc}: failed to verify chunk snark"); + let verified = verifier.verify_chunk_proof(&proof); + assert!(verified.is_ok(), "{desc}: failed to verify chunk snark"); log::info!("{desc}: chunk-prove END"); diff --git a/prover/src/test/inner.rs b/prover/src/test/inner.rs index fe6e90d5df..ea249cb219 100644 --- a/prover/src/test/inner.rs +++ b/prover/src/test/inner.rs @@ -1,18 +1,14 @@ -use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG}; +use std::sync::{LazyLock, Mutex}; use crate::{ common::{Prover, Verifier}, config::{LayerId, INNER_DEGREE}, - utils::{gen_rng, read_env_var}, + utils::read_env_var, zkevm::circuit::{SuperCircuit, TargetCircuit}, - WitnessBlock, -}; -use std::{ - collections::BTreeMap, - sync::{LazyLock, Mutex}, + ParamsMap, WitnessBlock, }; -static PARAMS_MAP: LazyLock>> = LazyLock::new(|| { +static PARAMS_MAP: LazyLock = LazyLock::new(|| { let params_dir = read_env_var("SCROLL_PROVER_PARAMS_DIR", "./test_params".to_string()); crate::common::Prover::load_params_map(¶ms_dir, &[*INNER_DEGREE]) }); @@ -29,9 +25,8 @@ pub fn inner_prove(test: &str, witness_block: &WitnessBlock) { let mut prover = INNER_PROVER.lock().expect("poisoned inner-prover"); - let rng = gen_rng(); let snark = prover - .gen_inner_snark::("inner", rng, witness_block) + .load_or_gen_inner_snark("", "inner", witness_block, None) .unwrap_or_else(|err| panic!("{test}: failed to generate inner snark: {err}")); log::info!("{test}: generated inner snark"); diff --git a/prover/src/test.rs b/prover/src/test/mod.rs similarity index 99% rename from prover/src/test.rs rename to prover/src/test/mod.rs index 982dba2a11..2f120d6e2d 100644 --- a/prover/src/test.rs +++ b/prover/src/test/mod.rs @@ -1,7 +1,8 @@ mod batch; -mod chunk; -mod inner; - pub use batch::{batch_prove, bundle_prove}; + +mod chunk; pub use chunk::chunk_prove; + +mod inner; 
 pub use inner::inner_prove;
diff --git a/prover/src/types.rs b/prover/src/types.rs
index 891ac2495b..6b0757d7b0 100644
--- a/prover/src/types.rs
+++ b/prover/src/types.rs
@@ -1,76 +1,114 @@
 use aggregator::{BatchHeader, ChunkInfo, MAX_AGG_SNARKS};
-use eth_types::l2_types::BlockTrace;
+use eth_types::{base64, l2_types::BlockTrace};
 use serde::{Deserialize, Serialize};
 use zkevm_circuits::evm_circuit::witness::Block;
 
+use crate::{BatchProofV2, ChunkProofV2};
+
+/// Alias for convenience.
 pub type WitnessBlock = Block;
 
+/// Helper type to deserialize a JSON-encoded RPC result for [`BlockTrace`].
 #[derive(Deserialize, Serialize, Default, Debug, Clone)]
 pub struct BlockTraceJsonRpcResult {
+    /// The value of the "result" key.
     pub result: BlockTrace,
 }
 
-pub use eth_types::base64;
-
-use crate::{BatchProof, ChunkKind, ChunkProof};
-
+/// Defines a proving task for chunk proof generation.
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct ChunkProvingTask {
-    /// Prover can check `chunk_info` is consistent with block traces
+    /// Optional chunk data encapsulated within the proving task.
+    ///
+    /// As part of a sanity check, the prover reconstructs the chunk data using the EVM execution
+    /// traces from all blocks in the chunk and compares it against the supplied chunk data.
    pub chunk_info: Option<ChunkInfo>,
+    /// The EVM execution traces for all blocks in the chunk.
    pub block_traces: Vec<BlockTrace>,
-    pub chunk_kind: ChunkKind,
 }
 
 impl ChunkProvingTask {
-    pub fn new(block_traces: Vec<BlockTrace>, chunk_kind: ChunkKind) -> Self {
+    /// Create a new chunk proving task given the chunk trace.
+    pub fn new(block_traces: Vec<BlockTrace>) -> Self {
         Self {
             block_traces,
             chunk_info: None,
-            chunk_kind,
         }
     }
+
+    /// Returns true if there are no block traces in the chunk.
     pub fn is_empty(&self) -> bool {
         self.block_traces.is_empty()
     }
-    /// Used for cache/load proof from disk
+
+    /// An identifier for the chunk. It is the block number of the first block in the chunk.
+    ///
+    /// This is used as a file descriptor to save to (load from) disk in order to avoid proof
+    /// generation if the same proof/SNARK is already found on disk.
     pub fn identifier(&self) -> String {
         self.block_traces
             .first()
             .map_or(0, |trace: &BlockTrace| {
-                trace.header.number.expect("block num").low_u64()
+                trace
+                    .header
+                    .number
+                    .expect("block number should be present")
+                    .low_u64()
             })
             .to_string()
     }
 }
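As a hypothetical usage sketch (editor's illustration), this mirrors how the testool change at the end of this diff builds a task via `prover::ChunkProvingTask::new(...)`; obtaining the traces is assumed:

```rust
// Build a chunk proving task from traces fetched elsewhere (e.g. an RPC
// response deserialized via BlockTraceJsonRpcResult).
fn build_task(traces: Vec<BlockTrace>) -> ChunkProvingTask {
    let task = ChunkProvingTask::new(traces);
    assert!(!task.is_empty(), "chunk must contain at least one block");
    // identifier() doubles as the on-disk cache key (first block number).
    log::info!("proving chunk id={}", task.identifier());
    task
}
```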
+/// Defines a proving task for batch proof generation.
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct BatchProvingTask {
-    pub chunk_proofs: Vec<ChunkProof>,
+    /// Chunk proofs for the contiguous list of chunks within the batch.
+    pub chunk_proofs: Vec<ChunkProofV2>,
+    /// The [`BatchHeader`], as computed on-chain for this batch.
+    ///
+    /// Ref: https://github.com/scroll-tech/scroll-contracts/blob/2ac4f3f7e090d7127db4b13b3627cb3ce2d762bc/src/libraries/codec/BatchHeaderV3Codec.sol
    pub batch_header: BatchHeader<MAX_AGG_SNARKS>,
+    /// The bytes encoding the batch data that will finally be published on-chain in the form of
+    /// an EIP-4844 blob.
    #[serde(with = "base64")]
    pub blob_bytes: Vec<u8>,
 }
 
 impl BatchProvingTask {
-    /// Used for cache/load proof from disk
+    /// An identifier for the batch. It is the public input hash of the last chunk in the batch.
+    ///
+    /// This is used as a file descriptor to save to (load from) disk in order to avoid proof
+    /// generation if the same proof/SNARK is already found on disk.
     pub fn identifier(&self) -> String {
         self.chunk_proofs
             .last()
             .unwrap()
-            .chunk_info
+            .inner
+            .chunk_info()
             .public_input_hash()
             .to_low_u64_le()
             .to_string()
     }
 }
 
+/// Defines a proving task for bundle proof generation.
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct BundleProvingTask {
-    pub batch_proofs: Vec<BatchProof>,
+    /// The [`BatchProofs`][BatchProofV2] for the contiguous list of batches to be bundled
+    /// together.
+    pub batch_proofs: Vec<BatchProofV2>,
 }
 
 impl BundleProvingTask {
+    /// An identifier for the bundle. It is the batch hash of the last batch in the bundle.
+    ///
+    /// This is used as a file descriptor to save to (load from) disk in order to avoid proof
+    /// generation if the same proof/SNARK is already found on disk.
     pub fn identifier(&self) -> String {
-        self.batch_proofs.last().unwrap().batch_hash.to_string()
+        self.batch_proofs
+            .last()
+            .unwrap()
+            .inner
+            .batch_hash
+            .to_string()
     }
 }
diff --git a/prover/src/evm.rs b/prover/src/utils/evm.rs
similarity index 78%
rename from prover/src/evm.rs
rename to prover/src/utils/evm.rs
index fa5ad6f180..d9030199d1 100644
--- a/prover/src/evm.rs
+++ b/prover/src/utils/evm.rs
@@ -1,52 +1,58 @@
-use crate::{io::write_file, EvmProof};
+use std::path::{Path, PathBuf};
+
 use halo2_proofs::{
     halo2curves::bn256::{Bn256, Fr, G1Affine},
     plonk::VerifyingKey,
     poly::kzg::commitment::ParamsKZG,
 };
+use revm::{
+    primitives::{Env, ExecutionResult, Output, SpecId, TxEnv, TxKind},
+    Evm, InMemoryDB,
+};
 use snark_verifier::pcs::kzg::{Bdfg21, Kzg};
 use snark_verifier_sdk::CircuitExt;
-use std::{path::PathBuf, str::FromStr};
+
+use crate::{utils::write, BatchProverError, EvmProof, ProverError};
 
 /// Dump YUL and binary bytecode(use `solc` in PATH) to output_dir.
-/// Panic if error encountered.
+///
+/// Errors if the verifier contract cannot successfully verify the [`EvmProof`].
 pub fn gen_evm_verifier<C: CircuitExt<Fr>>(
     params: &ParamsKZG<Bn256>,
     vk: &VerifyingKey<G1Affine>,
     evm_proof: &EvmProof,
     output_dir: Option<&str>,
-) {
-    let yul_file_path = output_dir.map(|dir| {
-        let mut path = PathBuf::from_str(dir).unwrap();
-        path.push("evm_verifier.yul");
-        path
-    });
+) -> Result<(), ProverError> {
+    // YUL contract code will be dumped to the following path.
+    let yul_path = output_dir.map(|dir| PathBuf::from(dir).join("evm_verifier.yul"));
 
     // Generate deployment code and dump YUL file.
     let deployment_code = snark_verifier_sdk::gen_evm_verifier::<C, Kzg<Bn256, Bdfg21>>(
         params,
         vk,
         evm_proof.num_instance.clone(),
-        yul_file_path.as_deref(),
+        yul_path.as_deref(),
     );
 
+    // Write the contract binary if an output directory was specified.
     if let Some(dir) = output_dir {
-        // Dump bytecode.
-        let mut dir = PathBuf::from_str(dir).unwrap();
-        write_file(&mut dir, "evm_verifier.bin", &deployment_code);
+        let path = Path::new(dir).join("evm_verifier.bin");
+        write(&path, &deployment_code)?;
     }
 
-    let success = evm_proof.proof.evm_verify(deployment_code);
-    assert!(success);
+    if evm_proof.proof.evm_verify(deployment_code) {
+        Ok(())
+    } else {
+        Err(ProverError::BatchProverError(
+            BatchProverError::SanityEVMVerifier,
+        ))
+    }
 }
 
-use revm::{
-    primitives::{Env, ExecutionResult, Output, SpecId, TxEnv, TxKind},
-    Evm, InMemoryDB,
-};
-
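An editor's sketch (not part of the patch) of wiring these helpers together, mirroring the `verify_bundle_proof` test earlier in this diff; the `u64` gas type of `deploy_and_call`, defined just below, is an assumption:

```rust
// Deploy the generated verifier byte-code, then call it with the proof calldata.
fn sanity_verify_on_evm(deployment_code: Vec<u8>, calldata: Vec<u8>) {
    match deploy_and_call(deployment_code, calldata) {
        Ok(gas_used) => log::info!("EVM verification OK, gas_used={gas_used}"),
        Err(err) => panic!("EVM verification failed: {err}"),
    }
}
```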
 /// Deploy contract and then call with calldata.
-/// Returns gas_used of call to deployed contract if both transactions are successful.
+///
+/// Returns the gas used in the call to the deployed verifier contract.
 pub fn deploy_and_call(deployment_code: Vec<u8>, calldata: Vec<u8>) -> Result<u64, String> {
     let mut env = Box::<Env>::default();
     env.tx = TxEnv {
diff --git a/prover/src/utils/io.rs b/prover/src/utils/io.rs
new file mode 100644
index 0000000000..2b56e21b9f
--- /dev/null
+++ b/prover/src/utils/io.rs
@@ -0,0 +1,122 @@
+use std::{
+    fs,
+    io::{Cursor, Write},
+    path::{Path, PathBuf},
+};
+
+use halo2_proofs::{
+    halo2curves::bn256::{Fr, G1Affine},
+    plonk::{Circuit, VerifyingKey},
+    SerdeFormat,
+};
+use serde::{
+    de::{Deserialize, DeserializeOwned},
+    Serialize,
+};
+use snark_verifier::util::arithmetic::PrimeField;
+
+use crate::ProverError;
+
+pub fn serialize_fr(f: &Fr) -> Vec<u8> {
+    f.to_bytes().to_vec()
+}
+
+pub fn deserialize_fr(buf: Vec<u8>) -> Fr {
+    Fr::from_repr(buf.try_into().unwrap()).unwrap()
+}
+
+pub fn serialize_fr_vec(v: &[Fr]) -> Vec<Vec<u8>> {
+    v.iter().map(serialize_fr).collect()
+}
+
+pub fn deserialize_fr_vec(l2_buf: Vec<Vec<u8>>) -> Vec<Fr> {
+    l2_buf.into_iter().map(deserialize_fr).collect()
+}
+
+pub fn serialize_fr_matrix(m: &[Vec<Fr>]) -> Vec<Vec<Vec<u8>>> {
+    m.iter().map(|v| serialize_fr_vec(v.as_slice())).collect()
+}
+
+pub fn deserialize_fr_matrix(l3_buf: Vec<Vec<Vec<u8>>>) -> Vec<Vec<Fr>> {
+    l3_buf.into_iter().map(deserialize_fr_vec).collect()
+}
+
+pub fn serialize_instance(instance: &[Vec<Fr>]) -> Vec<u8> {
+    let instances_for_serde = serialize_fr_matrix(instance);
+
+    serde_json::to_vec(&instances_for_serde).unwrap()
+}
+
+pub fn write_file(folder: &mut PathBuf, filename: &str, buf: &[u8]) {
+    folder.push(filename);
+    let mut fd = std::fs::File::create(folder.as_path()).unwrap();
+    folder.pop();
+
+    fd.write_all(buf).unwrap();
+}
+
+pub fn serialize_vk(vk: &VerifyingKey<G1Affine>) -> Vec<u8> {
+    let mut result = Vec::<u8>::new();
+    vk.write(&mut result, SerdeFormat::Processed).unwrap();
+    result
+}
+
+pub fn deserialize_vk<C: Circuit<Fr>>(raw_vk: &[u8]) -> VerifyingKey<G1Affine> {
+    VerifyingKey::<G1Affine>::read::<_, C>(&mut Cursor::new(raw_vk), SerdeFormat::Processed, ())
+        .unwrap_or_else(|_| panic!("failed to deserialize vk with len {}", raw_vk.len()))
+}
+
+/// Read bytes from a file.
+pub fn read<P: AsRef<Path>>(path: P) -> Result<Vec<u8>, ProverError> {
+    let path = path.as_ref();
+    fs::read(path).map_err(|source| ProverError::IoReadWrite {
+        source,
+        path: path.into(),
+    })
+}
+
+/// Wrapper to read a JSON file.
+pub fn read_json<P: AsRef<Path>, T: DeserializeOwned>(path: P) -> Result<T, ProverError> {
+    let path = path.as_ref();
+    let bytes = read(path)?;
+    serde_json::from_slice(&bytes).map_err(|source| ProverError::JsonReadWrite {
+        source,
+        path: path.to_path_buf(),
+    })
+}
+
+/// Wrapper to read JSON that might be deeply nested.
+pub fn read_json_deep<P: AsRef<Path>, T: DeserializeOwned>(path: P) -> Result<T, ProverError> {
+    let fd = fs::File::open(path)?;
+    let mut deserializer = serde_json::Deserializer::from_reader(fd);
+    deserializer.disable_recursion_limit();
+    let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
+    Ok(Deserialize::deserialize(deserializer)?)
+}
+
+/// Try to read bytes from a file.
+///
+/// Returns an optional value, which is `None` in case an i/o error is encountered.
+pub fn try_read<P: AsRef<Path>>(path: P) -> Option<Vec<u8>> {
+    self::read(path).ok()
+}
+
+/// Read bytes from a file.
+///
+/// Panics if any i/o error is encountered.
+pub fn force_read<P: AsRef<Path> + std::fmt::Debug>(path: P) -> Vec<u8> {
+    self::read(path.as_ref()).unwrap_or_else(|_| panic!("no file found! path={path:?}"))
+}
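An editor's sketch of a JSON round-trip through the helpers in this new io module (`write_json` is defined just below); the `Meta` type is hypothetical:

```rust
#[derive(serde::Serialize, serde::Deserialize)]
struct Meta {
    id: u64,
}

// Serialize to disk with write_json, then load it back with read_json.
fn roundtrip(dir: &std::path::Path) -> Result<Meta, ProverError> {
    let path = dir.join("meta.json");
    write_json(&path, &Meta { id: 7 })?;
    read_json(&path)
}
```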
+
+/// Wrapper functionality to write bytes to a file.
+pub fn write<P: AsRef<Path>>(path: P, data: &[u8]) -> Result<(), ProverError> {
+    let path = path.as_ref();
+    fs::write(path, data).map_err(|source| ProverError::IoReadWrite {
+        source,
+        path: path.into(),
+    })
+}
+
+/// Serialize the provided type to JSON format and write to the given path.
+pub fn write_json<P: AsRef<Path>, T: Serialize>(path: P, value: &T) -> Result<(), ProverError> {
+    let mut writer = fs::File::create(path)?;
+    Ok(serde_json::to_writer(&mut writer, value)?)
+}
diff --git a/prover/src/utils.rs b/prover/src/utils/mod.rs
similarity index 94%
rename from prover/src/utils.rs
rename to prover/src/utils/mod.rs
index c34535ed5a..eff19a0468 100644
--- a/prover/src/utils.rs
+++ b/prover/src/utils/mod.rs
@@ -1,8 +1,11 @@
-#![allow(deprecated)]
-use crate::{
-    types::BlockTraceJsonRpcResult,
-    zkevm::circuit::{block_traces_to_witness_block, print_chunk_stats},
+use std::{
+    fs::{self, metadata, File},
+    io::{BufReader, Read},
+    path::{Path, PathBuf},
+    str::FromStr,
+    sync::Once,
 };
+
 use anyhow::{bail, Result};
 use chrono::Utc;
 use eth_types::l2_types::BlockTrace;
@@ -19,18 +22,20 @@ use log4rs::{
 use rand::{Rng, SeedableRng};
 use rand_xorshift::XorShiftRng;
 use std::fmt::Debug;
-use std::{
-    fs::{self, metadata, File},
-    io::{BufReader, Read},
-    path::{Path, PathBuf},
-    str::FromStr,
-    sync::Once,
-};
 use zkevm_circuits::evm_circuit::witness::Block;
 
+use crate::types::BlockTraceJsonRpcResult;
+
+mod evm;
+pub use evm::*;
+
+mod io;
+pub use io::*;
+
 pub static LOGGER: Once = Once::new();
 
 pub const DEFAULT_SERDE_FORMAT: SerdeFormat = SerdeFormat::RawBytesUnchecked;
+
 pub const GIT_VERSION: &str = git_version!(args = ["--abbrev=7", "--always"]);
 pub const PARAMS_G2_SECRET_POWER: &str = "(Fq2 { c0: 0x17944351223333f260ddc3b4af45191b856689eda9eab5cbcddbbe570ce860d2, c1: 0x186282957db913abd99f91db59fe69922e95040603ef44c0bd7aa3adeef8f5ac }, Fq2 { c0: 0x297772d34bc9aa8ae56162486363ffe417b02dc7e8c207fc2cc20203e67a02ad, c1: 0x298adc7396bd3865cbf6d6df91bae406694e6d2215baa893bdeadb63052895f4 })";
@@ -127,14 +132,6 @@ pub fn metric_of_witness_block(block: &Block) -> ChunkMetric {
     }
 }
 
-pub fn chunk_trace_to_witness_block(chunk_trace: Vec<BlockTrace>) -> Result<WitnessBlock> {
-    if chunk_trace.is_empty() {
-        bail!("Empty chunk trace");
-    }
-    print_chunk_stats(&chunk_trace);
-    block_traces_to_witness_block(chunk_trace)
-}
-
 // Return the output dir.
pub fn init_env_and_log(id: &str) -> String { dotenvy::dotenv().ok(); diff --git a/prover/src/zkevm.rs b/prover/src/zkevm.rs deleted file mode 100644 index 6a53ee7bbc..0000000000 --- a/prover/src/zkevm.rs +++ /dev/null @@ -1,17 +0,0 @@ -#[cfg(feature = "scroll")] -mod capacity_checker; -pub mod circuit; -mod prover; -mod verifier; - -pub use self::prover::Prover; -#[cfg(feature = "scroll")] -pub use capacity_checker::{CircuitCapacityChecker, RowUsage}; -use serde::{Deserialize, Serialize}; -pub use verifier::Verifier; - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct SubCircuitRowUsage { - pub name: String, - pub row_number: usize, -} diff --git a/prover/src/zkevm/capacity_checker.rs b/prover/src/zkevm/capacity_checker.rs index add4a0a492..35cbe18947 100644 --- a/prover/src/zkevm/capacity_checker.rs +++ b/prover/src/zkevm/capacity_checker.rs @@ -1,5 +1,4 @@ -use super::circuit::{calculate_row_usage_of_witness_block, finalize_builder}; -use bus_mapping::circuit_input_builder::{self, CircuitInputBuilder}; +use bus_mapping::circuit_input_builder::{Blocks, CircuitInputBuilder}; use eth_types::{ l2_types::BlockTrace, state_db::{CodeDB, StateDB}, @@ -14,7 +13,13 @@ use zkevm_circuits::{ super_circuit::params::{get_sub_circuit_limit_and_confidence, get_super_circuit_params}, }; -pub use super::SubCircuitRowUsage; +use super::circuit::{calculate_row_usage_of_witness_block, finalize_builder}; + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct SubCircuitRowUsage { + pub name: String, + pub row_number: usize, +} #[derive(Debug, Clone, Deserialize, Serialize)] pub struct RowUsage { @@ -130,10 +135,7 @@ impl CircuitCapacityChecker { self.acc_row_usage.clone() } } - pub fn estimate_circuit_capacity( - &mut self, - trace: BlockTrace, - ) -> Result { + pub fn estimate_circuit_capacity(&mut self, trace: BlockTrace) -> anyhow::Result { let (mut estimate_builder, codedb_prev) = if let Some((code_db, sdb, mpt_state)) = self.builder_ctx.take() { // here we create a new builder for another (sealed) witness block @@ -141,8 +143,7 @@ impl CircuitCapacityChecker { // the previous one and do not use zktrie state, // notice the prev_root in current builder may be not invalid (since the state has // changed but we may not update it in light mode) - let mut builder_block = - circuit_input_builder::Blocks::init(trace.chain_id, get_super_circuit_params()); + let mut builder_block = Blocks::init(trace.chain_id, get_super_circuit_params()); builder_block.start_l1_queue_index = trace.start_l1_queue_index; builder_block.prev_state_root = mpt_state .as_ref() diff --git a/prover/src/zkevm/circuit/builder.rs b/prover/src/zkevm/circuit/builder.rs index 8cd13a08eb..b61c6bf7cd 100644 --- a/prover/src/zkevm/circuit/builder.rs +++ b/prover/src/zkevm/circuit/builder.rs @@ -1,6 +1,4 @@ -use crate::zkevm::SubCircuitRowUsage; -use anyhow::{bail, Result}; -use bus_mapping::circuit_input_builder::CircuitInputBuilder; +use bus_mapping::{circuit_input_builder::CircuitInputBuilder, Error as CircuitBuilderError}; use eth_types::{l2_types::BlockTrace, ToWord}; use itertools::Itertools; use mpt_zktrie::state::ZkTrieHash; @@ -10,14 +8,20 @@ use zkevm_circuits::{ witness::block_convert, }; +use crate::zkevm::{ChunkProverError, SubCircuitRowUsage}; + +/// Returns the row-usage for all sub-circuits in the process of applying the entire witness block +/// to the super circuit. 
 pub fn calculate_row_usage_of_witness_block(
     witness_block: &Block,
-) -> Result<Vec<SubCircuitRowUsage>> {
+) -> Result<Vec<SubCircuitRowUsage>, ChunkProverError> {
     let rows = ScrollSuperCircuit::min_num_rows_block_subcircuits(witness_block);
     // Check whether we need to "estimate" poseidon sub circuit row usage
     if witness_block.mpt_updates.smt_traces.is_empty() {
-        bail!("light mode no longer supported");
+        return Err(ChunkProverError::Custom(
+            "light mode no longer supported".to_string(),
+        ));
     }
     let first_block_num = witness_block.first_block_number();
@@ -34,45 +38,80 @@ pub fn calculate_row_usage_of_witness_block(
             .sum::<usize>(),
         rows,
     );
-    let row_usage_details: Vec<SubCircuitRowUsage> = rows
+
+    Ok(rows
         .into_iter()
         .map(|x| SubCircuitRowUsage {
             name: x.name,
            row_number: x.row_num_real,
         })
-        .collect_vec();
-    Ok(row_usage_details)
+        .collect_vec())
 }
 
-pub fn print_chunk_stats(block_traces: &[BlockTrace]) {
-    let num_blocks = block_traces.len();
-    let num_txs = block_traces
-        .iter()
-        .map(|b| b.transactions.len())
-        .sum::<usize>();
-    let total_tx_len = block_traces
-        .iter()
-        .flat_map(|b| b.transactions.iter().map(|t| t.data.len()))
-        .sum::<usize>();
-    log::info!(
-        "check capacity of block traces, num_block {}, num_tx {}, tx total len {}",
-        num_blocks,
-        num_txs,
-        total_tx_len
-    );
+/// Generate a dummy witness block to eventually generate proving key and verifying key for the
+/// target circuit without going through the expensive process of actual witness assignment.
+pub fn dummy_witness_block() -> Block {
+    let dummy_chain_id = 0;
+    zkevm_circuits::witness::dummy_witness_block(dummy_chain_id)
 }
 
-pub fn dummy_witness_block() -> Result<Block> {
-    log::debug!("generate dummy witness block");
-    let dummy_chain_id = 0;
-    let witness_block = zkevm_circuits::witness::dummy_witness_block(dummy_chain_id);
-    log::debug!("generate dummy witness block done");
+/// Build a witness block from block traces for all blocks in the chunk.
+pub fn chunk_trace_to_witness_block(
+    chunk_trace: Vec<BlockTrace>,
+) -> Result<Block, ChunkProverError> {
+    if chunk_trace.is_empty() {
+        return Err(ChunkProverError::Custom("Empty chunk trace".to_string()));
+    }
+    print_chunk_stats(&chunk_trace);
+    block_traces_to_witness_block(chunk_trace)
+}
+
+/// Finalize building and return the witness block.
+pub fn finalize_builder(builder: &mut CircuitInputBuilder) -> Result<Block, CircuitBuilderError> {
+    builder.finalize_building()?;
+
+    log::debug!("converting builder.block to witness block");
+
+    let mut witness_block = block_convert(&builder.block, &builder.code_db)?;
+    log::debug!(
+        "witness_block built with circuits_params {:?}",
+        witness_block.circuits_params
+    );
+
+    if let Some(state) = &mut builder.mpt_init_state {
+        if *state.root() != [0u8; 32] {
+            log::debug!("apply_mpt_updates");
+            witness_block.apply_mpt_updates_and_update_mpt_state(state);
+            log::debug!("apply_mpt_updates done");
+        } else {
+            // Empty state root means circuit capacity checking, or dummy witness block for key gen?
+            log::info!("empty state root, skip apply_mpt_updates");
+        }
+
+        let root_after = witness_block.post_state_root().to_word();
+        log::debug!(
+            "finish replay trie updates, root {}, root after {:#x?}",
+            hex::encode(state.root()),
+            root_after,
+        );
+        // switch state to new root
+        let mut new_root_hash = ZkTrieHash::default();
+        root_after.to_big_endian(&mut new_root_hash);
+        assert!(state.switch_to(new_root_hash));
+    }
+
+    Ok(witness_block)
+}
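An editor's sketch of how the row-usage helper above feeds the circuit-capacity check that `gen_halo2_chunk_proof` performs later in this patch; `witness_block` is assumed given, and `RowUsage` with its `is_ok` flag comes from `crate::zkevm`:

```rust
// Aggregate per-sub-circuit row usage and fail fast on capacity overflow.
fn assert_capacity(witness_block: &Block) -> Result<(), ChunkProverError> {
    let details = calculate_row_usage_of_witness_block(witness_block)?;
    let row_usage = crate::zkevm::RowUsage::from_row_usage_details(details);
    if !row_usage.is_ok {
        // Same error the chunk prover returns on overflow.
        return Err(ChunkProverError::CircuitCapacityOverflow(row_usage));
    }
    Ok(())
}
```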
-pub fn block_traces_to_witness_block(block_traces: Vec<BlockTrace>) -> Result<WitnessBlock> {
+/// Build a witness block from block traces for all blocks in the chunk.
+///
+/// Kind of a duplication of [`self::chunk_trace_to_witness_block`], so should eventually be
+/// deprecated.
+fn block_traces_to_witness_block(block_traces: Vec<BlockTrace>) -> Result<Block, ChunkProverError> {
     if block_traces.is_empty() {
-        bail!("use dummy_witness_block instead");
+        return Err(ChunkProverError::Custom(
+            "empty block traces! hint: use dummy_witness_block instead".to_string(),
+        ));
     }
     let block_num = block_traces.len();
     let total_tx_num = block_traces
@@ -80,12 +119,12 @@ pub fn block_traces_to_witness_block(block_traces: Vec<BlockTrace>) -> Result<
         .sum::<usize>();
     if total_tx_num > MAX_TXS {
-        bail!(
+        return Err(ChunkProverError::Custom(format!(
             "tx num overflow {}, block range {} to {}",
             total_tx_num,
             block_traces[0].header.number.unwrap(),
             block_traces[block_num - 1].header.number.unwrap()
-        );
+        )));
     }
     log::info!(
         "block_traces_to_witness_block, block num {}, tx num {}",
@@ -113,39 +152,20 @@ pub fn block_traces_to_witness_block(block_traces: Vec<BlockTrace>) -> Result<
 
     Ok(witness_block)
 }
 
-pub fn finalize_builder(builder: &mut CircuitInputBuilder) -> Result<Block> {
-    builder.finalize_building()?;
-
-    log::debug!("converting builder.block to witness block");
-
-    let mut witness_block = block_convert(&builder.block, &builder.code_db)?;
-    log::debug!(
-        "witness_block built with circuits_params {:?}",
-        witness_block.circuits_params
+fn print_chunk_stats(block_traces: &[BlockTrace]) {
+    let num_blocks = block_traces.len();
+    let num_txs = block_traces
+        .iter()
+        .map(|b| b.transactions.len())
+        .sum::<usize>();
+    let total_tx_len = block_traces
+        .iter()
+        .flat_map(|b| b.transactions.iter().map(|t| t.data.len()))
+        .sum::<usize>();
+    log::info!(
+        "check capacity of block traces, num_block {}, num_tx {}, tx total len {}",
+        num_blocks,
+        num_txs,
+        total_tx_len
     );
-
-    if let Some(state) = &mut builder.mpt_init_state {
-        if *state.root() != [0u8; 32] {
-            log::debug!("apply_mpt_updates");
-            witness_block.apply_mpt_updates_and_update_mpt_state(state);
-            log::debug!("apply_mpt_updates done");
-        } else {
-            // Empty state root means circuit capacity checking, or dummy witness block for key gen?
-            log::info!("empty state root, skip apply_mpt_updates");
-        }
-
-        let root_after = witness_block.post_state_root().to_word();
-        log::debug!(
-            "finish replay trie updates, root {}, root after {:#x?}",
-            hex::encode(state.root()),
-            root_after,
-        );
-        // switch state to new root
-        let mut new_root_hash = ZkTrieHash::default();
-        root_after.to_big_endian(&mut new_root_hash);
-        assert!(state.switch_to(new_root_hash));
-    }
-
-    Ok(witness_block)
 }
diff --git a/prover/src/zkevm/circuit.rs b/prover/src/zkevm/circuit/mod.rs
similarity index 70%
rename from prover/src/zkevm/circuit.rs
rename to prover/src/zkevm/circuit/mod.rs
index f936282157..d563b849e6 100644
--- a/prover/src/zkevm/circuit.rs
+++ b/prover/src/zkevm/circuit/mod.rs
@@ -1,34 +1,29 @@
-use builder::dummy_witness_block;
 use halo2_proofs::halo2curves::bn256::Fr;
 use snark_verifier_sdk::CircuitExt;
 use zkevm_circuits::{super_circuit::params::ScrollSuperCircuit, util::SubCircuit, witness};
 
 mod builder;
-pub use self::builder::{
-    block_traces_to_witness_block, calculate_row_usage_of_witness_block, finalize_builder,
-    print_chunk_stats,
+pub use builder::{
    calculate_row_usage_of_witness_block, chunk_trace_to_witness_block, finalize_builder,
 };
 
-pub use zkevm_circuits::super_circuit::params::{MAX_CALLDATA, MAX_INNER_BLOCKS, MAX_TXS};
-
 /// A target circuit trait is a wrapper of inner circuit, with convenient APIs for building
 /// circuits from traces.
 pub trait TargetCircuit {
     /// The actual inner circuit that implements Circuit trait.
type Inner: CircuitExt + SubCircuit; - /// Generate a dummy circuit with an empty trace. - /// This is useful for generating vk and pk. + /// Generate a dummy circuit with an empty trace. This is useful for generating vk and pk. fn dummy_inner_circuit() -> anyhow::Result where Self: Sized, { - let witness_block = dummy_witness_block()?; + let witness_block = builder::dummy_witness_block(); let circuit = Self::from_witness_block(&witness_block)?; Ok(circuit) } - /// Build the inner circuit and the instances from the witness block + /// Build the inner circuit and the instances from the witness block. fn from_witness_block(witness_block: &witness::Block) -> anyhow::Result where Self: Sized, diff --git a/prover/src/zkevm/error.rs b/prover/src/zkevm/error.rs new file mode 100644 index 0000000000..a970145d08 --- /dev/null +++ b/prover/src/zkevm/error.rs @@ -0,0 +1,39 @@ +/// Various errors potentially encountered during proof generation. +#[derive(thiserror::Error, Debug)] +pub enum ChunkProverError { + /// Indicates that the halo2-based [`SuperCircuit`][super_circ] does not have sufficient + /// capacity to populate block traces from all the blocks in the chunk. The error encapsulates + /// the [`RowUsage`][row_usage] observed from populating the chunk. + /// + /// [super_circ]: zkevm_circuits::super_circuit::SuperCircuit + /// [row_usage]: crate::zkevm::RowUsage + #[error("halo2 circuit-capacity exceeded")] + CircuitCapacityOverflow(crate::zkevm::RowUsage), + /// Represents an error propagated from the [`bus_mapping`] crate. + #[error(transparent)] + CircuitBuilder(#[from] bus_mapping::Error), + /// Represents the [`halo2 error`][halo2_error] being propagated. + /// + /// [halo2_error]: halo2_proofs::plonk::Error + #[error(transparent)] + Halo2(#[from] halo2_proofs::plonk::Error), + /// Error indicating that the verifying key found post proof generation does not match the + /// expected verifying key. + #[error("verifying key mismatch: found={0}, expected={1}")] + VerifyingKeyMismatch(String, String), + /// Error indicating that no verifying key was found post proof generation. + #[error("verifying key not found: expected={0}")] + VerifyingKeyNotFound(String), + /// Error indicating that proof verification failed. + #[error("proof verification failure")] + Verification, + /// Represents all other custom errors. + #[error("custom error: {0}")] + Custom(String), +} + +impl From for ChunkProverError { + fn from(value: String) -> Self { + Self::Custom(value) + } +} diff --git a/prover/src/zkevm/mod.rs b/prover/src/zkevm/mod.rs new file mode 100644 index 0000000000..f1069bf660 --- /dev/null +++ b/prover/src/zkevm/mod.rs @@ -0,0 +1,21 @@ +#[cfg(feature = "scroll")] +mod capacity_checker; +#[cfg(feature = "scroll")] +pub use capacity_checker::{CircuitCapacityChecker, RowUsage, SubCircuitRowUsage}; + +pub mod circuit; + +mod error; +pub use error::ChunkProverError; + +mod prover; +pub use prover::Prover; + +mod verifier; +pub use verifier::Verifier; + +/// Alias for convenience. +pub type ChunkProver<'a> = Prover<'a>; + +/// Alias for convenience. 
+pub type ChunkVerifier<'a> = Verifier<'a>;
diff --git a/prover/src/zkevm/prover.rs b/prover/src/zkevm/prover.rs
index 40fb805ed9..43f6e421d9 100644
--- a/prover/src/zkevm/prover.rs
+++ b/prover/src/zkevm/prover.rs
@@ -1,33 +1,56 @@
-use std::collections::BTreeMap;
+use std::{collections::BTreeMap, path::PathBuf};
 
-use crate::{
-    common, config::LayerId, consts::CHUNK_VK_FILENAME, io::try_to_read, proof::compare_chunk_info,
-    types::ChunkProvingTask, utils::chunk_trace_to_witness_block,
-    zkevm::circuit::calculate_row_usage_of_witness_block, ChunkProof,
-};
 use aggregator::ChunkInfo;
-use anyhow::Result;
 use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
+use snark_verifier_sdk::Snark;
+
+use crate::{
+    common,
+    config::LayerId,
+    consts::CHUNK_VK_FILENAME,
+    proof::compare_chunk_info,
+    types::ChunkProvingTask,
+    utils::try_read,
+    zkevm::{
+        circuit::{calculate_row_usage_of_witness_block, chunk_trace_to_witness_block},
+        ChunkProverError, ChunkVerifier, RowUsage,
+    },
+    ChunkKind, ChunkProofV2, ChunkProofV2Metadata, ProverError,
+};
 
+/// Prover responsible for generating [`chunk proofs`][ChunkProofV2].
 #[derive(Debug)]
 pub struct Prover<'params> {
-    // Make it public for testing with inner functions (unnecessary for FFI).
+    /// Encapsulates the common prover.
     pub prover_impl: common::Prover<'params>,
-    verifier: Option<Verifier<'params>>,
+    /// The chunk proof verifier.
+    ///
+    /// The verifier is optional in dev-scenarios where the verifier is generated on-the-fly. For
+    /// production environments, we already have the verifying key available.
+    verifier: Option<ChunkVerifier<'params>>,
+    /// The [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in its raw bytes form, as read
+    /// from disk. For the same reasons as the [Self::verifier] field, this too is optional.
     raw_vk: Option<Vec<u8>>,
 }
 
 impl<'params> Prover<'params> {
+    /// Construct a chunk prover given a map of degree to KZG setup params and a path to a
+    /// directory to find stored assets.
     pub fn from_params_and_assets(
         params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>,
         assets_dir: &str,
     ) -> Self {
+        // Try to read the verifying key from disk, but don't panic if not found.
+        let path = PathBuf::from(assets_dir).join(CHUNK_VK_FILENAME.clone());
+        let raw_vk = try_read(&path);
+
+        // Build the inner prover.
         let prover_impl = common::Prover::from_params_map(params_map);
-        let raw_vk = try_to_read(assets_dir, &CHUNK_VK_FILENAME);
 
+        // Build an optional verifier if the verifying key has been located on disk.
         let verifier = if raw_vk.is_none() {
             log::warn!(
-                "zkevm-prover: {} doesn't exist in {}",
+                "ChunkProver setup without verifying key (dev mode): {} doesn't exist in {}",
                 *CHUNK_VK_FILENAME,
                 assets_dir
            );
@@ -38,115 +61,203 @@ impl<'params> Prover<'params> {
                 assets_dir,
             ))
         };
+
         Self {
             prover_impl,
-            raw_vk,
             verifier,
+            raw_vk,
         }
     }
 
+    /// Returns the optional [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] in its raw form.
     pub fn get_vk(&self) -> Option<Vec<u8>> {
         self.prover_impl
             .raw_vk(LayerId::Layer2.id())
             .or_else(|| self.raw_vk.clone())
     }
 
-    /// Generate proof for a chunk. This method usually takes ~10minutes.
-    /// Meaning of each parameter:
-    ///   output_dir:
-    ///     If `output_dir` is not none, the dir will be used to save/load proof or intermediate results.
-    ///     If proof or intermediate results can be loaded from `output_dir`,
-    ///     then they will not be computed again.
-    ///     If `output_dir` is not none, computed intermediate results and proof will be written
-    ///     into this dir.
-    ///   chunk_identifier:
-    ///     used to distinguish different chunk files located in output_dir.
-    ///     If it is not set, default value(first block number of this chuk) will be used.
-    ///   id:
-    ///     TODO(zzhang). clean this. I think it can only be None or Some(0)...
-    pub fn gen_chunk_proof(
+    /// Generate a proof for a chunk via the halo2-route, i.e. the inner SNARK is generated
+    /// using the halo2-based [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit].
+    pub fn gen_halo2_chunk_proof(
         &mut self,
         chunk: ChunkProvingTask,
-        chunk_identifier: Option<&str>,
+        chunk_id: Option<&str>,
         inner_id: Option<&str>,
         output_dir: Option<&str>,
-    ) -> Result<ChunkProof> {
+    ) -> Result<ChunkProofV2, ProverError> {
+        // Panic if the chunk is empty, i.e. no traces were found.
         assert!(!chunk.is_empty());
 
-        let chunk_identifier =
-            chunk_identifier.map_or_else(|| chunk.identifier(), |name| name.to_string());
-
-        let chunk_proof = match output_dir
-            .and_then(|output_dir| ChunkProof::from_json_file(output_dir, &chunk_identifier).ok())
-        {
-            Some(proof) => Ok(proof),
-            None => {
-                let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?;
-                let row_usage = calculate_row_usage_of_witness_block(&witness_block)?;
-                log::info!("Got witness block");
-
-                let chunk_info = ChunkInfo::from_witness_block(&witness_block, false);
-                if let Some(chunk_info_input) = chunk.chunk_info.as_ref() {
-                    compare_chunk_info(
-                        &format!("gen_chunk_proof {chunk_identifier:?}"),
-                        &chunk_info,
-                        chunk_info_input,
-                    )?;
-                }
-                let snark = self.prover_impl.load_or_gen_final_chunk_snark(
-                    &chunk_identifier,
-                    &witness_block,
-                    inner_id,
-                    output_dir,
-                )?;
-
-                self.check_vk();
-
-                let result = ChunkProof::new(
-                    snark,
-                    self.prover_impl.pk(LayerId::Layer2.id()),
-                    chunk_info,
-                    chunk.chunk_kind,
-                    row_usage,
-                );
-
-                if let (Some(output_dir), Ok(proof)) = (output_dir, &result) {
-                    proof.dump(output_dir, &chunk_identifier)?;
-                }
+        // The chunk identifier is either the specified identifier or we calculate it
+        // on-the-fly.
+        let chunk_id = chunk_id.map_or_else(|| chunk.identifier(), |name| name.to_string());
 
-                result
+        // Try to locate a cached chunk proof for the same identifier.
+        if let Some(dir) = output_dir.as_ref() {
+            if let Ok(chunk_proof) = ChunkProofV2::from_json(dir, &chunk_id) {
+                return Ok(chunk_proof);
             }
-        }?;
+        }
+
+        // Generate the proof if it was not found in the cache.
+        //
+        // Construct the chunk as witness and check circuit capacity for the halo2-based super
+        // circuit.
+        let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?;
+        let sub_circuit_row_usages = calculate_row_usage_of_witness_block(&witness_block)?;
+        let row_usage = RowUsage::from_row_usage_details(sub_circuit_row_usages.clone());
+
+        // If the circuit-capacity checker (ccc) overflows, early-return with the appropriate
+        // error.
+        if !row_usage.is_ok {
+            return Err(ChunkProverError::CircuitCapacityOverflow(row_usage).into());
+        }
+
+        // Build the chunk information required by the inner circuit for SNARK generation.
+        let chunk_info_reconstructed = ChunkInfo::from_witness_block(&witness_block, false);
+
+        // Sanity check: if chunk information was already provided, make sure it exactly
+        // matches the chunk information reconstructed from the block traces of the chunk.
+        if let Some(chunk_info_provided) = chunk.chunk_info.as_ref() {
+            compare_chunk_info(
+                &format!("gen_halo2_chunk_proof {chunk_id:?}"),
+                &chunk_info_reconstructed,
+                chunk_info_provided,
+            )
+            .map_err(ChunkProverError::Custom)?;
+        }
+
+        // Generate the final Layer-2 SNARK.
+        let snark = self
+            .prover_impl
+            .load_or_gen_final_chunk_snark(&chunk_id, &witness_block, inner_id, output_dir)
+            .map_err(|e| ChunkProverError::Custom(e.to_string()))?;
+
+        // Sanity check on the verifying key used at Layer-2.
+        self.check_vk()?;
+
+        // Construct the chunk proof.
+        let chunk_proof_metadata = ChunkProofV2Metadata::new(
+            &snark,
+            ChunkKind::Halo2,
+            chunk_info_reconstructed,
+            Some(row_usage),
+        )?;
+        let chunk_proof = ChunkProofV2::new(
+            snark,
+            self.prover_impl.pk(LayerId::Layer2.id()),
+            chunk_proof_metadata,
+        )?;
+
+        // If the output directory was provided, write the proof to disk.
+        if let Some(output_dir) = output_dir {
+            chunk_proof.dump(output_dir, &chunk_id)?;
+        }
 
+        // If the verifier was set, i.e. production environments, we also do a sanity
+        // verification of the proof that was generated above.
         if let Some(verifier) = &self.verifier {
-            if !verifier.verify_chunk_proof(chunk_proof.clone()) {
-                anyhow::bail!("chunk prover cannot generate valid proof");
-            }
-            log::info!("verify_chunk_proof done");
+            verifier.verify_chunk_proof(&chunk_proof)?;
         }
 
         Ok(chunk_proof)
     }
 
-    /// Check vk generated is same with vk loaded from assets
-    fn check_vk(&self) {
-        if self.raw_vk.is_some() {
-            let gen_vk = self
-                .prover_impl
-                .raw_vk(LayerId::Layer2.id())
-                .unwrap_or_default();
-            if gen_vk.is_empty() {
-                log::warn!("no gen_vk found, skip check_vk");
-                return;
-            }
-            let init_vk = self.raw_vk.clone().unwrap_or_default();
-            if gen_vk != init_vk {
-                log::error!(
-                    "zkevm-prover: generated VK is different with init one - gen_vk = {}, init_vk = {}",
-                    base64::encode(gen_vk),
-                    base64::encode(init_vk),
-                );
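An editor's sketch of driving the halo2 route end-to-end using the API from this patch; the directory paths are placeholders, and `params_map` is assumed preloaded (e.g. via `common::Prover::load_params_map` as in the tests above):

```rust
// Construct a chunk prover and generate (or load from cache) a chunk proof.
fn prove_chunk(
    params_map: &ParamsMap,
    task: ChunkProvingTask,
) -> Result<ChunkProofV2, ProverError> {
    let mut prover = Prover::from_params_and_assets(params_map, "./assets");
    // With Some("./out"), the proof is dumped to disk and re-used on the next
    // call for the same chunk identifier.
    prover.gen_halo2_chunk_proof(task, None, None, Some("./out"))
}
```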
+    /// Generates a chunk proof by compressing the provided SNARK. The generated proof uses the
+    /// [`CompressionCircuit`][aggregator::CompressionCircuit] to compress the supplied
+    /// [`SNARK`][snark_verifier_sdk::Snark] only once using thin-compression parameters.
+    ///
+    /// The [`ChunkProofV2`] represents the Layer-2 proof in Scroll's proving pipeline and the
+    /// generated SNARK can then be used as input to the
+    /// [`BatchCircuit`][aggregator::BatchCircuit].
+    ///
+    /// This method should be used iff the input SNARK was generated from a halo2-backend for
+    /// Sp1. In order to construct a chunk proof via the halo2-based
+    /// [`SuperCircuit`][zkevm_circuits::super_circuit::SuperCircuit], please use
+    /// [`gen_halo2_chunk_proof`][Self::gen_halo2_chunk_proof].
+    pub fn gen_sp1_chunk_proof(
+        &mut self,
+        inner_snark: Snark,
+        chunk: ChunkProvingTask,
+        chunk_id: Option<&str>,
+        output_dir: Option<&str>,
+    ) -> Result<ChunkProofV2, ProverError> {
+        // Panic if the chunk is empty, i.e. no traces were found.
+        assert!(!chunk.is_empty());
+
+        // The chunk identifier is either the specified identifier or we calculate it
+        // on-the-fly.
+        let chunk_id = chunk_id.map_or_else(|| chunk.identifier(), |name| name.to_string());
+
+        // Generate a Layer-2 compression SNARK for the provided inner SNARK.
+        let snark = self
+            .prover_impl
+            .load_or_gen_comp_snark(
+                &chunk_id,
+                LayerId::Layer2.id(),
+                true,
+                LayerId::Layer2.degree(),
+                inner_snark,
+                output_dir,
+            )
+            .map_err(|e| ChunkProverError::Custom(e.to_string()))?;
+
+        // Sanity check on the verifying key used at Layer-2.
+        self.check_vk()?;
+
+        // We reconstruct some metadata to be attached to the chunk proof.
+        let chunk_info = chunk.chunk_info.unwrap_or({
+            let witness_block = chunk_trace_to_witness_block(chunk.block_traces)?;
+            ChunkInfo::from_witness_block(&witness_block, false)
+        });
+
+        // Construct a chunk proof.
+        //
+        // Note that the `row_usage` has been set to `None`, because in the sp1-route we
+        // don't have the notion of rows being allocated to sub-circuits, as in the case of the
+        // halo2-route.
+        let chunk_proof_metadata =
+            ChunkProofV2Metadata::new(&snark, ChunkKind::Sp1, chunk_info, None)?;
+        let chunk_proof = ChunkProofV2::new(
+            snark,
+            self.prover_impl.pk(LayerId::Layer2.id()),
+            chunk_proof_metadata,
+        )?;
+
+        // If the output directory was provided, write the proof to disk.
+        if let Some(output_dir) = output_dir {
+            chunk_proof.dump(output_dir, &chunk_id)?;
+        }
+
+        // If the verifier was set, i.e. production environments, we also do a sanity
+        // verification of the proof that was generated above.
+        if let Some(verifier) = &self.verifier {
+            verifier.verify_chunk_proof(&chunk_proof)?;
+        }
+
+        Ok(chunk_proof)
+    }
+
+    /// Sanity check for the [`VerifyingKey`][halo2_proofs::plonk::VerifyingKey] used to generate
+    /// the Layer-2 SNARK that is wrapped inside the [`ChunkProofV2`]. The prover generated VK is
+    /// expected to match the VK used to initialise the prover.
+    fn check_vk(&self) -> Result<(), ChunkProverError> {
+        if let Some(expected_vk) = self.raw_vk.as_ref() {
+            let base64_exp_vk = base64::encode(expected_vk);
+            if let Some(generated_vk) = self.prover_impl.raw_vk(LayerId::Layer2.id()).as_ref() {
+                let base64_gen_vk = base64::encode(generated_vk);
+                if generated_vk.ne(expected_vk) {
+                    log::error!(
+                        "ChunkProver: VK mismatch! found={}, expected={}",
+                        base64_gen_vk,
+                        base64_exp_vk,
+                    );
+                    return Err(ChunkProverError::VerifyingKeyMismatch(
+                        base64_gen_vk,
+                        base64_exp_vk,
+                    ));
+                }
+            } else {
+                return Err(ChunkProverError::VerifyingKeyNotFound(base64_exp_vk));
             }
         }
+
+        Ok(())
     }
 }
diff --git a/prover/src/zkevm/verifier.rs b/prover/src/zkevm/verifier.rs
index f382cd1199..d9e1a2f4f5 100644
--- a/prover/src/zkevm/verifier.rs
+++ b/prover/src/zkevm/verifier.rs
@@ -1,21 +1,24 @@
-use crate::{
-    common,
-    config::{LAYER2_CONFIG_PATH, LAYER2_DEGREE},
-    consts::chunk_vk_filename,
-    io::force_to_read,
-    ChunkProof,
-};
+use std::{env, path::PathBuf};
+
 use aggregator::CompressionCircuit;
 use halo2_proofs::{
     halo2curves::bn256::{Bn256, G1Affine},
     plonk::VerifyingKey,
     poly::kzg::commitment::ParamsKZG,
 };
-use std::{collections::BTreeMap, env};
 
+use crate::{
+    common,
+    config::{LAYER2_CONFIG_PATH, LAYER2_DEGREE},
+    consts::chunk_vk_filename,
+    utils::force_read,
+    ChunkProofV2, ChunkProverError, ParamsMap, ProverError,
+};
+
+/// Verifier capable of verifying a [`ChunkProofV2`].
 #[derive(Debug)]
 pub struct Verifier<'params> {
-    // Make it public for testing with inner functions (unnecessary for FFI).
+    /// Encapsulates the common verifier.
     pub inner: common::Verifier<'params, CompressionCircuit>,
 }
 
@@ -26,22 +29,42 @@ impl<'params> From<common::Verifier<'params, CompressionCircuit>> for Verifier<'
 }
 
 impl<'params> Verifier<'params> {
+    /// Construct a new Verifier given the KZG parameters and a Verifying Key.
     pub fn new(params: &'params ParamsKZG<Bn256>, vk: VerifyingKey<G1Affine>) -> Self {
         common::Verifier::new(params, vk).into()
     }
 
-    pub fn from_params_and_assets(
-        params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>,
-        assets_dir: &str,
-    ) -> Self {
-        let raw_vk = force_to_read(assets_dir, &chunk_vk_filename());
+    /// Construct a new Verifier given the path to an assets directory where the
+    /// [`VerifyingKey`] is stored on disk. This method accepts a map of degree to the KZG
+    /// parameters for that degree, and picks the appropriate parameters based on the degree of
+    /// the [`Layer-2`][crate::config::LayerId::Layer2] [`CompressionCircuit`].
+ /// + /// Panics if the verifying key cannot be located in the given assets directory. + pub fn from_params_and_assets(params_map: &'params ParamsMap, assets_dir: &str) -> Self { + // Read the verifying key or panic. + let path = PathBuf::from(assets_dir).join(chunk_vk_filename()); + let raw_vk = force_read(&path); + + // The Layer-2 compression circuit is configured with the shape as per + // [`LAYER2_CONFIG_PATH`]. env::set_var("COMPRESSION_CONFIG", &*LAYER2_CONFIG_PATH); - let params = params_map.get(&*LAYER2_DEGREE).expect("should be loaded"); - let verifier = common::Verifier::from_params(params, &raw_vk); - verifier.into() + + let params = params_map + .get(&*LAYER2_DEGREE) + .unwrap_or_else(|| panic!("KZG params don't contain degree={:?}", LAYER2_DEGREE)); + + Self { + inner: common::Verifier::from_params(params, &raw_vk), + } } - pub fn verify_chunk_proof(&self, proof: ChunkProof) -> bool { - self.inner.verify_snark(proof.to_snark()) + /// Verify a chunk proof. Returns true if the verification is successful. + pub fn verify_chunk_proof(&self, proof: &ChunkProofV2) -> Result<(), ProverError> { + let snark = proof.try_into()?; + if self.inner.verify_snark(snark) { + Ok(()) + } else { + Err(ChunkProverError::Verification.into()) + } } } diff --git a/prover/test_data/evm-proof.json b/prover/test_data/evm-proof.json new file mode 100644 index 0000000000..1ef86b2551 --- /dev/null +++ b/prover/test_data/evm-proof.json @@ -0,0 +1 @@ +{"proof":{"proof":"H9nMdYzj5J/33rD6goSN9lyy4Jr666ENcw2pHAGn1FkEsFvhOT3NDdgsgfuiDoNWIadyYYUdmHONHTwH7ebzmB+lQSAvm0OFmwElTI/LjM6IIVdAb1RHPEabJdbr2iwbL/Anwh0qGMuhVu/KxZYddBTI49kkMylk3rDC2/ezd8whHpfxlQrBKQ4YbOuCe67ibS2HJuvPNbcrVc6rduvdmhzSwe+0+ibrpqKlZDCTRiIFMf9vvl/xDCX2MItcMTquEkOLzwDz/ZpQOzXeU4bKHQJ2o/uIuHPuxZGz1JRsbtMsC8WkxGkEdI9wYWCzb1RTAF04ysuEB2fz5Rs1AcKwNgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIVha6PsHnYu2BMF65H1r5w3t8TDIQpkpd995gpNT+DlC6kAeR8PfuaYNInRCz+bZ+v+WudB+JkLAaSViDf6NzXIw1csn4q0bjhiOw2O1CkHlSP0fs6BYUXfadwSDiPJ3gexxH60j3mSuo4tq/dJKs0YDz65yYAkXiEjzzYoaCXzg91GCdfm371zZ7YX3tTT4f3Xf90sC+6GOSlDlycCUL/CaCjm576YHtlW9A9jMuvrTx9rvAjHvIdX9kvKj+P87cZ0jJB0tfogiUUYygyxerjdzf9qALmyEpYCtoaRzRhMg2POREZV3dSL6Ig5ABpn0ho3bwRzFKo3HtSKwWuI6amFY8H9ZAh4Y52euPMCfV7q/QuwgXppXuOOxs0Hdjyg5oSgt/476d8RhM0NYdcj4qQ/8jeUmuzJRJl+1B/AaIEEBoLFRvstt2BjTP2UqvGtSxmSt/d48ENpTXzUbZLN4/0K57FWnMJcMZ66soZtR08Wa7W1DNugZa2j0RzZftR25AWWKVZBxuCxCKt8vbXWG4+NdXK2MFpt0SO7lplFp7b5Qe7bpb4LSttaKFhpDGnAb1zbh+7/6IDnVV6uFGM5WdbFSyOIAPdI7bxRIN7quKNsZrRuGG4MGhjivu2b2XNrT0GEQ31QFiYIMWWQCfAD3RaTTgCVkEacNr0afmtJdDHkQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABGvjx4l2EnqDG51SVmUPBMGFSv/BlgRVBFvbnedZmmGIwKiZd+x176Npu9HfELgNk0Qd8FX+1zO4UhZxuLwB92A5Bx53+YGkeFiKN1Hcv1c/XNEL8nIDW+RrbjgJYNCAlJr3v84qH0QrdD/H6Y4EIIe7J1FBvcaNpjV2ahYpebq4sO1DUWcELW0jibLZGkuQmotGtgUkAYZ3tfv669SadwQx3dkZQV/iTP4ELf7DuEXeZy4zlq89uijd1xiklf0MVLrclJnwNwkvXrTjaYH8m6lA9UlAhTBFItuvseQek7qAcWbyt4pE6G/93cyYIsV6iWWQKDuosoKNKyE+NwsyAii8Llv2TuPdmRMSYTd8Y+0BCzjhqrmOOmEmu5COZem/sCwQH92CC2W2Oq2Jtvik9/bwvSMfdOd+sOfbWM/scRwQLxqk1yPYeGtPnB4wcTDV5xEX2TdpD2IVCEf863NnOhSZOwDvbZNgDvA3LJjCJ04C+O2VoGzkNdpTgMJ84E5lm","instances":"AAAAAAAAAAAAAAAAAAAAAAAAAAAATsr77L180+rltu8AAAAAAAAAAAAAAAAAAAAAAAAAAABefY5cxwF38vJYzgAAAAAAAAAAAAAAAAAAAAAAAAAAAAArCDECPfov4UFWAAAAAAAAAAAAAAAAAAAAAAAAAAAALenFkSCzypvd/0cAAAAAAAAAAAAAAAAAAAAAAAAAAABMjhObP1Tk4LaWwwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZxrJuGpKtbvScAAAAAAAAAAAAAAAAAAAAAAAAAAAA9zQeyDLK1z8vwKAAAAAAAAAAAAAAAAAAAAAAAAAAAABiemRbiOBgZTMKvAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPHDamTZozsaVMAAAAAAAAAAAAA
AAAAAAAAAAAAAAAUn2flHFfjCUj8UAAAAAAAAAAAAAAAAAAAAAAAAAAAABuIYooZldvq5UVqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACQnf90hS3Tv9G91vMloktkEfTw520n9B8XWr92uzMdjzvot29jacqJcAAAAAAAAAAAAAAAAAAAAAIY+4Be1mT6hzdgY13HkZNAAAAAAAAAAAAAAAAAAAAAAOYRdBTeI/h1ABow+afQfsAAAAAAAAAAAAAAAAAAAAAKusra6vAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAkBSU3y4mikWSpekX65K5IAAAAAAAAAAAAAAAAAAAAAFwhiTka2U7nZgae8vzYraAAAAAAAAAAAAAAAAAAAAAAz7n6jeqFPuxKFRp/OqlKdgAAAAAAAAAAAAAAAAAAAAAkccTpus5BBCOxH2Be8HGTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIJ1AAAAAAAAAAAAAAAAAAAAAAsG6UZ0iMzttF8QL7yOb5WAAAAAAAAAAAAAAAAAAAAAAf8LIRd7c5Qinatn5XK//xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE=","vk":"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywDypuNyCQrqoPJvIVXOq6I7k0FOLtC23/wUlpV5XZJVYRvnTQT732GsQk9SbLYKO0HXDQX74zc4W6+04/WbBby2RE5Xpxs7SS+QefbTL8QYR7MnzElOK8gyZFf346gyd1DEa15Vck1FpZYwhhPwHa8a9F3fkVWmKrWs8XIHYU4hNSiyVw+UEuc4FYFVc7I6aoactklYnAfIPu3D9rzH4+i1+gXJJEqDXgfWfK+9szR4ysEnkuzgFrCcTvIG1U2mAJJg==","git_version":"33191d8"},"num_instance":[25]} \ No newline at end of file diff --git a/prover/test_data/evm-verifier.bin b/prover/test_data/evm-verifier.bin new file mode 100644 index 0000000000..2e94c1a343 Binary files /dev/null and b/prover/test_data/evm-verifier.bin differ diff --git a/testool/src/statetest/executor.rs b/testool/src/statetest/executor.rs index 34dbbc4485..f32650b13c 100644 --- a/testool/src/statetest/executor.rs +++ b/testool/src/statetest/executor.rs @@ -637,15 +637,12 @@ pub fn run_test( #[cfg(feature = "inner-prove")] { eth_types::constants::set_env_coinbase(&st.env.current_coinbase); - prover::test::inner_prove(&test_id, &witness_block); + prover::inner_prove(&test_id, &witness_block); } #[cfg(feature = "chunk-prove")] { eth_types::constants::set_env_coinbase(&st.env.current_coinbase); - prover::test::chunk_prove( - &test_id, - prover::ChunkProvingTask::new(vec![_scroll_trace], prover::ChunkKind::Halo2), - ); + prover::chunk_prove(&test_id, prover::ChunkProvingTask::new(vec![_scroll_trace])); } #[cfg(not(any(feature = "inner-prove", feature = "chunk-prove")))]