diff --git a/backend/Cargo.toml b/backend/Cargo.toml
index eb433dc1f..f158106bb 100644
--- a/backend/Cargo.toml
+++ b/backend/Cargo.toml
@@ -15,8 +15,14 @@ halo2 = [
     "dep:snark-verifier",
     "dep:halo2_solidity_verifier",
 ]
+estark-starky = ["dep:starky"]
 estark-polygon = ["dep:pil-stark-prover"]
-plonky3 = ["dep:powdr-plonky3", "dep:p3-commit", "dep:p3-matrix", "dep:p3-uni-stark"]
+plonky3 = [
+    "dep:powdr-plonky3",
+    "dep:p3-commit",
+    "dep:p3-matrix",
+    "dep:p3-uni-stark",
+]
 stwo = ["dep:stwo-prover"]
 
 # Enable AVX or Neon accordingly in backends that support them.
@@ -35,25 +41,29 @@ powdr-backend-utils.workspace = true
 
 powdr-plonky3 = { path = "../plonky3", optional = true }
-starky = { git = "https://github.com/0xEigenLabs/eigen-zkvm.git", rev = "cf405b2e2cecb8567cfd083a55936b71722276d5" }
+starky = { git = "https://github.com/0xEigenLabs/eigen-zkvm.git", rev = "cf405b2e2cecb8567cfd083a55936b71722276d5", optional = true }
 pil-stark-prover = { git = "https://github.com/powdr-labs/pil-stark-prover.git", rev = "769b1153f3ae2d7cbab4c8acf33865ed13f8a823", optional = true }
 
 # TODO change this once Halo2 releases 0.3.1
 #halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2", tag = "v0.3.0", features = ["circuit-params"] }
-halo2_proofs = { git = "https://github.com/powdr-labs/halo2.git", rev = "fb8087565115ff38da4074b9d1777e9a97222caa", features = ["circuit-params"], optional = true }
-halo2_curves = { version = "0.6.1", package = "halo2curves", optional = true}
+halo2_proofs = { git = "https://github.com/powdr-labs/halo2.git", rev = "fb8087565115ff38da4074b9d1777e9a97222caa", features = [
+    "circuit-params",
+], optional = true }
+halo2_curves = { version = "0.6.1", package = "halo2curves", optional = true }
 # TODO change this once Halo2 releases 0.3.1 and snark-verifier uses it
 #snark-verifier = { git = "https://github.com/privacy-scaling-explorations/snark-verifier", tag = "v2024_01_31" }
 snark-verifier = { git = "https://github.com/powdr-labs/snark-verifier.git", rev = "55012261fd4b0b8d21b581a9782d05258afe4104", optional = true }
-halo2_solidity_verifier = { git = "https://github.com/powdr-labs/halo2-solidity-verifier.git", rev = "ecae7fd2f62178c18b5fe18011630aa71da3371f", features = ["evm"], optional = true }
+halo2_solidity_verifier = { git = "https://github.com/powdr-labs/halo2-solidity-verifier.git", rev = "ecae7fd2f62178c18b5fe18011630aa71da3371f", features = [
+    "evm",
+], optional = true }
 
 p3-commit = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", features = [
-  "test-utils",
+    "test-utils",
 ], optional = true }
-p3-matrix = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true }
-p3-uni-stark = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true }
+p3-matrix = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true }
+p3-uni-stark = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true }
 
 # TODO: Change this to the main branch when the `andrew/dev/update-toolchain` branch is merged; the main branch uses "nightly-2024-01-04", which is not compatible with plonky3
-stwo-prover = { git= "https://github.com/starkware-libs/stwo.git",optional=true, rev="52d050c18b5dbc74af40214b3b441a6f60a20d41" }
+stwo-prover = { git = "https://github.com/starkware-libs/stwo.git", optional = true, rev = "52d050c18b5dbc74af40214b3b441a6f60a20d41" }
 
 strum = { version = "0.24.1", features = ["derive"] }
 log = "0.4.17"
diff --git a/backend/src/lib.rs b/backend/src/lib.rs
index d7e8e7790..21a33fe36 100644
--- a/backend/src/lib.rs
+++ b/backend/src/lib.rs
@@ -1,14 +1,16 @@
 #![deny(clippy::print_stdout)]
 
+#[cfg(feature = "estark-starky")]
 mod estark;
 #[cfg(feature = "halo2")]
 mod halo2;
 #[cfg(feature = "plonky3")]
 mod plonky3;
+#[cfg(feature = "stwo")]
+mod stwo;
 
 mod composite;
 mod field_filter;
-mod stwo;
 
 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::{constant_evaluator::VariablySizedColumn, witgen::WitgenCallback};
@@ -36,12 +38,16 @@ pub enum BackendType {
     #[cfg(feature = "estark-polygon")]
     #[strum(serialize = "estark-polygon-composite")]
     EStarkPolygonComposite,
+    #[cfg(feature = "estark-starky")]
     #[strum(serialize = "estark-starky")]
     EStarkStarky,
+    #[cfg(feature = "estark-starky")]
     #[strum(serialize = "estark-starky-composite")]
     EStarkStarkyComposite,
+    #[cfg(feature = "estark-starky")]
     #[strum(serialize = "estark-dump")]
     EStarkDump,
+    #[cfg(feature = "estark-starky")]
     #[strum(serialize = "estark-dump-composite")]
     EStarkDumpComposite,
     #[cfg(feature = "plonky3")]
@@ -78,11 +84,16 @@ impl BackendType {
             BackendType::EStarkPolygonComposite => Box::new(
                 composite::CompositeBackendFactory::new(estark::polygon_wrapper::Factory),
             ),
+            #[cfg(feature = "estark-starky")]
             BackendType::EStarkStarky => Box::new(estark::starky_wrapper::Factory),
+            #[cfg(feature = "estark-starky")]
             BackendType::EStarkStarkyComposite => Box::new(
                 composite::CompositeBackendFactory::new(estark::starky_wrapper::Factory),
             ),
+            // We need starky here because the dump backend uses some types that come from starky.
+            #[cfg(feature = "estark-starky")]
             BackendType::EStarkDump => Box::new(estark::DumpFactory),
+            #[cfg(feature = "estark-starky")]
             BackendType::EStarkDumpComposite => {
                 Box::new(composite::CompositeBackendFactory::new(estark::DumpFactory))
             }
@@ -90,6 +101,14 @@
             BackendType::Plonky3 => Box::new(plonky3::Factory),
             #[cfg(feature = "stwo")]
             BackendType::Stwo => Box::new(stwo::StwoProverFactory),
+            #[cfg(not(any(
+                feature = "halo2",
+                feature = "estark-polygon",
+                feature = "estark-starky",
+                feature = "plonky3",
+                feature = "stwo"
+            )))]
+            _ => panic!("Empty backend."),
         }
     }
 }
diff --git a/cli-rs/Cargo.toml b/cli-rs/Cargo.toml
index cf5d8d760..a51599107 100644
--- a/cli-rs/Cargo.toml
+++ b/cli-rs/Cargo.toml
@@ -8,9 +8,10 @@ homepage = { workspace = true }
 repository = { workspace = true }
 
 [features]
-default = []
+default = ["plonky3"]
 halo2 = ["powdr-pipeline/halo2"]
 plonky3 = ["powdr-pipeline/plonky3"]
+estark-starky = ["powdr-pipeline/estark-starky"]
 estark-polygon = ["powdr-pipeline/estark-polygon", "powdr-riscv/estark-polygon"]
 
 [dependencies]
diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index c3b94d487..4484ff865 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -9,14 +9,15 @@ repository = { workspace = true }
 default-run = "powdr"
 
 [features]
-default = [] # halo2 is disabled by default
+default = ["plonky3"]
 halo2 = ["powdr-backend/halo2", "powdr-pipeline/halo2"]
 plonky3 = ["powdr-backend/plonky3", "powdr-pipeline/plonky3"]
+estark-starky = ["powdr-backend/estark-starky", "powdr-pipeline/estark-starky"]
 estark-polygon = [
     "powdr-backend/estark-polygon",
     "powdr-pipeline/estark-polygon",
 ]
-stwo=["powdr-backend/stwo","powdr-pipeline/stwo"]
+stwo = ["powdr-backend/stwo", "powdr-pipeline/stwo"]
 
 simd = ["powdr-backend/simd", "powdr-pipeline/simd"]
 
@@ -37,6 +38,8 @@
 tracing-subscriber = { version = "0.3.17", features = ["std", "env-filter"] }
 tracing-forest = { version = "0.1.6", features = ["ansi", "smallvec"] }
 
 [dev-dependencies]
+powdr-pipeline = { workspace = true, features = ["estark-starky"] }
+
 tempfile = "3.6"
 test-log = "0.2.12"
diff --git a/pipeline/Cargo.toml b/pipeline/Cargo.toml
index 2136661c8..0aba7d6d3 100644
--- a/pipeline/Cargo.toml
+++ b/pipeline/Cargo.toml
@@ -8,9 +8,10 @@ homepage = { workspace = true }
 repository = { workspace = true }
 
 [features]
-default = [] # halo2 is disabled by default
+default = []
 halo2 = ["powdr-backend/halo2"]
 plonky3 = ["powdr-backend/plonky3"]
+estark-starky = ["powdr-backend/estark-starky"]
 estark-polygon = ["powdr-backend/estark-polygon"]
 stwo = ["powdr-backend/stwo"]
 
diff --git a/pipeline/src/test_util.rs b/pipeline/src/test_util.rs
index c91005941..0236b00dc 100644
--- a/pipeline/src/test_util.rs
+++ b/pipeline/src/test_util.rs
@@ -5,13 +5,17 @@ use powdr_number::{
     KoalaBearField,
 };
 use powdr_pil_analyzer::evaluator::{self, SymbolLookup};
+use std::env;
 use std::path::PathBuf;
-use std::{env, fs};
 use std::sync::Arc;
 
 use crate::pipeline::Pipeline;
+
+#[cfg(feature = "estark-starky")]
 use crate::verify::verify;
 
+#[cfg(feature = "estark-starky")]
+use std::fs;
 pub fn resolve_test_file(file_name: &str) -> PathBuf {
     PathBuf::from(format!("../test_data/{file_name}"))
 }
@@ -102,6 +106,15 @@ pub fn asm_string_to_pil<T: FieldElement>(contents: &str) -> Arc<Analyzed<T>> {
         .unwrap()
 }
 
+#[cfg(not(feature = "estark-starky"))]
+pub fn run_pilcom_with_backend_variant(
+    _pipeline: Pipeline<GoldilocksField>,
+    _backend_variant: BackendVariant,
+) -> Result<(), String> {
+    Ok(())
+}
+
+#[cfg(feature = "estark-starky")]
 pub fn run_pilcom_with_backend_variant(
     pipeline: Pipeline<GoldilocksField>,
    backend_variant: BackendVariant,
@@ -153,6 +166,14 @@ pub fn gen_estark_proof(pipeline: Pipeline<GoldilocksField>) {
     }
 }
 
+#[cfg(not(feature = "estark-starky"))]
+pub fn gen_estark_proof_with_backend_variant(
+    _pipeline: Pipeline<GoldilocksField>,
+    _backend_variant: BackendVariant,
+) {
+}
+
+#[cfg(feature = "estark-starky")]
 pub fn gen_estark_proof_with_backend_variant(
     pipeline: Pipeline<GoldilocksField>,
     backend_variant: BackendVariant,
@@ -438,6 +459,14 @@ pub fn assert_proofs_fail_for_invalid_witnesses_pilcom(
     assert!(run_pilcom_with_backend_variant(pipeline, BackendVariant::Composite).is_err());
 }
 
+#[cfg(not(feature = "estark-starky"))]
+pub fn assert_proofs_fail_for_invalid_witnesses_estark(
+    _file_name: &str,
+    _witness: &[(String, Vec<u64>)],
+) {
+}
+
+#[cfg(feature = "estark-starky")]
 pub fn assert_proofs_fail_for_invalid_witnesses_estark(
     file_name: &str,
     witness: &[(String, Vec<u64>)],
diff --git a/pipeline/tests/asm.rs b/pipeline/tests/asm.rs
index 6590682f5..d963bace9 100644
--- a/pipeline/tests/asm.rs
+++ b/pipeline/tests/asm.rs
@@ -334,6 +334,7 @@ fn pil_at_module_level() {
     regular_test(f, Default::default());
 }
 
+#[cfg(feature = "estark-starky")]
 #[test]
 fn read_poly_files() {
     let asm_files = ["asm/vm_to_block_unique_interface.asm", "asm/empty.asm"];
diff --git a/pipeline/tests/powdr_std.rs b/pipeline/tests/powdr_std.rs
index b9c7b8125..45ca57f2f 100644
--- a/pipeline/tests/powdr_std.rs
+++ b/pipeline/tests/powdr_std.rs
@@ -105,6 +105,7 @@ fn arith_large_test() {
     // Running gen_estark_proof(f, Default::default())
     // is too slow for the PR tests. This will only create a single
     // eStark proof instead of 3.
+    #[cfg(feature = "estark-starky")]
     pipeline
         .with_backend(powdr_backend::BackendType::EStarkStarky, None)
         .compute_proof()
diff --git a/plonky3/src/prover.rs b/plonky3/src/prover.rs
index a143d3e3c..85eef3a34 100644
--- a/plonky3/src/prover.rs
+++ b/plonky3/src/prover.rs
@@ -77,10 +77,14 @@ where
             .sum()
     }
 
-    /// Commit to the quotient polynomial across all tables.
+    /// Computes the quotient polynomials for each table and commits to them.
     ///
-    /// Returns a single commitment and the prover data.
-    fn commit_to_quotient(
+    /// # Side effects
+    /// Samples a random challenge and observes the commitment.
+    ///
+    /// # Returns
+    /// The commitment and the prover data.
+    fn compute_and_commit_to_quotient(
         &self,
         state: &mut ProverState<'a, T>,
         proving_key: Option<&StarkProvingKey>,
@@ -92,8 +96,8 @@ where
             .tables
             .iter()
             .enumerate()
-            .flat_map(|(index, (name, i))| {
-                i.quotient_domains_and_chunks(
+            .flat_map(|(index, (name, table))| {
+                table.quotient_domains_and_chunks(
                     index,
                     state,
                     proving_key
@@ -461,15 +465,16 @@ where
         multi_table.observe_instances(challenger);
 
         let mut state = ProverState::new(&multi_table, pcs, challenger);
-        let mut stage = Stage {
+
+        // run the first stage
+        state = state.run_stage(Stage {
             id: 0,
             air_stages: stage_0,
-        };
+        });
 
         assert!(stage_count >= 1);
         // generate all stages starting from the second one based on the witgen callback
         for stage_id in 1..stage_count {
-            state = state.run_stage(stage);
             // get the challenges drawn at the end of the previous stage
             let local_challenges = &state.processed_stages.last().unwrap().challenge_values;
             let CallbackResult { air_stages } =
@@ -478,15 +483,12 @@ where
             assert_eq!(air_stages.len(), multi_table.table_count());
 
             // go to the next stage
-            stage = Stage {
+            state = state.run_stage(Stage {
                 id: stage_id,
                 air_stages,
-            };
+            });
         }
 
-        // run the last stage
-        state = state.run_stage(stage);
-
         // sanity check that the last stage did not create any challenges
         assert!(state
             .processed_stages
@@ -497,7 +499,8 @@ where
         // sanity check that we processed as many stages as expected
         assert_eq!(state.processed_stages.len() as u8, stage_count);
 
-        let (quotient_commit, quotient_data) = multi_table.commit_to_quotient(&mut state, proving_key);
+        let (quotient_commit, quotient_data) =
+            multi_table.compute_and_commit_to_quotient(&mut state, proving_key);
 
         let commitments = Commitments {
             traces_by_stage: state
diff --git a/plonky3/src/symbolic_builder.rs b/plonky3/src/symbolic_builder.rs
index a19716269..e8f0ab01f 100644
--- a/plonky3/src/symbolic_builder.rs
+++ b/plonky3/src/symbolic_builder.rs
@@ -12,6 +12,8 @@ use p3_uni_stark::Entry;
 use p3_uni_stark::SymbolicExpression;
 use p3_uni_stark::SymbolicVariable;
 
+// TODO: This function iterates over all constraints; we should instead store the
+// max degree in `ConstraintSystem`.
 #[instrument(name = "infer log of constraint degree", skip_all)]
 pub fn get_log_quotient_degree<T, A>(air: &A, public_values_counts: &[usize]) -> usize
 where
diff --git a/plonky3/src/verifier.rs b/plonky3/src/verifier.rs
index b7ac6d608..b54a626eb 100644
--- a/plonky3/src/verifier.rs
+++ b/plonky3/src/verifier.rs
@@ -133,6 +133,8 @@ where
 
     let pcs = config.pcs();
 
+    // TODO: Instead of hashing each commit separately, we could hash a summary of all the
+    // commitments, e.g. a hash that is precomputed at setup time.
     for table in tables.values() {
         if let Some(preprocessed_commit) = table.preprocessed_commit() {
             challenger.observe(preprocessed_commit.clone());
@@ -211,7 +213,7 @@ where
                 move |StageOpenedValues { local, next }| {
                     (
                         // choose the correct preprocessed commitment based on the degree in the proof
-                        // this could be optimized by putting the preproccessed commitments in a merkle tree
+                        // this could be optimized by putting the preprocessed commitments in a merkle tree
                         // and have the prover prove that it used commitments matching the lengths of the traces
                         // this way the verifier does not need to have all the preprocessed commitments for all sizes
                         table.preprocessed_commit().expect("a preprocessed commitment was expected because a preprocessed opening was found").clone(),
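Not part of the diff: since this change puts the starky-based eSTARK backend behind the new `estark-starky` feature (and makes `plonky3` the default for the CLI crates), downstream manifests now have to opt in explicitly. A minimal sketch of such an entry, assuming a git dependency on the powdr workspace (the source line is hypothetical; the feature names come from the manifests above):

    [dependencies]
    # Opt back into the eSTARK backend, which is no longer enabled by default.
    powdr-pipeline = { git = "https://github.com/powdr-labs/powdr", features = ["estark-starky"] }

If every backend feature is left off, the `#[cfg(not(any(...)))] _ => panic!("Empty backend.")` arm added in backend/src/lib.rs keeps the match compiling and turns backend selection into a runtime panic instead of a build failure.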