From bdd755e635a0d89e9ca6ad2c26d87552514a8bc0 Mon Sep 17 00:00:00 2001 From: anupsv Date: Thu, 30 Jan 2025 12:43:01 -0800 Subject: [PATCH] Splitting to Multiple files and Optimizing (#43) * major changes * moving to non struct functions * moving verification functions to separate file, cleanup, optimize * more optimizations * bench code changes * cleanup * cargo fmt * moving to workspaces, splitting tests, other things * cargo fmt * clippy fixes * some version updates * fixing tests * moving to prover and verifier crates * removing duplicate functions * moving process_chunks * unused deps * fixing crate name * fixing package names --------- Co-authored-by: anupsv <6407789+anupsv@users.noreply.github.com> --- .github/workflows/rust.yml | 9 +- Cargo.toml | 113 +- primitives/Cargo.toml | 32 + primitives/README.md | 9 + {src => primitives/src}/arith.rs | 0 {src => primitives/src}/blob.rs | 0 {src => primitives/src}/consts.rs | 15 +- {src => primitives/src}/errors.rs | 0 {src => primitives/src}/helpers.rs | 319 ++++- primitives/src/lib.rs | 7 + {src => primitives/src}/polynomial.rs | 3 +- {src => primitives/src}/traits.rs | 0 {tests => primitives/tests}/blob_test.rs | 3 +- {tests => primitives/tests}/helpers_test.rs | 176 ++- .../tests}/polynomial_test.rs | 2 +- .../tests}/test-files/blobs-from-fr.txt | 0 .../tests}/test-files/blobs.txt | Bin prover/Cargo.toml | 58 + README.md => prover/README.md | 0 {benches => prover/benches}/bench_g1_ifft.rs | 13 +- .../benches}/bench_kzg_commit.rs | 16 +- .../benches}/bench_kzg_commit_large_blobs.rs | 10 +- .../benches}/bench_kzg_proof.rs | 16 +- .../benches}/bench_kzg_setup.rs | 12 +- .../kzg_commitment_diagram.png | Bin prover/src/kzg.rs | 314 +++++ {src => prover/src}/lib.rs | 18 +- prover/src/srs.rs | 241 ++++ prover/tests/kzg_test.rs | 75 ++ {tests => prover/tests}/test-files/g1.point | Bin .../tests}/test-files/kzg.proof.eq.input | 0 .../tests}/test-files/lagrangeG1SRS.txt | 0 .../test-files/mainnet-data/g1.131072.point | Bin .../test-files/mainnet-data/g1.32mb.point | Bin .../test-files/mainnet-data/g2.point.powerOf2 | Bin .../tests}/test-files/srs.g1.points.string | 0 src/kzg.rs | 1133 ----------------- verifier/Cargo.toml | 26 + verifier/README.md | 9 + .../benches}/bench_kzg_verify.rs | 29 +- verifier/src/batch.rs | 272 ++++ verifier/src/lib.rs | 2 + verifier/src/verify.rs | 85 ++ tests/kzg_test.rs => verifier/tests/tests.rs | 380 ++---- 44 files changed, 1727 insertions(+), 1670 deletions(-) create mode 100644 primitives/Cargo.toml create mode 100644 primitives/README.md rename {src => primitives/src}/arith.rs (100%) rename {src => primitives/src}/blob.rs (100%) rename {src => primitives/src}/consts.rs (87%) rename {src => primitives/src}/errors.rs (100%) rename {src => primitives/src}/helpers.rs (58%) create mode 100644 primitives/src/lib.rs rename {src => primitives/src}/polynomial.rs (99%) rename {src => primitives/src}/traits.rs (100%) rename {tests => primitives/tests}/blob_test.rs (99%) rename {tests => primitives/tests}/helpers_test.rs (74%) rename {tests => primitives/tests}/polynomial_test.rs (99%) rename {tests => primitives/tests}/test-files/blobs-from-fr.txt (100%) rename {tests => primitives/tests}/test-files/blobs.txt (100%) create mode 100644 prover/Cargo.toml rename README.md => prover/README.md (100%) rename {benches => prover/benches}/bench_g1_ifft.rs (75%) rename {benches => prover/benches}/bench_kzg_commit.rs (80%) rename {benches => prover/benches}/bench_kzg_commit_large_blobs.rs (83%) rename {benches => 
prover/benches}/bench_kzg_proof.rs (88%) rename {benches => prover/benches}/bench_kzg_setup.rs (72%) rename kzg_commitment_diagram.png => prover/kzg_commitment_diagram.png (100%) create mode 100644 prover/src/kzg.rs rename {src => prover/src}/lib.rs (91%) create mode 100644 prover/src/srs.rs create mode 100644 prover/tests/kzg_test.rs rename {tests => prover/tests}/test-files/g1.point (100%) rename {tests => prover/tests}/test-files/kzg.proof.eq.input (100%) rename {tests => prover/tests}/test-files/lagrangeG1SRS.txt (100%) rename {tests => prover/tests}/test-files/mainnet-data/g1.131072.point (100%) rename {tests => prover/tests}/test-files/mainnet-data/g1.32mb.point (100%) rename {tests => prover/tests}/test-files/mainnet-data/g2.point.powerOf2 (100%) rename {tests => prover/tests}/test-files/srs.g1.points.string (100%) delete mode 100644 src/kzg.rs create mode 100644 verifier/Cargo.toml create mode 100644 verifier/README.md rename {benches => verifier/benches}/bench_kzg_verify.rs (77%) create mode 100644 verifier/src/batch.rs create mode 100644 verifier/src/lib.rs create mode 100644 verifier/src/verify.rs rename tests/kzg_test.rs => verifier/tests/tests.rs (51%) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index b9d69b1..5bb0da8 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -15,15 +15,14 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Build run: cargo build --verbose + - name: Fmt check + run: cargo fmt --all -- --check - name: Clippy Format test run: cargo clippy --all --manifest-path Cargo.toml -- -D warnings - name: Run tests run: cargo test --verbose - - name: Run tests with mainnet data - run: KZG_ENV=mainnet-data cargo test --verbose - - name: Fmt check - run: cargo fmt --all -- --check + diff --git a/Cargo.toml b/Cargo.toml index b922614..2d4cbcd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,91 +1,20 @@ -[package] -name = "rust-kzg-bn254" -version = "0.2.1" +[workspace] +members = [ + "verifier", + "prover", + "primitives", +] +resolver = "2" + +[workspace.dependencies] +thiserror = "2.0.11" + +[workspace.package] edition = "2021" -authors = ["Anup Swamy Veena", "Teddy Knox"] rust-version = "1.81" -description = "This library offers a set of functions for generating and interacting with bn254 KZG commitments and proofs in rust, with the motivation of supporting fraud and validity proof logic in EigenDA rollup integrations." -readme = "README.md" repository = "https://github.com/Layr-Labs/rust-kzg-bn254" +homepage = "" license-file = "LICENSE" -exclude = ["tests/*", "benches/*"] -# TODO: is this needed for the image to show up in the rust docs? 
-include = ["./kzg_commitment_diagram.png"] - -[dependencies] -ark-bn254 = "0.5.0" -ark-ec = { version = "0.5.0", features = ["parallel"] } -ark-ff = { version = "0.5.0", features = ["parallel"] } -ark-serialize = "0.5.0" -ark-std = { version = "0.5.0", features = ["parallel"] } -directories = "5.0.1" -hex-literal = "0.4.1" -rand = "0.8.5" -sha2 = "0.10.8" -ureq = "2.12.1" -num-bigint = "0.4" -rayon = "1.10" -num-traits = "0.2" -byteorder = "1.5" -ark-poly = { version = "0.5.0", features = ["parallel"] } -crossbeam-channel = "0.5" -num_cpus = "1.16.0" -sys-info = "0.9" -itertools = "0.13.0" -thiserror = "2.0.10" - -[dev-dependencies] -criterion = "0.5" -lazy_static = "1.5" -tracing = { version = "0.1.41", features = ["log"] } -tracing-subscriber = "0.3.19" - -[[test]] -name = "kzg" -path = "tests/kzg_test.rs" - -[[test]] -name = "blob" -path = "tests/blob_test.rs" - -[[test]] -name = "polynomial" -path = "tests/polynomial_test.rs" - -[[test]] -name = "helpers" -path = "tests/helpers_test.rs" - -[[bench]] -name = "bench_g1_ifft" -harness = false -path = "benches/bench_g1_ifft.rs" - - -[[bench]] -name = "bench_kzg_setup" -harness = false -path = "benches/bench_kzg_setup.rs" - -[[bench]] -name = "bench_kzg_commit" -harness = false -path = "benches/bench_kzg_commit.rs" - -[[bench]] -name = "bench_kzg_commit_large_blobs" -harness = false -path = "benches/bench_kzg_commit_large_blobs.rs" - -[[bench]] -name = "bench_kzg_proof" -harness = false -path = "benches/bench_kzg_proof.rs" - -[[bench]] -name = "bench_kzg_verify" -harness = false -path = "benches/bench_kzg_verify.rs" [profile.bench] opt-level = 3 @@ -94,7 +23,7 @@ strip = "none" debug-assertions = false overflow-checks = false lto = false -panic = 'unwind' +panic = 'abort' incremental = false codegen-units = 16 rpath = false @@ -106,7 +35,21 @@ strip = "none" debug-assertions = false overflow-checks = false lto = false +panic = 'abort' +incremental = false +codegen-units = 16 +rpath = false + +[profile.release] +opt-level = 3 +debug = false +strip = "none" +debug-assertions = false +overflow-checks = false +lto = true panic = 'unwind' incremental = false codegen-units = 16 rpath = false + + diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml new file mode 100644 index 0000000..495bfcc --- /dev/null +++ b/primitives/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "rust-kzg-bn254-primitives" +version = "0.1.0" +edition.workspace = true +repository.workspace = true +license-file.workspace = true + +[dependencies] +ark-bn254 = "0.5.0" +ark-ec = { version = "0.5.0", features = ["parallel"] } +ark-ff = { version = "0.5.0", features = ["parallel"] } +ark-serialize = "0.5.0" +ark-std = { version = "0.5.0", features = ["parallel"] } +ark-poly = { version = "0.5.0", features = ["parallel"] } +sha2 = "0.10.8" +num-traits = "0.2" +thiserror = "2.0.11" + +[dev-dependencies] +rand = "0.8.5" + +[[test]] +name = "blob" +path = "tests/blob_test.rs" + +[[test]] +name = "polynomial" +path = "tests/polynomial_test.rs" + +[[test]] +name = "helpers" +path = "tests/helpers_test.rs" diff --git a/primitives/README.md b/primitives/README.md new file mode 100644 index 0000000..973832d --- /dev/null +++ b/primitives/README.md @@ -0,0 +1,9 @@ +# rust-kzg-bn254-primitives + +[![Crate](https://img.shields.io/crates/v/rust-kzg-bn254.svg)](https://crates.io/crates/rust-kzg-bn254-primitives) + +This library offers primitive set of structures and functions for generating and interacting with bn254 KZG commitments and proofs in rust. 
+ +## Warning & Disclaimer + +This code is unaudited and under construction. This is experimental software and is provided on an "as is" and "as available" basis and may not work at all. It should not be used in production. \ No newline at end of file diff --git a/src/arith.rs b/primitives/src/arith.rs similarity index 100% rename from src/arith.rs rename to primitives/src/arith.rs diff --git a/src/blob.rs b/primitives/src/blob.rs similarity index 100% rename from src/blob.rs rename to primitives/src/blob.rs diff --git a/src/consts.rs b/primitives/src/consts.rs similarity index 87% rename from src/consts.rs rename to primitives/src/consts.rs index 227d346..e9d64e6 100644 --- a/src/consts.rs +++ b/primitives/src/consts.rs @@ -51,18 +51,7 @@ pub const PRIMITIVE_ROOTS_OF_UNITY: [Fr; 29] = [ MontFp!("19103219067921713944291392827692070036145651957329286315305642004821462161904"), ]; -pub const G2_TAU_FOR_TEST_SRS_3000: G2Affine = G2Affine::new_unchecked( - Fq2::new( - MontFp!("7912312892787135728292535536655271843828059318189722219035249994421084560563"), - MontFp!("21039730876973405969844107393779063362038454413254731404052240341412356318284"), - ), - Fq2::new( - MontFp!("18697407556011630376420900106252341752488547575648825575049647403852275261247"), - MontFp!("7586489485579523767759120334904353546627445333297951253230866312564920951171"), - ), -); - -pub const G2_TAU_FOR_MAINNET_SRS: G2Affine = G2Affine::new_unchecked( +pub const G2_TAU: G2Affine = G2Affine::new_unchecked( Fq2::new( MontFp!("19394299006376106554626551996044114846855237028623244664226757033024550999552"), MontFp!("10478571113809844268398751534081669357808742555529167819607714577862447855483"), @@ -73,5 +62,5 @@ pub const G2_TAU_FOR_MAINNET_SRS: G2Affine = G2Affine::new_unchecked( ), ); -// This is the G2 Tau for the MAINNET SRS points. +// This is the G2 Tau for the EigenDA MAINNET SRS points. 
pub const MAINNET_SRS_G1_SIZE: usize = 131072; diff --git a/src/errors.rs b/primitives/src/errors.rs similarity index 100% rename from src/errors.rs rename to primitives/src/errors.rs diff --git a/src/helpers.rs b/primitives/src/helpers.rs similarity index 58% rename from src/helpers.rs rename to primitives/src/helpers.rs index 9e58fd3..9aa6279 100644 --- a/src/helpers.rs +++ b/primitives/src/helpers.rs @@ -1,15 +1,21 @@ -use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Projective}; -use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; +use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective}; +use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{sbb, BigInt, BigInteger, Field, PrimeField}; +use ark_serialize::CanonicalSerialize; use ark_std::{str::FromStr, vec::Vec, One, Zero}; -use crossbeam_channel::Receiver; +use num_traits::ToPrimitive; +use sha2::{Digest, Sha256}; use std::cmp; use crate::{ arith, - consts::{BYTES_PER_FIELD_ELEMENT, PRIMITIVE_ROOTS_OF_UNITY, SIZE_OF_G1_AFFINE_COMPRESSED}, + blob::Blob, + consts::{ + BYTES_PER_FIELD_ELEMENT, FIAT_SHAMIR_PROTOCOL_DOMAIN, MAINNET_SRS_G1_SIZE, + PRIMITIVE_ROOTS_OF_UNITY, SIZE_OF_G1_AFFINE_COMPRESSED, + }, errors::KzgError, - traits::ReadPointFromBytes, + polynomial::PolynomialEvalForm, }; use ark_ec::AdditiveGroup; @@ -89,7 +95,7 @@ pub fn convert_by_padding_empty_byte(data: &[u8]) -> Vec { /// ``` /// /// ``` -/// # use rust_kzg_bn254::helpers::remove_empty_byte_from_padded_bytes_unchecked; +/// # use rust_kzg_bn254_primitives::helpers::remove_empty_byte_from_padded_bytes_unchecked; /// let mut input = vec![1u8; 70]; // Two complete 32-byte element plus 6 bytes /// input[0] = 0; input[32] = 0; /// @@ -166,16 +172,11 @@ pub fn to_fr_array(data: &[u8]) -> Vec { /// /// # Example /// ``` -/// use rust_kzg_bn254::kzg::KZG; -/// use rust_kzg_bn254::blob::Blob; -/// -/// let mut kzg = KZG::setup( -/// "tests/test-files/mainnet-data/g1.131072.point", -/// 268435456, -/// 131072, -/// ).unwrap(); +/// use rust_kzg_bn254_primitives::blob::Blob; +/// use rust_kzg_bn254_primitives::helpers; + /// let input = Blob::from_raw_data(b"random data for blob"); -/// kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()).unwrap(); +/// helpers::calculate_roots_of_unity(input.len().try_into().unwrap()).unwrap(); /// ``` pub fn to_byte_array(data_fr: &[Fr], max_output_size: usize) -> Vec { // Calculate the number of field elements in input @@ -314,25 +315,6 @@ pub fn read_g1_point_from_bytes_be(g1_bytes_be: &[u8]) -> Result Ok(point) } -pub fn process_chunks(receiver: Receiver<(Vec, usize, bool)>) -> Vec<(T, usize)> -where - T: ReadPointFromBytes, -{ - // TODO: should we use rayon to process this in parallel? - receiver - .iter() - .map(|(chunk, position, is_native)| { - let point: T = if is_native { - T::read_point_from_bytes_native_compressed(&chunk) - .expect("Failed to read point from bytes") - } else { - T::read_point_from_bytes_be(&chunk).expect("Failed to read point from bytes") - }; - (point, position) - }) - .collect() -} - fn get_b_twist_curve_coeff() -> Fq2 { let twist_c0 = Fq::from(9); let twist_c1 = Fq::from(1); @@ -457,7 +439,7 @@ pub fn g1_lincomb(points: &[G1Affine], scalars: &[Fr]) -> Result Result { .ok_or_else(|| KzgError::GenericError("power must be <= 28".to_string())) .copied() } + +/// Maps a byte slice to a field element (`Fr`) using SHA-256 from SHA3 family as the +/// hash function. 
+/// +/// # Arguments +/// +/// * `msg` - The input byte slice to hash. +/// +/// # Returns +/// +/// * `Fr` - The resulting field element. +pub fn hash_to_field_element(msg: &[u8]) -> Fr { + // Perform the hash operation. + let msg_digest = Sha256::digest(msg); + let hash_elements = msg_digest.as_slice(); + + let fr_element: Fr = Fr::from_be_bytes_mod_order(hash_elements); + + fr_element +} + +pub fn pairings_verify(a1: G1Affine, a2: G2Affine, b1: G1Affine, b2: G2Affine) -> bool { + let neg_b1 = -b1; + let p = [a1, neg_b1]; + let q = [a2, b2]; + let result = Bn254::multi_pairing(p, q); + result.is_zero() +} + +/// Computes the Fiat-Shamir challenge from a blob and its commitment. +/// +/// # Arguments +/// +/// * `blob` - A reference to the `Blob` struct. +/// * `commitment` - A reference to the `G1Affine` commitment. +/// +/// # Returns +/// +/// * `Ok(Fr)` - The resulting field element challenge. +/// * `Err(KzgError)` - If any step fails. +pub fn compute_challenge(blob: &Blob, commitment: &G1Affine) -> Result { + // Convert the blob to a polynomial in evaluation form + // This is needed to process the blob data for the challenge + let blob_poly = blob.to_polynomial_eval_form(); + + // Calculate total size needed for the challenge input buffer: + // - Length of domain separator + // - 8 bytes for number of field elements + // - Size of blob data (number of field elements * bytes per element) + // - Size of compressed G1 point (commitment) + let challenge_input_size = FIAT_SHAMIR_PROTOCOL_DOMAIN.len() + + 8 + + (blob_poly.len() * BYTES_PER_FIELD_ELEMENT) + + SIZE_OF_G1_AFFINE_COMPRESSED; + + // Initialize buffer to store all data that will be hashed + let mut digest_bytes = vec![0; challenge_input_size]; + let mut offset = 0; + + // Step 1: Copy the Fiat-Shamir domain separator + // This provides domain separation for the hash function to prevent + // attacks that try to confuse different protocol messages + digest_bytes[offset..offset + FIAT_SHAMIR_PROTOCOL_DOMAIN.len()] + .copy_from_slice(FIAT_SHAMIR_PROTOCOL_DOMAIN); + offset += FIAT_SHAMIR_PROTOCOL_DOMAIN.len(); + + // Step 2: Copy the number of field elements (blob polynomial length) + // Convert to bytes using the configured endianness + let number_of_field_elements = blob_poly.len().to_be_bytes(); + digest_bytes[offset..offset + 8].copy_from_slice(&number_of_field_elements); + offset += 8; + + // Step 3: Copy the blob data + // Convert polynomial to bytes using helper function + let blob_data = to_byte_array( + blob_poly.evaluations(), + blob_poly.len() * BYTES_PER_FIELD_ELEMENT, + ); + digest_bytes[offset..offset + blob_data.len()].copy_from_slice(&blob_data); + offset += blob_data.len(); + + // Step 4: Copy the commitment (compressed G1 point) + // Serialize the commitment point in compressed form + let mut commitment_bytes = Vec::with_capacity(SIZE_OF_G1_AFFINE_COMPRESSED); + commitment + .serialize_compressed(&mut commitment_bytes) + .map_err(|_| KzgError::SerializationError("Failed to serialize commitment".to_string()))?; + digest_bytes[offset..offset + SIZE_OF_G1_AFFINE_COMPRESSED].copy_from_slice(&commitment_bytes); + + // Verify that we wrote exactly the amount of bytes we expected + // This helps catch any buffer overflow/underflow bugs + if offset + SIZE_OF_G1_AFFINE_COMPRESSED != challenge_input_size { + return Err(KzgError::InvalidInputLength); + } + + // Hash all the data to generate the challenge field element + // This implements the Fiat-Shamir transform to generate a "random" challenge + 
Ok(hash_to_field_element(&digest_bytes)) +} + +/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#evaluate_polynomial_in_evaluation_form +pub fn evaluate_polynomial_in_evaluation_form( + polynomial: &PolynomialEvalForm, + z: &Fr, +) -> Result { + let blob_size = polynomial.len_underlying_blob_bytes(); + + // Step 2: Calculate roots of unity for the given blob size and SRS order + let roots_of_unity = calculate_roots_of_unity(blob_size as u64)?; + + // Step 3: Ensure the polynomial length matches the domain length + if polynomial.len() != roots_of_unity.len() { + return Err(KzgError::InvalidInputLength); + } + + let width = polynomial.len(); + + // Step 4: Compute inverse_width = 1 / width + let inverse_width = Fr::from(width as u64) + .inverse() + .ok_or(KzgError::InvalidDenominator)?; + + // Step 5: Check if `z` is in the domain + if let Some(index) = roots_of_unity.iter().position(|&domain_i| domain_i == *z) { + return polynomial + .get_evalualtion(index) + .cloned() + .ok_or(KzgError::GenericError( + "Polynomial element missing at the found index.".to_string(), + )); + } + + // Step 6: Use the barycentric formula to compute the evaluation + let sum = polynomial + .evaluations() + .iter() + .zip(roots_of_unity.iter()) + .map(|(f_i, &domain_i)| { + let a = *f_i * domain_i; + let b = *z - domain_i; + // Since `z` is not in the domain, `b` should never be zero + a / b + }) + .fold(Fr::zero(), |acc, val| acc + val); + + // Step 7: Compute r = z^width - 1 + let r = z.pow([width as u64]) - Fr::one(); + + // Step 8: Compute f(z) = (z^width - 1) / width * sum + let f_z = sum * r * inverse_width; + + Ok(f_z) +} + +/// Calculates the roots of unities but doesn't assign it to the struct +/// Used in batch verification process as the roots need to be calculated for each blob +/// because of different length. +/// +/// # Arguments +/// * `length_of_data_after_padding` - Length of the blob data after padding in bytes. 
+/// +/// # Returns +/// * `Result<(Params, Vec), KzgError>` - Tuple containing: +/// - Params: KZG library operational parameters +/// - Vec: Vector of roots of unity +/// +/// # Details +/// - Generates roots of unity needed for FFT operations +/// - Calculates KZG operational parameters for commitment scheme +/// ``` +pub fn calculate_roots_of_unity(length_of_data_after_padding: u64) -> Result, KzgError> { + // Calculate log2 of the next power of two of the length of data after padding + let log2_of_evals = (length_of_data_after_padding + .div_ceil(32) + .next_power_of_two() as f64) + .log2() + .to_u8() + .ok_or_else(|| { + KzgError::GenericError( + "Failed to convert length_of_data_after_padding to u8".to_string(), + ) + })?; + + // Check if the length of data after padding is valid with respect to the SRS order + if length_of_data_after_padding + .div_ceil(BYTES_PER_FIELD_ELEMENT as u64) + .next_power_of_two() + > MAINNET_SRS_G1_SIZE as u64 + { + return Err(KzgError::SerializationError( + "the supplied encoding parameters are not valid with respect to the SRS.".to_string(), + )); + } + + // Find the root of unity corresponding to the calculated log2 value + let root_of_unity = get_primitive_root_of_unity(log2_of_evals.into())?; + + // Expand the root to get all the roots of unity + let mut expanded_roots_of_unity = expand_root_of_unity(&root_of_unity); + + // Remove the last element to avoid duplication + expanded_roots_of_unity.truncate(expanded_roots_of_unity.len() - 1); + + // Return the parameters and the expanded roots of unity + Ok(expanded_roots_of_unity) +} + +/// function to expand the roots based on the configuration +fn expand_root_of_unity(root_of_unity: &Fr) -> Vec { + let mut roots = vec![Fr::one()]; // Initialize with 1 + roots.push(*root_of_unity); // Add the root of unity + + let mut i = 1; + while !roots[i].is_one() { + // Continue until the element cycles back to one + let this = &roots[i]; + i += 1; + roots.push(this * root_of_unity); // Push the next power of the root + // of unity + } + roots +} + +/// A helper function for the `verify_blob_kzg_proof_batch` function. +pub fn compute_challenges_and_evaluate_polynomial( + blobs: &[Blob], + commitments: &[G1Affine], +) -> Result<(Vec, Vec), KzgError> { + // Pre-allocate vectors to store: + // - evaluation_challenges: Points where polynomials will be evaluated + // - ys: Results of polynomial evaluations at challenge points + let mut evaluation_challenges = Vec::with_capacity(blobs.len()); + let mut ys = Vec::with_capacity(blobs.len()); + + // Process each blob sequentially + // TODO: Potential optimizations: + // 1. Cache roots of unity calculations across iterations + // 2. Parallelize processing for large numbers of blobs + // 3. 
Batch polynomial conversions if possible + for i in 0..blobs.len() { + // Step 1: Convert blob to polynomial form + // This is necessary because we need to evaluate the polynomial + let polynomial = blobs[i].to_polynomial_eval_form(); + + // Step 2: Generate Fiat-Shamir challenge + // This creates a "random" evaluation point based on the blob and commitment + // The challenge is deterministic but unpredictable, making the proof non-interactive + let evaluation_challenge = compute_challenge(&blobs[i], &commitments[i])?; + + // Step 3: Evaluate the polynomial at the challenge point + // This uses the evaluation form for efficient computation + // The srs_order parameter ensures compatibility with the trusted setup + let y = evaluate_polynomial_in_evaluation_form(&polynomial, &evaluation_challenge)?; + + // Store both: + // - The challenge point (where we evaluated) + // - The evaluation result (what the polynomial equals at that point) + evaluation_challenges.push(evaluation_challenge); + ys.push(y); + } + + // Return tuple of: + // 1. Vector of evaluation points (challenges) + // 2. Vector of polynomial evaluations at those points + // These will be used in the KZG proof verification process + Ok((evaluation_challenges, ys)) +} diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs new file mode 100644 index 0000000..8ee8c4d --- /dev/null +++ b/primitives/src/lib.rs @@ -0,0 +1,7 @@ +mod arith; +pub mod blob; +pub mod consts; +pub mod errors; +pub mod helpers; +pub mod polynomial; +pub mod traits; diff --git a/src/polynomial.rs b/primitives/src/polynomial.rs similarity index 99% rename from src/polynomial.rs rename to primitives/src/polynomial.rs index 2712475..fb62e65 100644 --- a/src/polynomial.rs +++ b/primitives/src/polynomial.rs @@ -1,8 +1,9 @@ -use crate::{consts::BYTES_PER_FIELD_ELEMENT, errors::PolynomialError, helpers}; use ark_bn254::Fr; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_std::Zero; +use crate::{consts::BYTES_PER_FIELD_ELEMENT, errors::PolynomialError, helpers}; + #[derive(Clone, Debug, PartialEq)] pub struct PolynomialEvalForm { /// evaluations contains the evaluations of the polynomial, padded with 0s diff --git a/src/traits.rs b/primitives/src/traits.rs similarity index 100% rename from src/traits.rs rename to primitives/src/traits.rs diff --git a/tests/blob_test.rs b/primitives/tests/blob_test.rs similarity index 99% rename from tests/blob_test.rs rename to primitives/tests/blob_test.rs index 418eb66..61f95b3 100644 --- a/tests/blob_test.rs +++ b/primitives/tests/blob_test.rs @@ -1,6 +1,7 @@ #[cfg(test)] mod tests { - use rust_kzg_bn254::blob::Blob; + use rust_kzg_bn254_primitives::blob::Blob; + const GETTYSBURG_ADDRESS_BYTES: &[u8] = "Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. 
The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.".as_bytes(); #[test] diff --git a/tests/helpers_test.rs b/primitives/tests/helpers_test.rs similarity index 74% rename from tests/helpers_test.rs rename to primitives/tests/helpers_test.rs index 88dc72b..b24a45d 100644 --- a/tests/helpers_test.rs +++ b/primitives/tests/helpers_test.rs @@ -1,12 +1,17 @@ +use std::{ + fs::File, + io::{BufRead, BufReader, Read}, +}; + use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective}; use ark_ff::{PrimeField, UniformRand}; use ark_std::{str::FromStr, One}; -use rust_kzg_bn254::{ - consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED}, +use rust_kzg_bn254_primitives::{ + consts::{BYTES_PER_FIELD_ELEMENT, PRIMITIVE_ROOTS_OF_UNITY, SIZE_OF_G1_AFFINE_COMPRESSED}, helpers::{ blob_to_polynomial, convert_by_padding_empty_byte, get_num_element, is_on_curve_g1, - is_on_curve_g2, remove_empty_byte_from_padded_bytes_unchecked, set_bytes_canonical, - set_bytes_canonical_manual, to_byte_array, to_fr_array, + is_on_curve_g2, is_zeroed, remove_empty_byte_from_padded_bytes_unchecked, + set_bytes_canonical, set_bytes_canonical_manual, to_byte_array, to_fr_array, }, }; @@ -43,12 +48,6 @@ fn test_g2_is_on_curve() { // Tests deserialization of data and equivalence. 
#[test] fn test_blob_to_polynomial() { - use ark_serialize::Read; - use std::{ - fs::File, - io::{BufRead, BufReader}, - }; - let file = File::open("tests/test-files/blobs.txt").unwrap(); let mut reader = BufReader::new(file); let mut buffer = [0u8; SIZE_OF_G1_AFFINE_COMPRESSED]; @@ -235,70 +234,109 @@ fn test_convert_by_padding_empty_byte() { ); } -#[cfg(test)] -mod tests { - use rust_kzg_bn254::helpers::is_zeroed; +#[test] +fn test_is_zeroed_all_zeroes() { + // Case where the first byte and the buffer are all zeroes + let first_byte = 0; + let buf = vec![0, 0, 0, 0, 0]; + assert!(is_zeroed(first_byte, buf), "Expected true for all zeroes"); +} - #[test] - fn test_is_zeroed_all_zeroes() { - // Case where the first byte and the buffer are all zeroes - let first_byte = 0; - let buf = vec![0, 0, 0, 0, 0]; - assert!(is_zeroed(first_byte, buf), "Expected true for all zeroes"); - } +#[test] +fn test_is_zeroed_first_byte_non_zero() { + // Case where the first byte is non-zero + let first_byte = 1; + let buf = vec![0, 0, 0, 0, 0]; + assert!( + !is_zeroed(first_byte, buf), + "Expected false when the first byte is non-zero" + ); +} - #[test] - fn test_is_zeroed_first_byte_non_zero() { - // Case where the first byte is non-zero - let first_byte = 1; - let buf = vec![0, 0, 0, 0, 0]; - assert!( - !is_zeroed(first_byte, buf), - "Expected false when the first byte is non-zero" - ); - } +#[test] +fn test_is_zeroed_buffer_non_zero() { + // Case where the buffer contains non-zero elements + let first_byte = 0; + let buf = vec![0, 0, 1, 0, 0]; + assert!( + !is_zeroed(first_byte, buf), + "Expected false when the buffer contains non-zero elements" + ); +} - #[test] - fn test_is_zeroed_buffer_non_zero() { - // Case where the buffer contains non-zero elements - let first_byte = 0; - let buf = vec![0, 0, 1, 0, 0]; - assert!( - !is_zeroed(first_byte, buf), - "Expected false when the buffer contains non-zero elements" - ); - } +#[test] +fn test_is_zeroed_first_byte_and_buffer_non_zero() { + // Case where both the first byte and buffer contain non-zero elements + let first_byte = 1; + let buf = vec![0, 1, 0, 0, 0]; + assert!( + !is_zeroed(first_byte, buf), + "Expected false when both the first byte and buffer contain non-zero elements" + ); +} - #[test] - fn test_is_zeroed_first_byte_and_buffer_non_zero() { - // Case where both the first byte and buffer contain non-zero elements - let first_byte = 1; - let buf = vec![0, 1, 0, 0, 0]; - assert!( - !is_zeroed(first_byte, buf), - "Expected false when both the first byte and buffer contain non-zero elements" - ); - } +#[test] +fn test_is_zeroed_empty_buffer() { + // Case where the buffer is empty but the first byte is zero + let first_byte = 0; + let buf: Vec = Vec::new(); + assert!( + is_zeroed(first_byte, buf), + "Expected true for an empty buffer with a zero first byte" + ); +} - #[test] - fn test_is_zeroed_empty_buffer() { - // Case where the buffer is empty but the first byte is zero - let first_byte = 0; - let buf: Vec = Vec::new(); - assert!( - is_zeroed(first_byte, buf), - "Expected true for an empty buffer with a zero first byte" - ); - } +#[test] +fn test_is_zeroed_empty_buffer_non_zero_first_byte() { + // Case where the buffer is empty and the first byte is non-zero + let first_byte = 1; + let buf: Vec = Vec::new(); + assert!( + !is_zeroed(first_byte, buf), + "Expected false for an empty buffer with a non-zero first byte" + ); +} + +#[test] +fn test_primitive_roots_from_bigint_to_fr() { + let data: [&str; 29] = [ + "1", + 
"21888242871839275222246405745257275088548364400416034343698204186575808495616", + "21888242871839275217838484774961031246007050428528088939761107053157389710902", + "19540430494807482326159819597004422086093766032135589407132600596362845576832", + "14940766826517323942636479241147756311199852622225275649687664389641784935947", + "4419234939496763621076330863786513495701855246241724391626358375488475697872", + "9088801421649573101014283686030284801466796108869023335878462724291607593530", + "10359452186428527605436343203440067497552205259388878191021578220384701716497", + "3478517300119284901893091970156912948790432420133812234316178878452092729974", + "6837567842312086091520287814181175430087169027974246751610506942214842701774", + "3161067157621608152362653341354432744960400845131437947728257924963983317266", + "1120550406532664055539694724667294622065367841900378087843176726913374367458", + "4158865282786404163413953114870269622875596290766033564087307867933865333818", + "197302210312744933010843010704445784068657690384188106020011018676818793232", + "20619701001583904760601357484951574588621083236087856586626117568842480512645", + "20402931748843538985151001264530049874871572933694634836567070693966133783803", + "421743594562400382753388642386256516545992082196004333756405989743524594615", + "12650941915662020058015862023665998998969191525479888727406889100124684769509", + "11699596668367776675346610687704220591435078791727316319397053191800576917728", + "15549849457946371566896172786938980432421851627449396898353380550861104573629", + "17220337697351015657950521176323262483320249231368149235373741788599650842711", + "13536764371732269273912573961853310557438878140379554347802702086337840854307", + "12143866164239048021030917283424216263377309185099704096317235600302831912062", + "934650972362265999028062457054462628285482693704334323590406443310927365533", + "5709868443893258075976348696661355716898495876243883251619397131511003808859", + "19200870435978225707111062059747084165650991997241425080699860725083300967194", + "7419588552507395652481651088034484897579724952953562618697845598160172257810", + "2082940218526944230311718225077035922214683169814847712455127909555749686340", + "19103219067921713944291392827692070036145651957329286315305642004821462161904", + ]; + let fr_s = data + .iter() + .map(|s: &&str| Fr::from_str(*s).unwrap()) + .collect::>(); - #[test] - fn test_is_zeroed_empty_buffer_non_zero_first_byte() { - // Case where the buffer is empty and the first byte is non-zero - let first_byte = 1; - let buf: Vec = Vec::new(); - assert!( - !is_zeroed(first_byte, buf), - "Expected false for an empty buffer with a non-zero first byte" - ); + for i in 0..PRIMITIVE_ROOTS_OF_UNITY.len() { + let root_of_unity_at_index = PRIMITIVE_ROOTS_OF_UNITY[i]; + assert_eq!(root_of_unity_at_index, fr_s[i]); } } diff --git a/tests/polynomial_test.rs b/primitives/tests/polynomial_test.rs similarity index 99% rename from tests/polynomial_test.rs rename to primitives/tests/polynomial_test.rs index db6f1de..432510d 100644 --- a/tests/polynomial_test.rs +++ b/primitives/tests/polynomial_test.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod tests { use ark_bn254::Fr; - use rust_kzg_bn254::{ + use rust_kzg_bn254_primitives::{ blob::Blob, polynomial::{PolynomialCoeffForm, PolynomialEvalForm}, }; diff --git a/tests/test-files/blobs-from-fr.txt b/primitives/tests/test-files/blobs-from-fr.txt similarity index 100% rename from tests/test-files/blobs-from-fr.txt rename to primitives/tests/test-files/blobs-from-fr.txt diff 
--git a/tests/test-files/blobs.txt b/primitives/tests/test-files/blobs.txt similarity index 100% rename from tests/test-files/blobs.txt rename to primitives/tests/test-files/blobs.txt diff --git a/prover/Cargo.toml b/prover/Cargo.toml new file mode 100644 index 0000000..76071a6 --- /dev/null +++ b/prover/Cargo.toml @@ -0,0 +1,58 @@ +[package] +name = "rust-kzg-bn254-prover" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +description = "This library offers a set of functions for generating and interacting with bn254 KZG commitments and proofs in rust, with the motivation of supporting fraud and validity proof logic in EigenDA rollup integrations." +readme = "README.md" +repository.workspace = true +license-file.workspace = true +exclude = ["tests/*", "benches/*"] +# TODO: is this needed for the image to show up in the rust docs? +include = ["./kzg_commitment_diagram.png"] + +[dependencies] +rust-kzg-bn254-primitives = { path = "../primitives", version = "0.1.0" } +ark-bn254 = "0.5.0" +ark-ec = { version = "0.5.0", features = ["parallel"] } +ark-ff = { version = "0.5.0", features = ["parallel"] } +ark-std = { version = "0.5.0", features = ["parallel"] } +ark-poly = { version = "0.5.0", features = ["parallel"] } +num-traits = "0.2" +rayon = "1.10" +crossbeam-channel = "0.5" +num_cpus = "1.16.0" + +[dev-dependencies] +rand = "0.8.5" +criterion = "0.5.1" +lazy_static = "1.5.0" + +[[test]] +name = "kzg" +path = "tests/kzg_test.rs" + +[[bench]] +name = "bench_g1_ifft" +harness = false +path = "benches/bench_g1_ifft.rs" + +[[bench]] +name = "bench_kzg_setup" +harness = false +path = "benches/bench_kzg_setup.rs" + +[[bench]] +name = "bench_kzg_commit" +harness = false +path = "benches/bench_kzg_commit.rs" + +[[bench]] +name = "bench_kzg_commit_large_blobs" +harness = false +path = "benches/bench_kzg_commit_large_blobs.rs" + +[[bench]] +name = "bench_kzg_proof" +harness = false +path = "benches/bench_kzg_proof.rs" diff --git a/README.md b/prover/README.md similarity index 100% rename from README.md rename to prover/README.md diff --git a/benches/bench_g1_ifft.rs b/prover/benches/bench_g1_ifft.rs similarity index 75% rename from benches/bench_g1_ifft.rs rename to prover/benches/bench_g1_ifft.rs index 17b2a70..7c36dd0 100644 --- a/benches/bench_g1_ifft.rs +++ b/prover/benches/bench_g1_ifft.rs @@ -1,5 +1,5 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use rust_kzg_bn254::kzg::KZG; +use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS}; use std::time::Duration; fn generate_powers_of_2(limit: u64) -> Vec { @@ -16,10 +16,17 @@ fn generate_powers_of_2(limit: u64) -> Vec { fn bench_g1_ifft(c: &mut Criterion) { c.bench_function("bench_g1_ifft", |b| { - let kzg = KZG::setup("tests/test-files/mainnet-data/g1.131072.point", 3000, 3000).unwrap(); + let kzg = KZG::new(); + let srs = SRS::new( + "tests/test-files/mainnet-data/g1.131072.point", + 268435456, + 131072, + ) + .unwrap(); + b.iter(|| { for power in &generate_powers_of_2(3000) { - kzg.g1_ifft(black_box(*power)).unwrap(); + kzg.g1_ifft(black_box(*power), &srs).unwrap(); } }); }); diff --git a/benches/bench_kzg_commit.rs b/prover/benches/bench_kzg_commit.rs similarity index 80% rename from benches/bench_kzg_commit.rs rename to prover/benches/bench_kzg_commit.rs index 64e9a7a..d57d756 100644 --- a/benches/bench_kzg_commit.rs +++ b/prover/benches/bench_kzg_commit.rs @@ -1,14 +1,16 @@ use criterion::{criterion_group, criterion_main, Criterion}; use rand::Rng; -use rust_kzg_bn254::{blob::Blob, kzg::KZG}; 
+use rust_kzg_bn254_primitives::blob::Blob; +use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS}; use std::time::Duration; fn bench_kzg_commit(c: &mut Criterion) { let mut rng = rand::thread_rng(); - let mut kzg = KZG::setup( - "tests/test-files/mainnet-data/g1.131072.point", + let mut kzg = KZG::new(); + let srs = SRS::new( + "tests/test-files/mainnet-data/g1.32mb.point", 268435456, - 131072, + 524288, ) .unwrap(); @@ -18,7 +20,7 @@ fn bench_kzg_commit(c: &mut Criterion) { let input_poly = input.to_polynomial_coeff_form(); kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) .unwrap(); - b.iter(|| kzg.commit_coeff_form(&input_poly).unwrap()); + b.iter(|| kzg.commit_coeff_form(&input_poly, &srs).unwrap()); }); c.bench_function("bench_kzg_commit_30000", |b| { @@ -27,7 +29,7 @@ fn bench_kzg_commit(c: &mut Criterion) { let input_poly = input.to_polynomial_coeff_form(); kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) .unwrap(); - b.iter(|| kzg.commit_coeff_form(&input_poly).unwrap()); + b.iter(|| kzg.commit_coeff_form(&input_poly, &srs).unwrap()); }); c.bench_function("bench_kzg_commit_50000", |b| { @@ -36,7 +38,7 @@ fn bench_kzg_commit(c: &mut Criterion) { let input_poly = input.to_polynomial_coeff_form(); kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) .unwrap(); - b.iter(|| kzg.commit_coeff_form(&input_poly).unwrap()); + b.iter(|| kzg.commit_coeff_form(&input_poly, &srs).unwrap()); }); } diff --git a/benches/bench_kzg_commit_large_blobs.rs b/prover/benches/bench_kzg_commit_large_blobs.rs similarity index 83% rename from benches/bench_kzg_commit_large_blobs.rs rename to prover/benches/bench_kzg_commit_large_blobs.rs index b401c37..41d9510 100644 --- a/benches/bench_kzg_commit_large_blobs.rs +++ b/prover/benches/bench_kzg_commit_large_blobs.rs @@ -1,11 +1,13 @@ use criterion::{criterion_group, criterion_main, Criterion}; use rand::Rng; -use rust_kzg_bn254::{blob::Blob, kzg::KZG}; +use rust_kzg_bn254_primitives::blob::Blob; +use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS}; use std::time::Duration; fn bench_kzg_commit(c: &mut Criterion) { let mut rng = rand::thread_rng(); - let mut kzg = KZG::setup( + let mut kzg = KZG::new(); + let srs = SRS::new( "tests/test-files/mainnet-data/g1.32mb.point", 268435456, 524288, @@ -20,7 +22,7 @@ fn bench_kzg_commit(c: &mut Criterion) { let input_poly = input.to_polynomial_coeff_form(); kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) .unwrap(); - b.iter(|| kzg.commit_coeff_form(&input_poly).unwrap()); + b.iter(|| kzg.commit_coeff_form(&input_poly, &srs).unwrap()); }); c.bench_function("bench_kzg_commit_16mb", |b| { @@ -31,7 +33,7 @@ fn bench_kzg_commit(c: &mut Criterion) { let input_poly = input.to_polynomial_coeff_form(); kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) .unwrap(); - b.iter(|| kzg.commit_coeff_form(&input_poly).unwrap()); + b.iter(|| kzg.commit_coeff_form(&input_poly, &srs).unwrap()); }); } diff --git a/benches/bench_kzg_proof.rs b/prover/benches/bench_kzg_proof.rs similarity index 88% rename from benches/bench_kzg_proof.rs rename to prover/benches/bench_kzg_proof.rs index b1055e8..e641e7b 100644 --- a/benches/bench_kzg_proof.rs +++ b/prover/benches/bench_kzg_proof.rs @@ -1,14 +1,16 @@ use criterion::{criterion_group, criterion_main, Criterion}; use rand::Rng; -use rust_kzg_bn254::{blob::Blob, kzg::KZG}; +use rust_kzg_bn254_primitives::blob::Blob; +use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS}; use std::time::Duration; fn 
bench_kzg_proof(c: &mut Criterion) { let mut rng = rand::thread_rng(); - let mut kzg = KZG::setup( - "tests/test-files/mainnet-data/g1.131072.point", + let mut kzg = KZG::new(); + let srs = SRS::new( + "tests/test-files/mainnet-data/g1.32mb.point", 268435456, - 131072, + 524288, ) .unwrap(); @@ -21,7 +23,7 @@ fn bench_kzg_proof(c: &mut Criterion) { let index = rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements()); b.iter(|| { - kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap()) + kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs) .unwrap() }); }); @@ -35,7 +37,7 @@ fn bench_kzg_proof(c: &mut Criterion) { let index = rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements()); b.iter(|| { - kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap()) + kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs) .unwrap() }); }); @@ -49,7 +51,7 @@ fn bench_kzg_proof(c: &mut Criterion) { let index = rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements()); b.iter(|| { - kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap()) + kzg.compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs) .unwrap() }); }); diff --git a/benches/bench_kzg_setup.rs b/prover/benches/bench_kzg_setup.rs similarity index 72% rename from benches/bench_kzg_setup.rs rename to prover/benches/bench_kzg_setup.rs index 273b688..d7261dd 100644 --- a/benches/bench_kzg_setup.rs +++ b/prover/benches/bench_kzg_setup.rs @@ -1,18 +1,16 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use rust_kzg_bn254::kzg::KZG; +use rust_kzg_bn254_prover::srs::SRS; use std::time::Duration; fn bench_kzg_setup(c: &mut Criterion) { c.bench_function("bench_kzg_setup", |b| { - b.iter(|| KZG::setup("tests/test-files/g1.point", 3000, 3000).unwrap()); - b.iter(|| { - KZG::setup( - "tests/test-files/mainnet-data/g1.131072.point", + SRS::new( + "tests/test-files/mainnet-data/g1.32mb.point", 268435456, - 131072, + 524288, ) - .unwrap() + .unwrap(); }); }); } diff --git a/kzg_commitment_diagram.png b/prover/kzg_commitment_diagram.png similarity index 100% rename from kzg_commitment_diagram.png rename to prover/kzg_commitment_diagram.png diff --git a/prover/src/kzg.rs b/prover/src/kzg.rs new file mode 100644 index 0000000..ca97b2b --- /dev/null +++ b/prover/src/kzg.rs @@ -0,0 +1,314 @@ +use ark_bn254::{Fr, G1Affine, G1Projective}; +use ark_ec::{CurveGroup, VariableBaseMSM}; +use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; +use ark_std::{ops::Div, Zero}; +use num_traits::ToPrimitive; +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; +use rust_kzg_bn254_primitives::{ + blob::Blob, + errors::KzgError, + helpers, + polynomial::{PolynomialCoeffForm, PolynomialEvalForm}, +}; + +use crate::srs::SRS; + +/// Main interesting struct of the rust-kzg-bn254 crate. +/// [Kzg] is a struct that holds the SRS points in monomial form, and +/// provides methods for committing to a blob, (either via a [Blob] itself, +/// or a [PolynomialCoeffForm] or [PolynomialEvalForm]), and generating and +/// verifying proofs. +/// +/// The [Blob] and [PolynomialCoeffForm]/[PolynomialEvalForm] structs are mostly +/// with +/// constructor and few helper methods. 
+#[derive(Debug, PartialEq, Clone)] +pub struct KZG { + expanded_roots_of_unity: Vec, +} + +impl Default for KZG { + fn default() -> Self { + Self::new() + } +} + +impl KZG { + pub fn new() -> Self { + Self { + expanded_roots_of_unity: vec![], + } + } + + /// Calculates the roots of unities and assigns it to the struct + /// + /// # Arguments + /// * `length_of_data_after_padding` - Length of the blob data after padding in bytes. + /// + /// # Returns + /// * `Result<(), KzgError>` + /// + /// # Details + /// - Generates roots of unity needed for FFT operations + /// + /// # Example + /// ``` + /// use rust_kzg_bn254_prover::kzg::KZG; + /// use rust_kzg_bn254_primitives::blob::Blob; + /// use ark_std::One; + /// use ark_bn254::Fr; + /// + /// let mut kzg = KZG::new(); + /// let input_blob = Blob::from_raw_data(b"test blob data"); + /// kzg.calculate_and_store_roots_of_unity(input_blob.len().try_into().unwrap()).unwrap(); + /// ``` + pub fn calculate_and_store_roots_of_unity( + &mut self, + length_of_data_after_padding: u64, + ) -> Result<(), KzgError> { + let roots_of_unity = helpers::calculate_roots_of_unity(length_of_data_after_padding)?; + self.expanded_roots_of_unity = roots_of_unity; + Ok(()) + } + + pub fn get_roots_of_unities(&self) -> Vec { + self.expanded_roots_of_unity.clone() + } + + /// helper function to get the + pub fn get_nth_root_of_unity(&self, i: usize) -> Option<&Fr> { + self.expanded_roots_of_unity.get(i) + } + + /// Commit the polynomial with the srs values loaded into [Kzg]. + pub fn commit_eval_form( + &self, + polynomial: &PolynomialEvalForm, + srs: &SRS, + ) -> Result { + if polynomial.len() > srs.g1.len() { + return Err(KzgError::SerializationError( + "polynomial length is not correct".to_string(), + )); + } + + // When the polynomial is in evaluation form, use IFFT to transform monomial srs + // points to lagrange form. + let bases = self.g1_ifft(polynomial.len(), srs)?; + + match G1Projective::msm(&bases, polynomial.evaluations()) { + Ok(res) => Ok(res.into_affine()), + Err(err) => Err(KzgError::CommitError(err.to_string())), + } + } + + /// Commit the polynomial with the srs values loaded into [Kzg]. + pub fn commit_coeff_form( + &self, + polynomial: &PolynomialCoeffForm, + srs: &SRS, + ) -> Result { + if polynomial.len() > srs.g1.len() { + return Err(KzgError::SerializationError( + "polynomial length is not correct".to_string(), + )); + } + // When the polynomial is in coefficient form, use the original srs points (in + // monomial form). 
+ let bases = srs.g1[..polynomial.len()].to_vec(); + + match G1Projective::msm(&bases, polynomial.coeffs()) { + Ok(res) => Ok(res.into_affine()), + Err(err) => Err(KzgError::CommitError(err.to_string())), + } + } + + /// Helper function for `compute_kzg_proof()` and `compute_blob_kzg_proof()` + fn compute_proof_impl( + &self, + polynomial: &PolynomialEvalForm, + z_fr: &Fr, + srs: &SRS, + ) -> Result { + // Verify polynomial length matches that of the roots of unity + if polynomial.len() != self.expanded_roots_of_unity.len() { + return Err(KzgError::GenericError( + "inconsistent length between blob and root of unities".to_string(), + )); + } + + let eval_fr = polynomial.evaluations(); + // Pre-allocate vector for shifted polynomial p(x) - y + let mut poly_shift: Vec = Vec::with_capacity(eval_fr.len()); + + // Evaluate polynomial at the point z + // This gives us y = p(z) + let y_fr = helpers::evaluate_polynomial_in_evaluation_form(polynomial, z_fr)?; + + // Compute p(x) - y for each evaluation point + // This is the numerator of the quotient polynomial + for fr in eval_fr { + poly_shift.push(*fr - y_fr); + } + + // Compute denominator polynomial (x - z) at each root of unity + let mut denom_poly = Vec::::with_capacity(self.expanded_roots_of_unity.len()); + for root_of_unity in self.expanded_roots_of_unity.iter().take(eval_fr.len()) { + denom_poly.push(*root_of_unity - z_fr); + } + + // Pre-allocate vector for quotient polynomial evaluations + let mut quotient_poly = Vec::::with_capacity(self.expanded_roots_of_unity.len()); + + // Compute quotient polynomial q(x) = (p(x) - y)/(x - z) at each root of unity + for i in 0..self.expanded_roots_of_unity.len() { + if denom_poly[i].is_zero() { + // Special case: when x = z, use L'Hôpital's rule + // Compute the derivative evaluation instead + quotient_poly.push(self.compute_quotient_eval_on_domain(z_fr, eval_fr, &y_fr)); + } else { + // Normal case: direct polynomial division + quotient_poly.push(poly_shift[i].div(denom_poly[i])); + } + } + + let quotient_poly_eval_form = PolynomialEvalForm::new(quotient_poly); + self.commit_eval_form("ient_poly_eval_form, srs) + } + + /// commit to a [Blob], by transforming it into a [PolynomialEvalForm] and + /// then calling [Kzg::commit_eval_form]. + pub fn commit_blob(&self, blob: &Blob, srs: &SRS) -> Result { + let polynomial = blob.to_polynomial_eval_form(); + self.commit_eval_form(&polynomial, srs) + } + + pub fn compute_proof_with_known_z_fr_index( + &self, + polynomial: &PolynomialEvalForm, + index: u64, + srs: &SRS, + ) -> Result { + // Convert u64 index to usize for array indexing + let usized_index = index.to_usize().ok_or(KzgError::GenericError( + "Index conversion to usize failed".to_string(), + ))?; + + // Get the root of unity at the specified index + let z_fr = self + .get_nth_root_of_unity(usized_index) + .ok_or_else(|| KzgError::GenericError("Root of unity not found".to_string()))?; + + // Compute the KZG proof at the selected root of unity + // This delegates to the main proof computation function + // using our selected evaluation point + self.compute_proof(polynomial, z_fr, srs) + } + + /// Compute a kzg proof from a polynomial in evaluation form. + /// We don't currently support proofs for polynomials in coefficient form, + /// but one can take the FFT of the polynomial in coefficient form to + /// get the polynomial in evaluation form. This is available via the + /// method [PolynomialCoeffForm::to_eval_form]. + /// TODO(anupsv): Accept bytes instead of Fr element. 
Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/29 + pub fn compute_proof( + &self, + polynomial: &PolynomialEvalForm, + z_fr: &Fr, + srs: &SRS, + ) -> Result { + // Verify that polynomial length matches roots of unity length + if polynomial.len() != self.expanded_roots_of_unity.len() { + return Err(KzgError::GenericError( + "inconsistent length between blob and root of unities".to_string(), + )); + } + + // Call the implementation to compute the actual proof + // This will: + // 1. Evaluate polynomial at z + // 2. Compute quotient polynomial q(x) = (p(x) - p(z)) / (x - z) + // 3. Generate KZG proof as commitment to q(x) + self.compute_proof_impl(polynomial, z_fr, srs) + } + + /// refer to DA for more context + pub fn compute_quotient_eval_on_domain(&self, z_fr: &Fr, eval_fr: &[Fr], value_fr: &Fr) -> Fr { + let mut quotient = Fr::zero(); + let mut fi: Fr = Fr::zero(); + let mut numerator: Fr = Fr::zero(); + let mut denominator: Fr = Fr::zero(); + let mut temp: Fr = Fr::zero(); + + self.expanded_roots_of_unity + .iter() + .enumerate() + .for_each(|(i, omega_i)| { + if *omega_i == *z_fr { + return; + } + fi = eval_fr[i] - value_fr; + numerator = fi * omega_i; + denominator = z_fr - omega_i; + denominator *= z_fr; + temp = numerator.div(denominator); + quotient += temp; + }); + + quotient + } + + /// function to compute the inverse FFT + pub fn g1_ifft(&self, length: usize, srs: &SRS) -> Result, KzgError> { + // is not power of 2 + if !length.is_power_of_two() { + return Err(KzgError::FFTError( + "length provided is not a power of 2".to_string(), + )); + } + + let points_projective: Vec = srs.g1[..length] + .par_iter() + .map(|&p| G1Projective::from(p)) + .collect(); + let ifft_result: Vec<_> = GeneralEvaluationDomain::::new(length) + .ok_or(KzgError::FFTError( + "Could not perform IFFT due to domain consturction error".to_string(), + ))? + .ifft(&points_projective) + .par_iter() + .map(|p| p.into_affine()) + .collect(); + + Ok(ifft_result) + } + + /// TODO(anupsv): Match 4844 specs w.r.t to the inputs. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/30 + pub fn compute_blob_proof( + &self, + blob: &Blob, + commitment: &G1Affine, + srs: &SRS, + ) -> Result { + // Validate that the commitment is a valid point on the G1 curve + // This prevents potential invalid curve attacks + if !commitment.is_on_curve() || !commitment.is_in_correct_subgroup_assuming_on_curve() { + return Err(KzgError::NotOnCurveError( + "commitment not on curve".to_string(), + )); + } + + // Convert the blob to a polynomial in evaluation form + // This is necessary because KZG proofs work with polynomials + let blob_poly = blob.to_polynomial_eval_form(); + + // Compute the evaluation challenge using Fiat-Shamir heuristic + // This challenge determines the point at which we evaluate the polynomial + let evaluation_challenge = helpers::compute_challenge(blob, commitment)?; + + // Compute the actual KZG proof using the polynomial and evaluation point + // This creates a proof that the polynomial evaluates to a specific value at the challenge point + // The proof is a single G1 point that can be used to verify the evaluation + self.compute_proof_impl(&blob_poly, &evaluation_challenge, srs) + } +} diff --git a/src/lib.rs b/prover/src/lib.rs similarity index 91% rename from src/lib.rs rename to prover/src/lib.rs index f358938..cf1bfa5 100644 --- a/src/lib.rs +++ b/prover/src/lib.rs @@ -56,9 +56,11 @@ //! //! ### Commit to a some user data //! ```rust -//! use rust_kzg_bn254::{blob::Blob, kzg::KZG}; -//! -//! 
let kzg = KZG::setup( +//! use rust_kzg_bn254_prover::kzg::KZG; +//! use rust_kzg_bn254_prover::srs::SRS; +//! use rust_kzg_bn254_primitives::{blob::Blob}; +//! let kzg = KZG::new(); +//! let srs = SRS::new( //! "tests/test-files/mainnet-data/g1.131072.point", //! 268435456, //! 131072, @@ -67,7 +69,7 @@ //! let rollup_data: &[u8] = "some rollup batcher data".as_bytes(); //! let blob = Blob::from_raw_data(rollup_data); //! let poly = blob.to_polynomial_eval_form(); -//! let commitment = kzg.commit_eval_form(&poly).unwrap(); +//! let commitment = kzg.commit_eval_form(&poly, &srs).unwrap(); //! ``` //! //! ### Generate a proof for a piece of data @@ -76,11 +78,5 @@ //! ``` //! -mod arith; -pub mod blob; -pub mod consts; -pub mod errors; -pub mod helpers; pub mod kzg; -pub mod polynomial; -mod traits; +pub mod srs; diff --git a/prover/src/srs.rs b/prover/src/srs.rs new file mode 100644 index 0000000..7f12341 --- /dev/null +++ b/prover/src/srs.rs @@ -0,0 +1,241 @@ +use ark_bn254::G1Affine; +use crossbeam_channel::{bounded, Receiver}; +use rust_kzg_bn254_primitives::errors::KzgError; +use rust_kzg_bn254_primitives::traits::ReadPointFromBytes; +use std::fs::File; +use std::io::{self, BufReader, Read}; + +/// Represents the Structured Reference String (SRS) used in KZG commitments. +#[derive(Debug, PartialEq, Clone)] +pub struct SRS { + // SRS points are stored in monomial form, ready to be used for commitments with polynomials + // in coefficient form. To commit against a polynomial in evaluation form, we need to transform + // the SRS points to lagrange form using IFFT. + pub g1: Vec, + /// The order of the SRS. + pub order: u32, +} + +impl SRS { + /// Initializes the SRS by loading G1 points from a file. + /// + /// # Arguments + /// + /// * `path_to_g1_points` - The file path to load G1 points from. + /// * `order` - The total order of the SRS. + /// * `points_to_load` - The number of SRS points to load. + /// + /// # Returns + /// + /// * `Result` - The initialized SRS or an error. + pub fn new(path_to_g1_points: &str, order: u32, points_to_load: u32) -> Result { + if points_to_load > order { + return Err(KzgError::GenericError( + "Number of points to load exceeds SRS order.".to_string(), + )); + } + + let g1_points = + Self::parallel_read_g1_points(path_to_g1_points.to_owned(), points_to_load, false) + .map_err(|e| KzgError::SerializationError(e.to_string()))?; + + Ok(Self { + g1: g1_points, + order, + }) + } + + pub fn process_chunks(receiver: Receiver<(Vec, usize, bool)>) -> Vec<(T, usize)> + where + T: ReadPointFromBytes, + { + // TODO: should we use rayon to process this in parallel? + receiver + .iter() + .map(|(chunk, position, is_native)| { + let point: T = if is_native { + T::read_point_from_bytes_native_compressed(&chunk) + .expect("Failed to read point from bytes") + } else { + T::read_point_from_bytes_be(&chunk).expect("Failed to read point from bytes") + }; + (point, position) + }) + .collect() + } + + /// Reads G1 points in parallel from a file. + /// + /// # Arguments + /// + /// * `file_path` - The path to the file containing G1 points. + /// * `points_to_load` - The number of points to load. + /// * `is_native` - Whether the points are in native Arkworks format. + /// + /// # Returns + /// + /// * `Result, KzgError>` - The loaded G1 points or an error. 
+    /// Reads G1 points in parallel from a file.
+    ///
+    /// # Arguments
+    ///
+    /// * `file_path` - The path to the file containing G1 points.
+    /// * `points_to_load` - The number of points to load.
+    /// * `is_native` - Whether the points are in native Arkworks format.
+    ///
+    /// # Returns
+    ///
+    /// * `Result<Vec<G1Affine>, KzgError>` - The loaded G1 points or an error.
+    fn parallel_read_g1_points(
+        file_path: String,
+        points_to_load: u32,
+        is_native: bool,
+    ) -> Result<Vec<G1Affine>, KzgError> {
+        let (sender, receiver) = bounded::<(Vec<u8>, usize, bool)>(1000);
+
+        // Spawn the reader thread
+        let reader_handle = std::thread::spawn(
+            move || -> Result<(), Box<dyn std::error::Error + Send>> {
+                Self::read_file_chunks(&file_path, sender, 32, points_to_load, is_native)
+                    .map_err(|e| Box::new(e) as Box<dyn std::error::Error + Send>)
+            },
+        );
+
+        let num_workers = num_cpus::get();
+
+        let workers: Vec<_> = (0..num_workers)
+            .map(|_| {
+                let receiver = receiver.clone();
+                std::thread::spawn(move || Self::process_chunks::<G1Affine>(receiver))
+            })
+            .collect();
+
+        // Wait for the reader thread to finish
+        match reader_handle.join() {
+            Ok(result) => match result {
+                Ok(_) => {},
+                Err(e) => return Err(KzgError::GenericError(e.to_string())),
+            },
+            Err(_) => {
+                return Err(KzgError::GenericError(
+                    "Reader thread panicked.".to_string(),
+                ))
+            },
+        }
+
+        // Collect and sort the results
+        let mut all_points = Vec::new();
+        for worker in workers {
+            let points = worker.join().expect("Worker thread panicked.");
+            all_points.extend(points);
+        }
+
+        // Sort by original position to maintain order
+        all_points.sort_by_key(|&(_, position)| position);
+
+        // Extract the G1Affine points
+        Ok(all_points.iter().map(|(point, _)| *point).collect())
+    }
+
+    /// Reads file chunks and sends them through a channel.
+    ///
+    /// # Arguments
+    ///
+    /// * `file_path` - Path to the file.
+    /// * `sender` - Channel sender to send read chunks.
+    /// * `point_size` - Size of each point in bytes.
+    /// * `num_points` - Number of points to read.
+    /// * `is_native` - Whether the points are in native format.
+    ///
+    /// # Returns
+    ///
+    /// * `io::Result<()>` - Ok if successful, or an I/O error.
+    /// TODO: "chunks" seems misleading here, since we read one field element at a time.
+    fn read_file_chunks(
+        file_path: &str,
+        sender: crossbeam_channel::Sender<(Vec<u8>, usize, bool)>,
+        point_size: usize,
+        num_points: u32,
+        is_native: bool,
+    ) -> io::Result<()> {
+        let file = File::open(file_path)?;
+        let mut reader = BufReader::new(file);
+        let mut position = 0;
+        let mut buffer = vec![0u8; point_size];
+
+        let mut i = 0;
+        // We are making one syscall per field element, which is super inefficient.
+        // FIXME: Read the entire file (or large segments) into memory and then split it
+        // into field elements. The entire G1 file might be ~8GiB, so it might not fit
+        // in RAM, but we only ever need to read the subset of the file that we use.
+        // E.g., for fault proof usage, we only need to read 32MiB if our blob size is
+        // that large.
+        while let Ok(bytes_read) = reader.read(&mut buffer) {
+            if bytes_read == 0 {
+                break;
+            }
+            sender
+                .send((buffer[..bytes_read].to_vec(), position, is_native))
+                .unwrap();
+            position += bytes_read;
+            buffer.resize(point_size, 0); // Ensure the buffer is always the correct size
+            i += 1;
+            if num_points == i {
+                break;
+            }
+        }
+        Ok(())
+    }
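The reader/worker split above (one thread doing buffered I/O, `num_cpus` workers deserializing, position tags used to restore order) is a reusable shape. Below is a self-contained sketch of the same fan-out/fan-in pattern; the `parallel_parse` helper and its parser closure are hypothetical stand-ins for `process_chunks`/`ReadPointFromBytes`, not part of this patch:

```rust
use crossbeam_channel::bounded;
use std::thread;

/// Fan-out/fan-in sketch: one producer sends (chunk, position) pairs, N
/// workers parse chunks concurrently, and the results are re-sorted by
/// position so the output order matches the input order.
fn parallel_parse<T, F>(chunks: Vec<Vec<u8>>, workers: usize, parse: F) -> Vec<T>
where
    T: Send + 'static,
    F: Fn(&[u8]) -> T + Copy + Send + 'static,
{
    let (tx, rx) = bounded::<(Vec<u8>, usize)>(1000);

    // Producer: tag each chunk with its position; dropping `tx` at the end
    // closes the channel so the workers drain the queue and exit.
    let producer = thread::spawn(move || {
        for (pos, chunk) in chunks.into_iter().enumerate() {
            tx.send((chunk, pos)).unwrap();
        }
    });

    // Workers: each pulls chunks off the shared receiver until it disconnects.
    let handles: Vec<_> = (0..workers)
        .map(|_| {
            let rx = rx.clone();
            thread::spawn(move || {
                rx.iter()
                    .map(|(chunk, pos)| (parse(chunk.as_slice()), pos))
                    .collect::<Vec<_>>()
            })
        })
        .collect();

    producer.join().expect("reader thread panicked");
    let mut tagged: Vec<(T, usize)> = Vec::new();
    for handle in handles {
        tagged.extend(handle.join().expect("worker thread panicked"));
    }
    // Restore the original on-disk order.
    tagged.sort_by_key(|&(_, pos)| pos);
    tagged.into_iter().map(|(t, _)| t).collect()
}
```

Usage might look like `parallel_parse(chunks, num_cpus::get(), |bytes: &[u8]| bytes.len())`, where the closure would be a point deserializer in the real code.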
+    /// Reads G1 points in parallel by creating one reader thread, which reads
+    /// bytes from the file and fans them out to worker threads (one per
+    /// CPU), which parse the bytes into G1Affine points. The worker threads
+    /// then fan the parsed points back in to the main thread, which sorts them by
+    /// their original position in the file to maintain order. Not used anywhere,
+    /// but kept as a reference.
+    ///
+    /// # Arguments
+    /// * `file_path` - The path to the file containing the G1 points
+    /// * `points_to_load` - The number of points to load from the file
+    /// * `is_native` - Whether the points are in native arkworks format or not
+    ///
+    /// # Returns
+    /// * `Ok(Vec<G1Affine>)` - The G1 points read from the file
+    /// * `Err(KzgError)` - An error occurred while reading the file
+    pub fn parallel_read_g1_points_native(
+        file_path: String,
+        points_to_load: u32,
+        is_native: bool,
+    ) -> Result<Vec<G1Affine>, KzgError> {
+        // Channel contains (bytes, position, is_native) tuples. The position is used to
+        // reorder the points after processing them.
+        let (sender, receiver) = bounded::<(Vec<u8>, usize, bool)>(1000);
+
+        // Spawning the reader thread
+        let reader_thread = std::thread::spawn(
+            move || -> Result<(), Box<dyn std::error::Error + Send>> {
+                Self::read_file_chunks(&file_path, sender, 32, points_to_load, is_native)
+                    .map_err(|e| -> Box<dyn std::error::Error + Send> { Box::new(e) })
+            },
+        );
+
+        let num_workers = num_cpus::get();
+
+        let workers: Vec<_> = (0..num_workers)
+            .map(|_| {
+                let receiver = receiver.clone();
+                std::thread::spawn(move || Self::process_chunks::<G1Affine>(receiver))
+            })
+            .collect();
+
+        // Wait for the reader thread to finish
+        match reader_thread.join() {
+            Ok(result) => match result {
+                Ok(_) => {},
+                Err(e) => return Err(KzgError::GenericError(e.to_string())),
+            },
+            Err(_) => return Err(KzgError::GenericError("Thread panicked".to_string())),
+        }
+
+        // Collect and sort results
+        let mut all_points = Vec::new();
+        for worker in workers {
+            let points = worker.join().expect("Worker thread panicked");
+            all_points.extend(points);
+        }
+
+        // Sort by original position to maintain order
+        all_points.sort_by_key(|&(_, position)| position);
+
+        Ok(all_points.iter().map(|(point, _)| *point).collect())
+    }
+}
diff --git a/prover/tests/kzg_test.rs b/prover/tests/kzg_test.rs
new file mode 100644
index 0000000..645f286
--- /dev/null
+++ b/prover/tests/kzg_test.rs
@@ -0,0 +1,75 @@
+#[cfg(test)]
+mod tests {
+    use ark_bn254::Fr;
+    use ark_ff::UniformRand;
+    use lazy_static::lazy_static;
+    use rand::Rng;
+    use rust_kzg_bn254_primitives::{
+        blob::Blob, errors::KzgError, polynomial::PolynomialCoeffForm,
+    };
+    use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS};
+
+    // Define a static variable for setup
+    lazy_static! {
+        static ref KZG_INSTANCE: KZG = KZG::new();
+        static ref SRS_INSTANCE: SRS = SRS::new(
+            "tests/test-files/mainnet-data/g1.131072.point",
+            268435456,
+            131072
+        )
+        .unwrap();
+    }
+
+    #[test]
+    fn test_commit_errors() {
+        let mut coeffs = vec![];
+        let mut rng = rand::thread_rng();
+        coeffs.resize(5000000, Fr::rand(&mut rng));
+        let polynomial = PolynomialCoeffForm::new(coeffs);
+        let result = KZG_INSTANCE.commit_coeff_form(&polynomial, &SRS_INSTANCE);
+        assert_eq!(
+            result,
+            Err(KzgError::SerializationError(
+                "polynomial length is not correct".to_string()
+            ))
+        );
+    }
+
+    #[test]
+    fn test_srs_setup_errors() {
+        let srs = SRS::new("tests/test-files/g1.point", 3000, 3001);
+        assert_eq!(
+            srs,
+            Err(KzgError::GenericError(
+                "Number of points to load exceeds SRS order.".to_string()
+            ))
+        );
+    }
+
+    // This test is kept here to prevent a cyclic dependency in tests.
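The test that follows sweeps every index of the evaluation domain, which pins down the fast path of `evaluate_polynomial_in_evaluation_form`: when z equals a root of unity ω_i, the helper returns the stored evaluation f_i directly. For z off the domain it falls back to the barycentric formula (Steps 6-8 of the implementation, now hosted in the primitives crate):

$$
p(z) = \frac{z^{N} - 1}{N} \sum_{i=0}^{N-1} \frac{f_i\,\omega_i}{z - \omega_i},
$$

where N is the domain size, the ω_i are the N-th roots of unity, and f_i = p(ω_i).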
+ #[test] + fn test_evaluate_polynomial_in_evaluation_form_random_blob_all_indexes() { + let mut rng = rand::thread_rng(); + let mut kzg = KZG_INSTANCE.clone(); + let blob_length: u64 = rand::thread_rng().gen_range(35..40000); + let random_blob: Vec = (0..blob_length) + .map(|_| rng.gen_range(32..=126) as u8) + .collect(); + + let input = Blob::from_raw_data(&random_blob); + let input_poly = input.to_polynomial_eval_form(); + + for i in 0..input_poly.len_underlying_blob_field_elements() { + kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap()) + .unwrap(); + let z_fr = kzg.get_nth_root_of_unity(i).unwrap(); + let claimed_y_fr = + rust_kzg_bn254_primitives::helpers::evaluate_polynomial_in_evaluation_form( + &input_poly, + z_fr, + ) + .unwrap(); + assert_eq!(claimed_y_fr, input_poly.evaluations()[i]); + } + } +} diff --git a/tests/test-files/g1.point b/prover/tests/test-files/g1.point similarity index 100% rename from tests/test-files/g1.point rename to prover/tests/test-files/g1.point diff --git a/tests/test-files/kzg.proof.eq.input b/prover/tests/test-files/kzg.proof.eq.input similarity index 100% rename from tests/test-files/kzg.proof.eq.input rename to prover/tests/test-files/kzg.proof.eq.input diff --git a/tests/test-files/lagrangeG1SRS.txt b/prover/tests/test-files/lagrangeG1SRS.txt similarity index 100% rename from tests/test-files/lagrangeG1SRS.txt rename to prover/tests/test-files/lagrangeG1SRS.txt diff --git a/tests/test-files/mainnet-data/g1.131072.point b/prover/tests/test-files/mainnet-data/g1.131072.point similarity index 100% rename from tests/test-files/mainnet-data/g1.131072.point rename to prover/tests/test-files/mainnet-data/g1.131072.point diff --git a/tests/test-files/mainnet-data/g1.32mb.point b/prover/tests/test-files/mainnet-data/g1.32mb.point similarity index 100% rename from tests/test-files/mainnet-data/g1.32mb.point rename to prover/tests/test-files/mainnet-data/g1.32mb.point diff --git a/tests/test-files/mainnet-data/g2.point.powerOf2 b/prover/tests/test-files/mainnet-data/g2.point.powerOf2 similarity index 100% rename from tests/test-files/mainnet-data/g2.point.powerOf2 rename to prover/tests/test-files/mainnet-data/g2.point.powerOf2 diff --git a/tests/test-files/srs.g1.points.string b/prover/tests/test-files/srs.g1.points.string similarity index 100% rename from tests/test-files/srs.g1.points.string rename to prover/tests/test-files/srs.g1.points.string diff --git a/src/kzg.rs b/src/kzg.rs deleted file mode 100644 index 0dfe69a..0000000 --- a/src/kzg.rs +++ /dev/null @@ -1,1133 +0,0 @@ -use crate::{ - blob::Blob, - consts::{ - BYTES_PER_FIELD_ELEMENT, G2_TAU_FOR_MAINNET_SRS, G2_TAU_FOR_TEST_SRS_3000, - MAINNET_SRS_G1_SIZE, SIZE_OF_G1_AFFINE_COMPRESSED, - }, - errors::KzgError, - helpers, - polynomial::{PolynomialCoeffForm, PolynomialEvalForm}, -}; - -use crate::consts::{FIAT_SHAMIR_PROTOCOL_DOMAIN, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN}; -use crate::helpers::is_on_curve_g1; -use ark_bn254::{Bn254, Fr, G1Affine, G1Projective, G2Affine}; -use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM}; -use ark_ff::{BigInteger, Field, PrimeField}; -use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; -use ark_serialize::{CanonicalSerialize, Read}; -use ark_std::{iterable::Iterable, ops::Div, One, Zero}; -use crossbeam_channel::{bounded, Sender}; -use num_traits::ToPrimitive; -use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use sha2::{Digest, Sha256}; -use std::{ - fs::File, - io::{self, BufReader}, -}; - -/// Main 
interesting struct of the rust-kzg-bn254 crate. -/// [Kzg] is a struct that holds the SRS points in monomial form, and -/// provides methods for committing to a blob, (either via a [Blob] itself, -/// or a [PolynomialCoeffForm] or [PolynomialEvalForm]), and generating and -/// verifying proofs. -/// -/// The [Blob] and [PolynomialCoeffForm]/[PolynomialEvalForm] structs are mostly -/// with -/// constructor and few helper methods. -#[derive(Debug, PartialEq, Clone)] -pub struct KZG { - // SRS points are stored in monomial form, ready to be used for commitments with polynomials - // in coefficient form. To commit against a polynomial in evaluation form, we need to transform - // the SRS points to lagrange form using IFFT. - g1: Vec, - srs_order: u64, - expanded_roots_of_unity: Vec, - completed_setup: bool, -} - -impl KZG { - pub fn setup( - path_to_g1_points: &str, - srs_order: u32, - srs_points_to_load: u32, - ) -> Result { - if srs_points_to_load > srs_order { - return Err(KzgError::GenericError( - "number of points to load is more than the srs order".to_string(), - )); - } - - let g1_points = - Self::parallel_read_g1_points(path_to_g1_points.to_owned(), srs_points_to_load, false) - .map_err(|e| KzgError::SerializationError(e.to_string()))?; - - Ok(Self { - g1: g1_points, - srs_order: srs_order.into(), - expanded_roots_of_unity: vec![], - completed_setup: false, - }) - } - - /// Calculates the roots of unities but doesn't assign it to the struct - /// Used in batch verification process as the roots need to be calculated for each blob - /// because of different length. - /// - /// # Arguments - /// * `length_of_data_after_padding` - Length of the blob data after padding in bytes. - /// - /// # Returns - /// * `Result<(Params, Vec), KzgError>` - Tuple containing: - /// - Params: KZG library operational parameters - /// - Vec: Vector of roots of unity - /// - /// # Details - /// - Generates roots of unity needed for FFT operations - /// - Calculates KZG operational parameters for commitment scheme - /// ``` - fn calculate_roots_of_unity( - length_of_data_after_padding: u64, - srs_order: u64, - ) -> Result, KzgError> { - // Calculate log2 of the next power of two of the length of data after padding - let log2_of_evals = (length_of_data_after_padding - .div_ceil(32) - .next_power_of_two() as f64) - .log2() - .to_u8() - .ok_or_else(|| { - KzgError::GenericError( - "Failed to convert length_of_data_after_padding to u8".to_string(), - ) - })?; - - // Check if the length of data after padding is valid with respect to the SRS order - if length_of_data_after_padding - .div_ceil(BYTES_PER_FIELD_ELEMENT as u64) - .next_power_of_two() - > srs_order - { - return Err(KzgError::SerializationError( - "the supplied encoding parameters are not valid with respect to the SRS." - .to_string(), - )); - } - - // Find the root of unity corresponding to the calculated log2 value - let root_of_unity = helpers::get_primitive_root_of_unity(log2_of_evals.into())?; - - // Expand the root to get all the roots of unity - let mut expanded_roots_of_unity = Self::expand_root_of_unity(&root_of_unity); - - // Remove the last element to avoid duplication - expanded_roots_of_unity.truncate(expanded_roots_of_unity.len() - 1); - - // Return the parameters and the expanded roots of unity - Ok(expanded_roots_of_unity) - } - - /// Calculates the roots of unities and assigns it to the struct - /// - /// # Arguments - /// * `length_of_data_after_padding` - Length of the blob data after padding in bytes. 
- /// - /// # Returns - /// * `Result<(), KzgError>` - /// - /// # Details - /// - Generates roots of unity needed for FFT operations - /// - /// # Example - /// ``` - /// use rust_kzg_bn254::kzg::KZG; - /// use rust_kzg_bn254::blob::Blob; - /// use ark_std::One; - /// use ark_bn254::Fr; - /// - /// let mut kzg = KZG::setup( - /// "tests/test-files/mainnet-data/g1.131072.point", - /// 268435456, - /// 131072, - /// ).unwrap(); - /// let input_blob = Blob::from_raw_data(b"test blob data"); - /// kzg.calculate_and_store_roots_of_unity(input_blob.len().try_into().unwrap()).unwrap(); - /// ``` - pub fn calculate_and_store_roots_of_unity( - &mut self, - length_of_data_after_padding: u64, - ) -> Result<(), KzgError> { - let roots_of_unity = - Self::calculate_roots_of_unity(length_of_data_after_padding, self.srs_order)?; - self.completed_setup = true; - self.expanded_roots_of_unity = roots_of_unity; - Ok(()) - } - - pub fn get_roots_of_unities(&self) -> Vec { - self.expanded_roots_of_unity.clone() - } - - /// helper function to get the - pub fn get_nth_root_of_unity(&self, i: usize) -> Option<&Fr> { - self.expanded_roots_of_unity.get(i) - } - - /// function to expand the roots based on the configuration - fn expand_root_of_unity(root_of_unity: &Fr) -> Vec { - let mut roots = vec![Fr::one()]; // Initialize with 1 - roots.push(*root_of_unity); // Add the root of unity - - let mut i = 1; - while !roots[i].is_one() { - // Continue until the element cycles back to one - let this = &roots[i]; - i += 1; - roots.push(this * root_of_unity); // Push the next power of the root - // of unity - } - roots - } - - /// helper function to get g1 points - pub fn get_g1_points(&self) -> Vec { - self.g1.to_vec() - } - - /// read files in chunks with specified length - /// TODO: chunks seems misleading here, since we read one field element at a - /// time. - fn read_file_chunks( - file_path: &str, - sender: Sender<(Vec, usize, bool)>, - point_size: usize, - num_points: u32, - is_native: bool, - ) -> io::Result<()> { - let file = File::open(file_path)?; - let mut reader = BufReader::new(file); - let mut position = 0; - let mut buffer = vec![0u8; point_size]; - - let mut i = 0; - // We are making one syscall per field element, which is super inefficient. - // FIXME: Read the entire file (or large segments) into memory and then split it - // into field elements. Entire G1 file might be ~8GiB, so might not fit - // in RAM. But we can only read the subset of the file that we need. - // For eg. for fault proof usage, only need to read 32MiB if our blob size is - // that large. - while let Ok(bytes_read) = reader.read(&mut buffer) { - if bytes_read == 0 { - break; - } - sender - .send((buffer[..bytes_read].to_vec(), position, is_native)) - .unwrap(); - position += bytes_read; - buffer.resize(point_size, 0); // Ensure the buffer is always the correct size - i += 1; - if num_points == i { - break; - } - } - Ok(()) - } - - /// read G1 points in parallel, by creating one reader thread, which reads - /// bytes from the file, and fans them out to worker threads (one per - /// cpu) which parse the bytes into G1Affine points. The worker threads - /// then fan in the parsed points to the main thread, which sorts them by - /// their original position in the file to maintain order. Not used anywhere - /// but kept as a reference. 
- /// - /// # Arguments - /// * `file_path` - The path to the file containing the G1 points - /// * `srs_points_to_load` - The number of points to load from the file - /// * `is_native` - Whether the points are in native arkworks format or not - /// - /// # Returns - /// * `Ok(Vec)` - The G1 points read from the file - /// * `Err(KzgError)` - An error occurred while reading the file - pub fn parallel_read_g1_points_native( - file_path: String, - srs_points_to_load: u32, - is_native: bool, - ) -> Result, KzgError> { - // Channel contains (bytes, position, is_native) tuples. The position is used to - // reorder the points after processing them. - let (sender, receiver) = bounded::<(Vec, usize, bool)>(1000); - - // Spawning the reader thread - let reader_thread = std::thread::spawn( - move || -> Result<(), Box> { - Self::read_file_chunks(&file_path, sender, 32, srs_points_to_load, is_native) - .map_err(|e| -> Box { Box::new(e) }) - }, - ); - - let num_workers = num_cpus::get(); - - let workers: Vec<_> = (0..num_workers) - .map(|_| { - let receiver = receiver.clone(); - std::thread::spawn(move || helpers::process_chunks::(receiver)) - }) - .collect(); - - // Wait for the reader thread to finish - match reader_thread.join() { - Ok(result) => match result { - Ok(_) => {}, - Err(e) => return Err(KzgError::GenericError(e.to_string())), - }, - Err(_) => return Err(KzgError::GenericError("Thread panicked".to_string())), - } - - // Collect and sort results - let mut all_points = Vec::new(); - for worker in workers { - let points = worker.join().expect("Worker thread panicked"); - all_points.extend(points); - } - - // Sort by original position to maintain order - all_points.sort_by_key(|&(_, position)| position); - - Ok(all_points.iter().map(|(point, _)| *point).collect()) - } - - /// read G1 points in parallel - pub fn parallel_read_g1_points( - file_path: String, - srs_points_to_load: u32, - is_native: bool, - ) -> Result, KzgError> { - let (sender, receiver) = bounded::<(Vec, usize, bool)>(1000); - - // Spawning the reader thread - let reader_handle = std::thread::spawn( - move || -> Result<(), Box> { - Self::read_file_chunks(&file_path, sender, 32, srs_points_to_load, is_native) - .map_err(|e| -> Box { Box::new(e) }) - }, - ); - - let num_workers = num_cpus::get(); - - let worker_handles: Vec<_> = (0..num_workers) - .map(|_| { - let receiver = receiver.clone(); - std::thread::spawn(move || helpers::process_chunks::(receiver)) - }) - .collect(); - - // Wait for the reader thread to finish - match reader_handle.join() { - Ok(result) => match result { - Ok(_) => {}, - Err(e) => return Err(KzgError::GenericError(e.to_string())), - }, - Err(_) => return Err(KzgError::GenericError("Thread panicked".to_string())), - } - - // Collect and sort results - let mut all_points = Vec::new(); - for handle in worker_handles { - let points = handle.join().expect("Worker thread panicked"); - all_points.extend(points); - } - - // Sort by original position to maintain order - all_points.sort_by_key(|&(_, position)| position); - - Ok(all_points.iter().map(|(point, _)| *point).collect()) - } - - /// Commit the polynomial with the srs values loaded into [Kzg]. - pub fn commit_eval_form(&self, polynomial: &PolynomialEvalForm) -> Result { - if polynomial.len() > self.g1.len() { - return Err(KzgError::SerializationError( - "polynomial length is not correct".to_string(), - )); - } - - // When the polynomial is in evaluation form, use IFFT to transform monomial srs - // points to lagrange form. 
- let bases = self.g1_ifft(polynomial.len())?; - - match G1Projective::msm(&bases, polynomial.evaluations()) { - Ok(res) => Ok(res.into_affine()), - Err(err) => Err(KzgError::CommitError(err.to_string())), - } - } - - /// Commit the polynomial with the srs values loaded into [Kzg]. - pub fn commit_coeff_form( - &self, - polynomial: &PolynomialCoeffForm, - ) -> Result { - if polynomial.len() > self.g1.len() { - return Err(KzgError::SerializationError( - "polynomial length is not correct".to_string(), - )); - } - // When the polynomial is in coefficient form, use the original srs points (in - // monomial form). - let bases = self.g1[..polynomial.len()].to_vec(); - - match G1Projective::msm(&bases, polynomial.coeffs()) { - Ok(res) => Ok(res.into_affine()), - Err(err) => Err(KzgError::CommitError(err.to_string())), - } - } - - /// Helper function for `compute_kzg_proof()` and `compute_blob_kzg_proof()` - fn compute_proof_impl( - &self, - polynomial: &PolynomialEvalForm, - z_fr: &Fr, - ) -> Result { - if !self.completed_setup { - return Err(KzgError::GenericError( - "setup is not complete, run the data_setup functions".to_string(), - )); - } - - // Verify polynomial length matches that of the roots of unity - if polynomial.len() != self.expanded_roots_of_unity.len() { - return Err(KzgError::GenericError( - "inconsistent length between blob and root of unities".to_string(), - )); - } - - let eval_fr = polynomial.evaluations(); - // Pre-allocate vector for shifted polynomial p(x) - y - let mut poly_shift: Vec = Vec::with_capacity(eval_fr.len()); - - // Evaluate polynomial at the point z - // This gives us y = p(z) - let y_fr = Self::evaluate_polynomial_in_evaluation_form(polynomial, z_fr, self.srs_order)?; - - // Compute p(x) - y for each evaluation point - // This is the numerator of the quotient polynomial - for fr in eval_fr { - poly_shift.push(*fr - y_fr); - } - - // Compute denominator polynomial (x - z) at each root of unity - let mut denom_poly = Vec::::with_capacity(self.expanded_roots_of_unity.len()); - for root_of_unity in self.expanded_roots_of_unity.iter().take(eval_fr.len()) { - denom_poly.push(*root_of_unity - z_fr); - } - - // Pre-allocate vector for quotient polynomial evaluations - let mut quotient_poly = Vec::::with_capacity(self.expanded_roots_of_unity.len()); - - // Compute quotient polynomial q(x) = (p(x) - y)/(x - z) at each root of unity - for i in 0..self.expanded_roots_of_unity.len() { - if denom_poly[i].is_zero() { - // Special case: when x = z, use L'Hôpital's rule - // Compute the derivative evaluation instead - quotient_poly.push(self.compute_quotient_eval_on_domain(z_fr, eval_fr, &y_fr)); - } else { - // Normal case: direct polynomial division - quotient_poly.push(poly_shift[i].div(denom_poly[i])); - } - } - - let quotient_poly_eval_form = PolynomialEvalForm::new(quotient_poly); - self.commit_eval_form("ient_poly_eval_form) - } - - /// commit to a [Blob], by transforming it into a [PolynomialEvalForm] and - /// then calling [Kzg::commit_eval_form]. 
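A note on the L'Hôpital special case in `compute_proof_impl` above (the logic is unchanged in the new prover crate): when the opening point z coincides with a root of unity, the direct division (p(x) − y)/(x − z) is 0/0 at that point, so `compute_quotient_eval_on_domain` instead evaluates

$$
q(z) = \sum_{\omega_i \neq z} \frac{(f_i - y)\,\omega_i}{z\,(z - \omega_i)},
$$

summing over the remaining roots of unity, with y = p(z). This matches the loop in that function body: numerator (f_i − y)·ω_i, denominator (z − ω_i)·z, accumulated over all i with ω_i ≠ z.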
- pub fn commit_blob(&self, blob: &Blob) -> Result { - let polynomial = blob.to_polynomial_eval_form(); - self.commit_eval_form(&polynomial) - } - - pub fn compute_proof_with_known_z_fr_index( - &self, - polynomial: &PolynomialEvalForm, - index: u64, - ) -> Result { - // Convert u64 index to usize for array indexing - let usized_index = index.to_usize().ok_or(KzgError::GenericError( - "Index conversion to usize failed".to_string(), - ))?; - - // Get the root of unity at the specified index - let z_fr = self - .get_nth_root_of_unity(usized_index) - .ok_or_else(|| KzgError::GenericError("Root of unity not found".to_string()))?; - - // Compute the KZG proof at the selected root of unity - // This delegates to the main proof computation function - // using our selected evaluation point - self.compute_proof(polynomial, z_fr) - } - - /// Compute a kzg proof from a polynomial in evaluation form. - /// We don't currently support proofs for polynomials in coefficient form, - /// but one can take the FFT of the polynomial in coefficient form to - /// get the polynomial in evaluation form. This is available via the - /// method [PolynomialCoeffForm::to_eval_form]. - /// TODO(anupsv): Accept bytes instead of Fr element. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/29 - pub fn compute_proof( - &self, - polynomial: &PolynomialEvalForm, - z_fr: &Fr, - ) -> Result { - if !self.completed_setup { - return Err(KzgError::GenericError( - "setup is not complete, run one of the setup functions".to_string(), - )); - } - - // Verify that polynomial length matches roots of unity length - if polynomial.len() != self.expanded_roots_of_unity.len() { - return Err(KzgError::GenericError( - "inconsistent length between blob and root of unities".to_string(), - )); - } - - // Call the implementation to compute the actual proof - // This will: - // 1. Evaluate polynomial at z - // 2. Compute quotient polynomial q(x) = (p(x) - p(z)) / (x - z) - // 3. Generate KZG proof as commitment to q(x) - self.compute_proof_impl(polynomial, z_fr) - } - - /// refer to DA for more context - pub fn compute_quotient_eval_on_domain(&self, z_fr: &Fr, eval_fr: &[Fr], value_fr: &Fr) -> Fr { - let mut quotient = Fr::zero(); - let mut fi: Fr = Fr::zero(); - let mut numerator: Fr = Fr::zero(); - let mut denominator: Fr = Fr::zero(); - let mut temp: Fr = Fr::zero(); - - self.expanded_roots_of_unity - .iter() - .enumerate() - .for_each(|(i, omega_i)| { - if *omega_i == *z_fr { - return; - } - fi = eval_fr[i] - value_fr; - numerator = fi * omega_i; - denominator = z_fr - omega_i; - denominator *= z_fr; - temp = numerator.div(denominator); - quotient += temp; - }); - - quotient - } - - /// function to compute the inverse FFT - pub fn g1_ifft(&self, length: usize) -> Result, KzgError> { - // is not power of 2 - if !length.is_power_of_two() { - return Err(KzgError::FFTError( - "length provided is not a power of 2".to_string(), - )); - } - - let points_projective: Vec = self.g1[..length] - .par_iter() - .map(|&p| G1Projective::from(p)) - .collect(); - let ifft_result: Vec<_> = GeneralEvaluationDomain::::new(length) - .ok_or(KzgError::FFTError( - "Could not perform IFFT due to domain consturction error".to_string(), - ))? - .ifft(&points_projective) - .par_iter() - .map(|p| p.into_affine()) - .collect(); - - Ok(ifft_result) - } - - /// TODO(anupsv): Accept bytes instead of Fr element and Affine points. 
Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/30 - pub fn verify_proof( - &self, - commitment: G1Affine, - proof: G1Affine, - value_fr: Fr, - z_fr: Fr, - ) -> Result { - // Get τ*G2 from the trusted setup - // This is the second generator point multiplied by the trusted setup secret - let g2_tau = self.get_g2_tau(); - - // Compute [value]*G1 - // This encrypts the claimed evaluation value as a point in G1 - let value_g1 = (G1Affine::generator() * value_fr).into_affine(); - - // Compute [C - value*G1] - // This represents the difference between the commitment and claimed value - // If the claim is valid, this equals H(X)(X - z) in the polynomial equation - let commit_minus_value = (commitment - value_g1).into_affine(); - - // Compute [z]*G2 - // This encrypts the evaluation point as a point in G2 - let z_g2 = (G2Affine::generator() * z_fr).into_affine(); - - // Compute [τ - z]*G2 - // This represents (X - z) in the polynomial equation - // τ is the secret from the trusted setup representing the variable X - let x_minus_z = (g2_tau - z_g2).into_affine(); - - // Verify the pairing equation: - // e([C - value*G1], G2) = e(proof, [τ - z]*G2) - // This checks if (C - value*G1) = proof * (τ - z) - // which verifies the polynomial quotient relationship - Ok(Self::pairings_verify( - commit_minus_value, // Left side first argument - G2Affine::generator(), // Left side second argument (G2 generator) - proof, // Right side first argument - x_minus_z, // Right side second argument - )) - } - - pub fn get_g2_tau(&self) -> G2Affine { - if self.g1.len() == MAINNET_SRS_G1_SIZE { - G2_TAU_FOR_MAINNET_SRS - } else { - G2_TAU_FOR_TEST_SRS_3000 - } - } - - fn pairings_verify(a1: G1Affine, a2: G2Affine, b1: G1Affine, b2: G2Affine) -> bool { - let neg_b1 = -b1; - let p = [a1, neg_b1]; - let q = [a2, b2]; - let result = Bn254::multi_pairing(p, q); - result.is_zero() - } - - /// TODO(anupsv): Accept bytes instead of Affine points. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/31 - pub fn verify_blob_kzg_proof( - &self, - blob: &Blob, - commitment: &G1Affine, - proof: &G1Affine, - ) -> Result { - // Convert blob to polynomial - let polynomial = blob.to_polynomial_eval_form(); - - // Compute the evaluation challenge for the blob and commitment - let evaluation_challenge = Self::compute_challenge(blob, commitment)?; - - // Evaluate the polynomial in evaluation form - let y = Self::evaluate_polynomial_in_evaluation_form( - &polynomial, - &evaluation_challenge, - self.srs_order, - )?; - - // Verify the KZG proof - self.verify_proof(*commitment, *proof, y, evaluation_challenge) - } - - /// TODO(anupsv): Match 4844 specs w.r.t to the inputs. 
Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/30 - pub fn compute_blob_proof( - &self, - blob: &Blob, - commitment: &G1Affine, - ) -> Result { - // Validate that the commitment is a valid point on the G1 curve - // This prevents potential invalid curve attacks - if !commitment.is_on_curve() || !commitment.is_in_correct_subgroup_assuming_on_curve() { - return Err(KzgError::NotOnCurveError( - "commitment not on curve".to_string(), - )); - } - - // Convert the blob to a polynomial in evaluation form - // This is necessary because KZG proofs work with polynomials - let blob_poly = blob.to_polynomial_eval_form(); - - // Compute the evaluation challenge using Fiat-Shamir heuristic - // This challenge determines the point at which we evaluate the polynomial - let evaluation_challenge = Self::compute_challenge(blob, commitment)?; - - // Compute the actual KZG proof using the polynomial and evaluation point - // This creates a proof that the polynomial evaluates to a specific value at the challenge point - // The proof is a single G1 point that can be used to verify the evaluation - self.compute_proof_impl(&blob_poly, &evaluation_challenge) - } - - /// Maps a byte slice to a field element (`Fr`) using SHA-256 from SHA3 family as the - /// hash function. - /// - /// # Arguments - /// - /// * `msg` - The input byte slice to hash. - /// - /// # Returns - /// - /// * `Fr` - The resulting field element. - fn hash_to_field_element(msg: &[u8]) -> Fr { - // Perform the hash operation. - let msg_digest = Sha256::digest(msg); - let hash_elements = msg_digest.as_slice(); - - let fr_element: Fr = Fr::from_be_bytes_mod_order(hash_elements); - - fr_element - } - - /// Computes the Fiat-Shamir challenge from a blob and its commitment. - /// - /// # Arguments - /// - /// * `blob` - A reference to the `Blob` struct. - /// * `commitment` - A reference to the `G1Affine` commitment. - /// - /// # Returns - /// - /// * `Ok(Fr)` - The resulting field element challenge. - /// * `Err(KzgError)` - If any step fails. 
- pub fn compute_challenge(blob: &Blob, commitment: &G1Affine) -> Result { - // Convert the blob to a polynomial in evaluation form - // This is needed to process the blob data for the challenge - let blob_poly = blob.to_polynomial_eval_form(); - - // Calculate total size needed for the challenge input buffer: - // - Length of domain separator - // - 8 bytes for number of field elements - // - Size of blob data (number of field elements * bytes per element) - // - Size of compressed G1 point (commitment) - let challenge_input_size = FIAT_SHAMIR_PROTOCOL_DOMAIN.len() - + 8 - + (blob_poly.len() * BYTES_PER_FIELD_ELEMENT) - + SIZE_OF_G1_AFFINE_COMPRESSED; - - // Initialize buffer to store all data that will be hashed - let mut digest_bytes = vec![0; challenge_input_size]; - let mut offset = 0; - - // Step 1: Copy the Fiat-Shamir domain separator - // This provides domain separation for the hash function to prevent - // attacks that try to confuse different protocol messages - digest_bytes[offset..offset + FIAT_SHAMIR_PROTOCOL_DOMAIN.len()] - .copy_from_slice(FIAT_SHAMIR_PROTOCOL_DOMAIN); - offset += FIAT_SHAMIR_PROTOCOL_DOMAIN.len(); - - // Step 2: Copy the number of field elements (blob polynomial length) - // Convert to bytes using the configured endianness - let number_of_field_elements = blob_poly.len().to_be_bytes(); - digest_bytes[offset..offset + 8].copy_from_slice(&number_of_field_elements); - offset += 8; - - // Step 3: Copy the blob data - // Convert polynomial to bytes using helper function - let blob_data = helpers::to_byte_array( - blob_poly.evaluations(), - blob_poly.len() * BYTES_PER_FIELD_ELEMENT, - ); - digest_bytes[offset..offset + blob_data.len()].copy_from_slice(&blob_data); - offset += blob_data.len(); - - // Step 4: Copy the commitment (compressed G1 point) - // Serialize the commitment point in compressed form - let mut commitment_bytes = Vec::with_capacity(SIZE_OF_G1_AFFINE_COMPRESSED); - commitment - .serialize_compressed(&mut commitment_bytes) - .map_err(|_| { - KzgError::SerializationError("Failed to serialize commitment".to_string()) - })?; - digest_bytes[offset..offset + SIZE_OF_G1_AFFINE_COMPRESSED] - .copy_from_slice(&commitment_bytes); - - // Verify that we wrote exactly the amount of bytes we expected - // This helps catch any buffer overflow/underflow bugs - if offset + SIZE_OF_G1_AFFINE_COMPRESSED != challenge_input_size { - return Err(KzgError::InvalidInputLength); - } - - // Hash all the data to generate the challenge field element - // This implements the Fiat-Shamir transform to generate a "random" challenge - Ok(Self::hash_to_field_element(&digest_bytes)) - } - - /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#evaluate_polynomial_in_evaluation_form - pub fn evaluate_polynomial_in_evaluation_form( - polynomial: &PolynomialEvalForm, - z: &Fr, - srs_order: u64, - ) -> Result { - // Step 1: Retrieve the length of the padded blob - let blob_size = polynomial.len_underlying_blob_bytes(); - - // Step 2: Calculate roots of unity for the given blob size and SRS order - let roots_of_unity = Self::calculate_roots_of_unity(blob_size as u64, srs_order)?; - - // Step 3: Ensure the polynomial length matches the domain length - if polynomial.len() != roots_of_unity.len() { - return Err(KzgError::InvalidInputLength); - } - - let width = polynomial.len(); - - // Step 4: Compute inverse_width = 1 / width - let inverse_width = Fr::from(width as u64) - .inverse() - .ok_or(KzgError::InvalidDenominator)?; - - // Step 5: 
Check if `z` is in the domain - if let Some(index) = roots_of_unity.iter().position(|&domain_i| domain_i == *z) { - return polynomial - .get_evalualtion(index) - .cloned() - .ok_or(KzgError::GenericError( - "Polynomial element missing at the found index.".to_string(), - )); - } - - // Step 6: Use the barycentric formula to compute the evaluation - let sum = polynomial - .evaluations() - .iter() - .zip(roots_of_unity.iter()) - .map(|(f_i, &domain_i)| { - let a = *f_i * domain_i; - let b = *z - domain_i; - // Since `z` is not in the domain, `b` should never be zero - a / b - }) - .fold(Fr::zero(), |acc, val| acc + val); - - // Step 7: Compute r = z^width - 1 - let r = z.pow([width as u64]) - Fr::one(); - - // Step 8: Compute f(z) = (z^width - 1) / width * sum - let f_z = sum * r * inverse_width; - - Ok(f_z) - } - - /// A helper function for the `verify_blob_kzg_proof_batch` function. - fn compute_challenges_and_evaluate_polynomial( - blobs: &[Blob], - commitments: &[G1Affine], - srs_order: u64, - ) -> Result<(Vec, Vec), KzgError> { - // Pre-allocate vectors to store: - // - evaluation_challenges: Points where polynomials will be evaluated - // - ys: Results of polynomial evaluations at challenge points - let mut evaluation_challenges = Vec::with_capacity(blobs.len()); - let mut ys = Vec::with_capacity(blobs.len()); - - // Process each blob sequentially - // TODO: Potential optimizations: - // 1. Cache roots of unity calculations across iterations - // 2. Parallelize processing for large numbers of blobs - // 3. Batch polynomial conversions if possible - for i in 0..blobs.len() { - // Step 1: Convert blob to polynomial form - // This is necessary because we need to evaluate the polynomial - let polynomial = blobs[i].to_polynomial_eval_form(); - - // Step 2: Generate Fiat-Shamir challenge - // This creates a "random" evaluation point based on the blob and commitment - // The challenge is deterministic but unpredictable, making the proof non-interactive - let evaluation_challenge = Self::compute_challenge(&blobs[i], &commitments[i])?; - - // Step 3: Evaluate the polynomial at the challenge point - // This uses the evaluation form for efficient computation - // The srs_order parameter ensures compatibility with the trusted setup - let y = Self::evaluate_polynomial_in_evaluation_form( - &polynomial, - &evaluation_challenge, - srs_order, - )?; - - // Store both: - // - The challenge point (where we evaluated) - // - The evaluation result (what the polynomial equals at that point) - evaluation_challenges.push(evaluation_challenge); - ys.push(y); - } - - // Return tuple of: - // 1. Vector of evaluation points (challenges) - // 2. 
Vector of polynomial evaluations at those points - // These will be used in the KZG proof verification process - Ok((evaluation_challenges, ys)) - } - - /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_blob_kzg_proof_batch - pub fn verify_blob_kzg_proof_batch( - &self, - blobs: &Vec, - commitments: &Vec, - proofs: &Vec, - ) -> Result { - // First validation check: Ensure all input vectors have matching lengths - // This is critical for batch verification to work correctly - if !(commitments.len() == blobs.len() && proofs.len() == blobs.len()) { - return Err(KzgError::GenericError( - "length's of the input are not the same".to_owned(), - )); - } - - // Validate that all commitments are valid points on the G1 curve - // Using parallel iterator (par_iter) for better performance on large batches - // This prevents invalid curve attacks - if commitments.iter().any(|commitment| { - commitment == &G1Affine::identity() - || !commitment.is_on_curve() - || !commitment.is_in_correct_subgroup_assuming_on_curve() - }) { - return Err(KzgError::NotOnCurveError( - "commitment not on curve".to_owned(), - )); - } - - // Validate that all proofs are valid points on the G1 curve - // Using parallel iterator for efficiency - if proofs.iter().any(|proof| { - proof == &G1Affine::identity() - || !proof.is_on_curve() - || !proof.is_in_correct_subgroup_assuming_on_curve() - }) { - return Err(KzgError::NotOnCurveError("proof not on curve".to_owned())); - } - - // Compute evaluation challenges and evaluate polynomials at those points - // This step: - // 1. Generates random evaluation points for each blob - // 2. Evaluates each blob's polynomial at its corresponding point - let (evaluation_challenges, ys) = - Self::compute_challenges_and_evaluate_polynomial(blobs, commitments, self.srs_order)?; - - // Convert each blob to its polynomial evaluation form and get the length of number of field elements - // This length value is needed for computing the challenge - let blobs_as_field_elements_length: Vec = blobs - .iter() - .map(|blob| blob.to_polynomial_eval_form().evaluations().len() as u64) - .collect(); - - // Perform the actual batch verification using the computed values: - // - commitments: Original KZG commitments - // - evaluation_challenges: Points where polynomials are evaluated - // - ys: Values of polynomials at evaluation points - // - proofs: KZG proofs for each evaluation - // - blobs_as_field_elements_length: Length of each blob's polynomial - self.verify_kzg_proof_batch( - commitments, - &evaluation_challenges, - &ys, - proofs, - &blobs_as_field_elements_length, - ) - } - - /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch - /// A helper function to the `helpers::compute_powers` function. This does the below reference code from the 4844 spec. 
- /// Ref: `# Append all inputs to the transcript before we hash - /// for commitment, z, y, proof in zip(commitments, zs, ys, proofs): - /// data += commitment + bls_field_to_bytes(z) + bls_field_to_bytes(y) + proof`` - fn compute_r_powers( - &self, - commitments: &[G1Affine], - zs: &[Fr], - ys: &[Fr], - proofs: &[G1Affine], - blobs_as_field_elements_length: &[u64], - ) -> Result, KzgError> { - // Get the number of commitments/proofs we're processing - let n = commitments.len(); - - // Initial data length includes: - // - 24 bytes for domain separator - // - 8 bytes for number of field elements per blob - // - 8 bytes for number of commitments - let mut initial_data_length: usize = 40; - - // Calculate total input size: - // - initial_data_length (40 bytes) - // - For the number of commitments/zs/ys/proofs/blobs_as_field_elements_length (which are all the same length): - // * BYTES_PER_FIELD_ELEMENT for commitment - // * 2 * BYTES_PER_FIELD_ELEMENT for z and y values - // * BYTES_PER_FIELD_ELEMENT for proof - // * 8 bytes for blob length - let input_size = initial_data_length - + n * (BYTES_PER_FIELD_ELEMENT - + 2 * BYTES_PER_FIELD_ELEMENT - + BYTES_PER_FIELD_ELEMENT - + 8); - - // Initialize buffer for data to be hashed - let mut data_to_be_hashed: Vec = vec![0; input_size]; - - // Copy domain separator to start of buffer - // This provides domain separation for the hash function - data_to_be_hashed[0..24].copy_from_slice(RANDOM_CHALLENGE_KZG_BATCH_DOMAIN); - - // Convert number of commitments to bytes and copy to buffer - // Uses configured endianness (Big or Little) - let n_bytes: [u8; 8] = n.to_be_bytes(); - data_to_be_hashed[32..40].copy_from_slice(&n_bytes); - - let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)]; - for (chunk, &length) in target_slice - .chunks_mut(8) - .zip(blobs_as_field_elements_length) - { - chunk.copy_from_slice(&length.to_be_bytes()); - } - initial_data_length += n * 8; - - // Process each commitment, proof, and evaluation point/value - for i in 0..n { - // Serialize and copy commitment - let mut v = vec![]; - - // TODO(anupsv): Move serialization to helper function. 
Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/32 - commitments[i].serialize_compressed(&mut v).map_err(|_| { - KzgError::SerializationError("Failed to serialize commitment".to_string()) - })?; - data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)] - .copy_from_slice(&v[..]); - initial_data_length += BYTES_PER_FIELD_ELEMENT; - - // Convert z point to bytes and copy - let v = zs[i].into_bigint().to_bytes_be(); - data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)] - .copy_from_slice(&v[..]); - initial_data_length += BYTES_PER_FIELD_ELEMENT; - - // Convert y value to bytes and copy - let v = ys[i].into_bigint().to_bytes_be(); - data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)] - .copy_from_slice(&v[..]); - initial_data_length += BYTES_PER_FIELD_ELEMENT; - - // Serialize and copy proof - let mut proof_bytes = vec![]; - proofs[i] - .serialize_compressed(&mut proof_bytes) - .map_err(|_| { - KzgError::SerializationError("Failed to serialize proof".to_string()) - })?; - data_to_be_hashed[initial_data_length..(proof_bytes.len() + initial_data_length)] - .copy_from_slice(&proof_bytes[..]); - initial_data_length += BYTES_PER_FIELD_ELEMENT; - } - - // Verify we filled the entire buffer - // This ensures we didn't make any buffer overflow or underflow errors - if initial_data_length != input_size { - return Err(KzgError::InvalidInputLength); - } - - // Hash all the data to get our random challenge - let r = Self::hash_to_field_element(&data_to_be_hashed); - - // Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)] - Ok(helpers::compute_powers(&r, n)) - } - - /// Verifies multiple KZG proofs efficiently. - /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch - /// # Arguments - /// - /// * `commitments` - A slice of `G1Affine` commitments. - /// * `zs` - A slice of `Fr` elements representing z values. - /// * `ys` - A slice of `Fr` elements representing y values. - /// * `proofs` - A slice of `G1Affine` proofs. - /// - /// # Returns - /// - /// * `Ok(true)` if all proofs are valid. - /// * `Ok(false)` if any proof is invalid. - /// * `Err(KzgError)` if an error occurs during verification. 
- /// - fn verify_kzg_proof_batch( - &self, - commitments: &[G1Affine], - zs: &[Fr], - ys: &[Fr], - proofs: &[G1Affine], - blobs_as_field_elements_length: &[u64], - ) -> Result { - // Verify that all input arrays have the same length - // This is crucial for batch verification to work correctly - if !(commitments.len() == zs.len() && zs.len() == ys.len() && ys.len() == proofs.len()) { - return Err(KzgError::GenericError( - "length's of the input are not the same".to_owned(), - )); - } - - // Check that all commitments are valid points on the G1 curve - // This prevents invalid curve attacks - if !commitments - .iter() - .all(|commitment| is_on_curve_g1(&G1Projective::from(*commitment))) - { - return Err(KzgError::NotOnCurveError( - "commitment not on curve".to_owned(), - )); - } - - // Check that all proofs are valid points on the G1 curve - if !proofs - .iter() - .all(|proof| is_on_curve_g1(&G1Projective::from(*proof))) - { - return Err(KzgError::NotOnCurveError("proof".to_owned())); - } - - let n = commitments.len(); - - // Initialize vectors to store: - // c_minus_y: [C_i - [y_i]] (commitment minus the evaluation point encrypted) - // r_times_z: [r^i * z_i] (powers of random challenge times evaluation points) - let mut c_minus_y: Vec = Vec::with_capacity(n); - let mut r_times_z: Vec = Vec::with_capacity(n); - - // Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)] - let r_powers = - self.compute_r_powers(commitments, zs, ys, proofs, blobs_as_field_elements_length)?; - - // Compute Σ(r^i * proof_i) - let proof_lincomb = helpers::g1_lincomb(proofs, &r_powers)?; - - // For each proof i: - // 1. Compute C_i - [y_i] - // 2. Compute r^i * z_i - for i in 0..n { - // Encrypt y_i as a point on G1 - let ys_encrypted = G1Affine::generator() * ys[i]; - // Compute C_i - [y_i] and convert to affine coordinates - c_minus_y.push((commitments[i] - ys_encrypted).into_affine()); - // Compute r^i * z_i - r_times_z.push(r_powers[i] * zs[i]); - } - - // Compute: - // proof_z_lincomb = Σ(r^i * z_i * proof_i) - // c_minus_y_lincomb = Σ(r^i * (C_i - [y_i])) - let proof_z_lincomb = helpers::g1_lincomb(proofs, &r_times_z)?; - let c_minus_y_lincomb = helpers::g1_lincomb(&c_minus_y, &r_powers)?; - - // Compute right-hand side of the pairing equation - let rhs_g1 = c_minus_y_lincomb + proof_z_lincomb; - - // Verify the pairing equation: - // e(Σ(r^i * proof_i), [τ]) = e(Σ(r^i * (C_i - [y_i])) + Σ(r^i * z_i * proof_i), [1]) - // A test exists to check if g2_tau is on curve, so we don't need to check it here. 
-        let result = Self::pairings_verify(
-            proof_lincomb,
-            self.get_g2_tau(),
-            rhs_g1.into(),
-            G2Affine::generator(),
-        );
-        Ok(result)
-    }
-}
diff --git a/verifier/Cargo.toml b/verifier/Cargo.toml
new file mode 100644
index 0000000..f3a7ea7
--- /dev/null
+++ b/verifier/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "rust-kzg-bn254-verifier"
+version = "0.1.0"
+license-file.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+repository.workspace = true
+
+[dependencies]
+rust-kzg-bn254-primitives = { path = "../primitives", version = "0.1.0" }
+ark-bn254 = "0.5.0"
+ark-ec = { version = "0.5.0", features = ["parallel"] }
+ark-ff = { version = "0.5.0", features = ["parallel"] }
+ark-serialize = "0.5.0"
+
+[dev-dependencies]
+rand = "0.8.5"
+criterion = "0.5"
+lazy_static = "1.5"
+ark-std = { version = "0.5.0", features = ["parallel"] }
+rust-kzg-bn254-prover = { path = "../prover", version = "0.1.0" }
+
+[[bench]]
+name = "bench_kzg_verify"
+harness = false
+path = "benches/bench_kzg_verify.rs"
diff --git a/verifier/README.md b/verifier/README.md
new file mode 100644
index 0000000..b7a6194
--- /dev/null
+++ b/verifier/README.md
@@ -0,0 +1,9 @@
+# rust-kzg-bn254-verifier
+
+[![Crate](https://img.shields.io/crates/v/rust-kzg-bn254-verifier.svg)](https://crates.io/crates/rust-kzg-bn254-verifier)
+
+This library offers KZG proof verification functions, for both single openings and batches of blob proofs.
+
+## Warning & Disclaimer
+
+This code is unaudited and under construction. This is experimental software and is provided on an "as is" and "as available" basis and may not work at all. It should not be used in production.
diff --git a/benches/bench_kzg_verify.rs b/verifier/benches/bench_kzg_verify.rs
similarity index 77%
rename from benches/bench_kzg_verify.rs
rename to verifier/benches/bench_kzg_verify.rs
index 74adfbd..0c8a351 100644
--- a/benches/bench_kzg_verify.rs
+++ b/verifier/benches/bench_kzg_verify.rs
@@ -1,14 +1,17 @@
 use criterion::{criterion_group, criterion_main, Criterion};
 use rand::Rng;
-use rust_kzg_bn254::{blob::Blob, kzg::KZG};
+use rust_kzg_bn254_primitives::blob::Blob;
+use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS};
+use rust_kzg_bn254_verifier::verify::verify_proof;
 use std::time::Duration;
 
 fn bench_kzg_verify(c: &mut Criterion) {
     let mut rng = rand::thread_rng();
-    let mut kzg = KZG::setup(
-        "tests/test-files/mainnet-data/g1.131072.point",
+    let mut kzg = KZG::new();
+    let srs = SRS::new(
+        "../prover/tests/test-files/mainnet-data/g1.32mb.point",
         268435456,
-        131072,
+        524288,
     )
     .unwrap();
 
@@ -20,13 +23,13 @@ fn bench_kzg_verify(c: &mut Criterion) {
         .unwrap();
         let index =
             rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements());
-        let commitment = kzg.commit_eval_form(&input_poly).unwrap();
+        let commitment = kzg.commit_eval_form(&input_poly, &srs).unwrap();
         let proof = kzg
-            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap())
+            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs)
            .unwrap();
         let value_fr = input_poly.get_evalualtion(index).unwrap();
         let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
-        b.iter(|| kzg.verify_proof(commitment, proof, *value_fr, *z_fr));
+        b.iter(|| verify_proof(commitment, proof, *value_fr, *z_fr));
     });
 
     c.bench_function("bench_kzg_verify_30000", |b| {
@@ -37,13 +40,13 @@ fn bench_kzg_verify(c: &mut Criterion) {
         .unwrap();
         let index =
             rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements());
-        let commitment = kzg.commit_eval_form(&input_poly).unwrap();
+        let commitment = kzg.commit_eval_form(&input_poly, &srs).unwrap();
         let proof = kzg
-            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap())
+            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs)
             .unwrap();
         let value_fr = input_poly.get_evalualtion(index).unwrap();
         let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
-        b.iter(|| kzg.verify_proof(commitment, proof, *value_fr, *z_fr));
+        b.iter(|| verify_proof(commitment, proof, *value_fr, *z_fr));
     });
 
     c.bench_function("bench_kzg_verify_50000", |b| {
@@ -54,13 +57,13 @@ fn bench_kzg_verify(c: &mut Criterion) {
         .unwrap();
         let index =
             rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements());
-        let commitment = kzg.commit_eval_form(&input_poly).unwrap();
+        let commitment = kzg.commit_eval_form(&input_poly, &srs).unwrap();
         let proof = kzg
-            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap())
+            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap(), &srs)
             .unwrap();
         let value_fr = input_poly.get_evalualtion(index).unwrap();
         let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
-        b.iter(|| kzg.verify_proof(commitment, proof, *value_fr, *z_fr));
+        b.iter(|| verify_proof(commitment, proof, *value_fr, *z_fr));
     });
 }
diff --git a/verifier/src/batch.rs b/verifier/src/batch.rs
new file mode 100644
index 0000000..ab9b93f
--- /dev/null
+++ b/verifier/src/batch.rs
@@ -0,0 +1,272 @@
+use ark_bn254::{Fr, G1Affine, G1Projective, G2Affine, G2Projective};
+use ark_ec::{AffineRepr, CurveGroup};
+use ark_ff::{BigInteger, PrimeField};
+use ark_serialize::CanonicalSerialize;
+use rust_kzg_bn254_primitives::{
+    blob::Blob,
+    consts::{BYTES_PER_FIELD_ELEMENT, G2_TAU, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN},
+    errors::KzgError,
+    helpers::{self, is_on_curve_g1},
+};
+
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_blob_kzg_proof_batch
+pub fn verify_blob_kzg_proof_batch(
+    blobs: &[Blob],
+    commitments: &[G1Affine],
+    proofs: &[G1Affine],
+) -> Result<bool, KzgError> {
+    // First validation check: ensure all input slices have matching lengths.
+    // This is critical for batch verification to work correctly
+    if !(commitments.len() == blobs.len() && proofs.len() == blobs.len()) {
+        return Err(KzgError::GenericError(
+            "length's of the input are not the same".to_owned(),
+        ));
+    }
+
+    // Validate that all commitments are valid points on the G1 curve.
+    // The checks run sequentially; par_iter could speed this up for large batches.
+    // This prevents invalid curve attacks
+    if commitments.iter().any(|commitment| {
+        commitment == &G1Affine::identity()
+            || !commitment.is_on_curve()
+            || !commitment.is_in_correct_subgroup_assuming_on_curve()
+    }) {
+        return Err(KzgError::NotOnCurveError(
+            "commitment not on curve".to_owned(),
+        ));
+    }
+
+    // Validate that all proofs are valid points on the G1 curve
+    if proofs.iter().any(|proof| {
+        proof == &G1Affine::identity()
+            || !proof.is_on_curve()
+            || !proof.is_in_correct_subgroup_assuming_on_curve()
+    }) {
+        return Err(KzgError::NotOnCurveError("proof not on curve".to_owned()));
+    }
+
+    // Compute evaluation challenges and evaluate polynomials at those points
+    // This step:
+    // 1. Generates random evaluation points for each blob
+    // 2. Evaluates each blob's polynomial at its corresponding point
+    let (evaluation_challenges, ys) =
+        helpers::compute_challenges_and_evaluate_polynomial(blobs, commitments)?;
+
+    // Convert each blob to its polynomial evaluation form and get its length in field elements.
+    // This length value is needed for computing the challenge
+    let blobs_as_field_elements_length: Vec<u64> = blobs
+        .iter()
+        .map(|blob| blob.to_polynomial_eval_form().evaluations().len() as u64)
+        .collect();
+
+    // Perform the actual batch verification using the computed values:
+    // - commitments: Original KZG commitments
+    // - evaluation_challenges: Points where polynomials are evaluated
+    // - ys: Values of polynomials at evaluation points
+    // - proofs: KZG proofs for each evaluation
+    // - blobs_as_field_elements_length: Length of each blob's polynomial
+    verify_kzg_proof_batch(
+        commitments,
+        &evaluation_challenges,
+        &ys,
+        proofs,
+        &blobs_as_field_elements_length,
+    )
+}
+
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch
+/// A helper that feeds `helpers::compute_powers`. It implements the following reference code from the 4844 spec:
+/// `# Append all inputs to the transcript before we hash
+/// for commitment, z, y, proof in zip(commitments, zs, ys, proofs):
+///     data += commitment + bls_field_to_bytes(z) + bls_field_to_bytes(y) + proof`
+fn compute_r_powers(
+    commitments: &[G1Affine],
+    zs: &[Fr],
+    ys: &[Fr],
+    proofs: &[G1Affine],
+    blobs_as_field_elements_length: &[u64],
+) -> Result<Vec<Fr>, KzgError> {
+    // Get the number of commitments/proofs we're processing
+    let n = commitments.len();
+
+    // Initial data length includes:
+    // - 24 bytes for domain separator
+    // - 8 bytes for number of field elements per blob
+    // - 8 bytes for number of commitments
+    let mut initial_data_length: usize = 40;
+
+    // Calculate total input size:
+    // - initial_data_length (40 bytes)
+    // - For each of the n commitments/zs/ys/proofs/blobs_as_field_elements_length entries:
+    //   * BYTES_PER_FIELD_ELEMENT for commitment
+    //   * 2 * BYTES_PER_FIELD_ELEMENT for z and y values
+    //   * BYTES_PER_FIELD_ELEMENT for proof
+    //   * 8 bytes for blob length
+    let input_size = initial_data_length
+        + n * (BYTES_PER_FIELD_ELEMENT + 2 * BYTES_PER_FIELD_ELEMENT + BYTES_PER_FIELD_ELEMENT + 8);
+
+    // Initialize buffer for data to be hashed
+    let mut data_to_be_hashed: Vec<u8> = vec![0; input_size];
+
+    // Copy domain separator to start of buffer
+    // This provides domain separation for the hash function
+    data_to_be_hashed[0..24].copy_from_slice(RANDOM_CHALLENGE_KZG_BATCH_DOMAIN);
+
+    // Convert number of commitments to big-endian bytes and copy to buffer
+    let n_bytes: [u8; 8] = n.to_be_bytes();
+    data_to_be_hashed[32..40].copy_from_slice(&n_bytes);
+
+    let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];
+    for (chunk, &length) in target_slice
+        .chunks_mut(8)
+        .zip(blobs_as_field_elements_length)
+    {
+        chunk.copy_from_slice(&length.to_be_bytes());
+    }
+    initial_data_length += n * 8;
+
+    // Process each commitment, proof, and evaluation point/value
+    for i in 0..n {
+        // Serialize and copy commitment
+        let mut v = vec![];
+
+        // TODO(anupsv): Move serialization to helper function.
+
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch
+/// A companion to the `helpers::compute_powers` function: it builds the hash
+/// transcript, mirroring the following reference code from the 4844 spec.
+/// Ref: `# Append all inputs to the transcript before we hash
+/// for commitment, z, y, proof in zip(commitments, zs, ys, proofs):
+///     data += commitment + bls_field_to_bytes(z) + bls_field_to_bytes(y) + proof`
+fn compute_r_powers(
+    commitments: &[G1Affine],
+    zs: &[Fr],
+    ys: &[Fr],
+    proofs: &[G1Affine],
+    blobs_as_field_elements_length: &[u64],
+) -> Result<Vec<Fr>, KzgError> {
+    // Get the number of commitments/proofs we're processing
+    let n = commitments.len();
+
+    // Initial data length includes:
+    // - 24 bytes for domain separator
+    // - 8 bytes for number of field elements per blob
+    // - 8 bytes for number of commitments
+    let mut initial_data_length: usize = 40;
+
+    // Calculate total input size:
+    // - initial_data_length (40 bytes)
+    // - For each of the n commitments/zs/ys/proofs/blobs_as_field_elements_length
+    //   (which are all the same length):
+    //   * BYTES_PER_FIELD_ELEMENT for commitment
+    //   * 2 * BYTES_PER_FIELD_ELEMENT for z and y values
+    //   * BYTES_PER_FIELD_ELEMENT for proof
+    //   * 8 bytes for blob length
+    let input_size = initial_data_length
+        + n * (BYTES_PER_FIELD_ELEMENT + 2 * BYTES_PER_FIELD_ELEMENT + BYTES_PER_FIELD_ELEMENT + 8);
+
+    // Initialize buffer for data to be hashed
+    let mut data_to_be_hashed: Vec<u8> = vec![0; input_size];
+
+    // Copy domain separator to start of buffer
+    // This provides domain separation for the hash function
+    data_to_be_hashed[0..24].copy_from_slice(RANDOM_CHALLENGE_KZG_BATCH_DOMAIN);
+
+    // Convert the number of commitments to big-endian bytes and copy to buffer
+    let n_bytes: [u8; 8] = n.to_be_bytes();
+    data_to_be_hashed[32..40].copy_from_slice(&n_bytes);
+
+    let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];
+    for (chunk, &length) in target_slice
+        .chunks_mut(8)
+        .zip(blobs_as_field_elements_length)
+    {
+        chunk.copy_from_slice(&length.to_be_bytes());
+    }
+    initial_data_length += n * 8;
+
+    // Process each commitment, proof, and evaluation point/value
+    for i in 0..n {
+        // Serialize and copy commitment
+        let mut v = vec![];
+
+        // TODO(anupsv): Move serialization to helper function. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/32
+        commitments[i].serialize_compressed(&mut v).map_err(|_| {
+            KzgError::SerializationError("Failed to serialize commitment".to_string())
+        })?;
+        data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
+            .copy_from_slice(&v[..]);
+        initial_data_length += BYTES_PER_FIELD_ELEMENT;
+
+        // Convert z point to big-endian bytes and copy
+        let v = zs[i].into_bigint().to_bytes_be();
+        data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
+            .copy_from_slice(&v[..]);
+        initial_data_length += BYTES_PER_FIELD_ELEMENT;
+
+        // Convert y value to big-endian bytes and copy
+        let v = ys[i].into_bigint().to_bytes_be();
+        data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
+            .copy_from_slice(&v[..]);
+        initial_data_length += BYTES_PER_FIELD_ELEMENT;
+
+        // Serialize and copy proof
+        let mut proof_bytes = vec![];
+        proofs[i]
+            .serialize_compressed(&mut proof_bytes)
+            .map_err(|_| KzgError::SerializationError("Failed to serialize proof".to_string()))?;
+        data_to_be_hashed[initial_data_length..(proof_bytes.len() + initial_data_length)]
+            .copy_from_slice(&proof_bytes[..]);
+        initial_data_length += BYTES_PER_FIELD_ELEMENT;
+    }
+
+    // Verify we filled the entire buffer exactly; this guards against
+    // buffer-layout mistakes (overflow or underflow) in the steps above.
+    if initial_data_length != input_size {
+        return Err(KzgError::InvalidInputLength);
+    }
+
+    // Hash all the data to get our random challenge
+    let r = helpers::hash_to_field_element(&data_to_be_hashed);
+
+    // Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)]
+    Ok(helpers::compute_powers(&r, n))
+}
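For reference, `helpers::compute_powers` is assumed to return the successive powers [r^0, r^1, ..., r^(n-1)] of the challenge, as the comment above states; a minimal re-implementation sketch with arkworks field arithmetic:

    use ark_bn254::Fr;
    use ark_ff::One;

    // Illustration only: successive powers of a field element.
    fn powers_of(r: &Fr, n: usize) -> Vec<Fr> {
        let mut powers = Vec::with_capacity(n);
        let mut acc = Fr::one(); // r^0
        for _ in 0..n {
            powers.push(acc);
            acc *= r; // advance to the next power
        }
        powers
    }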
+
+/// Verifies multiple KZG proofs efficiently.
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch
+/// # Arguments
+///
+/// * `commitments` - A slice of `G1Affine` commitments.
+/// * `zs` - A slice of `Fr` elements representing z values.
+/// * `ys` - A slice of `Fr` elements representing y values.
+/// * `proofs` - A slice of `G1Affine` proofs.
+/// * `blobs_as_field_elements_length` - The length, in field elements, of each blob's polynomial.
+///
+/// # Returns
+///
+/// * `Ok(true)` if all proofs are valid.
+/// * `Ok(false)` if any proof is invalid.
+/// * `Err(KzgError)` if an error occurs during verification.
+fn verify_kzg_proof_batch(
+    commitments: &[G1Affine],
+    zs: &[Fr],
+    ys: &[Fr],
+    proofs: &[G1Affine],
+    blobs_as_field_elements_length: &[u64],
+) -> Result<bool, KzgError> {
+    // Verify that all input slices have the same length;
+    // this is crucial for batch verification to work correctly.
+    if !(commitments.len() == zs.len() && zs.len() == ys.len() && ys.len() == proofs.len()) {
+        return Err(KzgError::GenericError(
+            "lengths of the inputs are not the same".to_owned(),
+        ));
+    }
+
+    // Check that all commitments are valid points on the G1 curve
+    // This prevents invalid-curve attacks
+    if !commitments
+        .iter()
+        .all(|commitment| is_on_curve_g1(&G1Projective::from(*commitment)))
+    {
+        return Err(KzgError::NotOnCurveError(
+            "commitment not on curve".to_owned(),
+        ));
+    }
+
+    // Check that all proofs are valid points on the G1 curve
+    if !proofs
+        .iter()
+        .all(|proof| is_on_curve_g1(&G1Projective::from(*proof)))
+    {
+        return Err(KzgError::NotOnCurveError("proof not on curve".to_owned()));
+    }
+
+    // Verify that the trusted setup point τ*G2 is on the G2 curve
+    if !helpers::is_on_curve_g2(&G2Projective::from(G2_TAU)) {
+        return Err(KzgError::NotOnCurveError("g2 tau".to_owned()));
+    }
+
+    let n = commitments.len();
+
+    // Initialize vectors to store:
+    // c_minus_y: [C_i - [y_i]_1] (commitment minus the encoded evaluation value)
+    // r_times_z: [r^i * z_i] (powers of the random challenge times evaluation points)
+    let mut c_minus_y: Vec<G1Affine> = Vec::with_capacity(n);
+    let mut r_times_z: Vec<Fr> = Vec::with_capacity(n);
+
+    // Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)]
+    let r_powers = compute_r_powers(commitments, zs, ys, proofs, blobs_as_field_elements_length)?;
+
+    // Compute Σ(r^i * proof_i)
+    let proof_lincomb = helpers::g1_lincomb(proofs, &r_powers)?;
+
+    // For each proof i:
+    // 1. Compute C_i - [y_i]_1
+    // 2. Compute r^i * z_i
+    for i in 0..n {
+        // Encode y_i as a point on G1
+        let ys_encrypted = G1Affine::generator() * ys[i];
+        // Compute C_i - [y_i]_1 and convert to affine coordinates
+        c_minus_y.push((commitments[i] - ys_encrypted).into_affine());
+        // Compute r^i * z_i
+        r_times_z.push(r_powers[i] * zs[i]);
+    }
+
+    // Compute:
+    // proof_z_lincomb = Σ(r^i * z_i * proof_i)
+    // c_minus_y_lincomb = Σ(r^i * (C_i - [y_i]_1))
+    let proof_z_lincomb = helpers::g1_lincomb(proofs, &r_times_z)?;
+    let c_minus_y_lincomb = helpers::g1_lincomb(&c_minus_y, &r_powers)?;
+
+    // Compute the right-hand side of the pairing equation
+    let rhs_g1 = c_minus_y_lincomb + proof_z_lincomb;
+
+    // Verify the pairing equation:
+    // e(Σ(r^i * proof_i), [τ]_2) = e(Σ(r^i * (C_i - [y_i]_1)) + Σ(r^i * z_i * proof_i), [1]_2)
+    let result =
+        helpers::pairings_verify(proof_lincomb, G2_TAU, rhs_g1.into(), G2Affine::generator());
+    Ok(result)
+}
diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs
new file mode 100644
index 0000000..04c79a0
--- /dev/null
+++ b/verifier/src/lib.rs
@@ -0,0 +1,2 @@
+pub mod batch;
+pub mod verify;
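Why the single pairing check above suffices (side note, not part of the patch): each valid opening proof \pi_i for the claim p_i(z_i) = y_i satisfies the standard KZG identity

    e(\pi_i, [\tau]_2 - [z_i]_2) = e(C_i - [y_i]_1, [1]_2),

which by bilinearity rearranges to e(\pi_i, [\tau]_2) = e(C_i - [y_i]_1 + z_i \pi_i, [1]_2). Multiplying the n instances weighted by the Fiat-Shamir powers r^i yields exactly the aggregated equation checked at the end of verify_kzg_proof_batch:

    e(\sum_i r^i \pi_i, [\tau]_2) = e(\sum_i r^i (C_i - [y_i]_1) + \sum_i r^i z_i \pi_i, [1]_2).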
diff --git a/verifier/src/verify.rs b/verifier/src/verify.rs
new file mode 100644
index 0000000..c124442
--- /dev/null
+++ b/verifier/src/verify.rs
@@ -0,0 +1,85 @@
+use ark_bn254::{Fr, G1Affine, G2Affine};
+use ark_ec::{AffineRepr, CurveGroup};
+use rust_kzg_bn254_primitives::{blob::Blob, consts::G2_TAU, errors::KzgError, helpers};
+
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_proof
+/// TODO(anupsv): Accept bytes instead of Fr element and Affine points. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/30
+pub fn verify_proof(
+    commitment: G1Affine,
+    proof: G1Affine,
+    value_fr: Fr,
+    z_fr: Fr,
+) -> Result<bool, KzgError> {
+    if !commitment.is_on_curve() || !commitment.is_in_correct_subgroup_assuming_on_curve() {
+        return Err(KzgError::NotOnCurveError(
+            "commitment not on curve".to_string(),
+        ));
+    }
+
+    if !proof.is_on_curve() || !proof.is_in_correct_subgroup_assuming_on_curve() {
+        return Err(KzgError::NotOnCurveError("proof not on curve".to_string()));
+    }
+
+    // Get τ*G2 from the trusted setup
+    // This is the second generator point multiplied by the trusted setup secret
+    let g2_tau = G2_TAU;
+
+    // Compute [value]*G1
+    // This encodes the claimed evaluation value as a point in G1
+    let value_g1 = (G1Affine::generator() * value_fr).into_affine();
+
+    // Compute [C - value*G1]
+    // This represents the difference between the commitment and the claimed value.
+    // If the claim is valid, this equals H(X)(X - z) evaluated at τ in the exponent.
+    let commit_minus_value = (commitment - value_g1).into_affine();
+
+    // Compute [z]*G2
+    // This encodes the evaluation point as a point in G2
+    let z_g2 = (G2Affine::generator() * z_fr).into_affine();
+
+    // Compute [τ - z]*G2
+    // This represents (X - z) in the polynomial equation
+    // τ is the secret from the trusted setup representing the variable X
+    let x_minus_z = (g2_tau - z_g2).into_affine();
+
+    // Verify the pairing equation:
+    // e([C - value*G1], G2) = e(proof, [τ - z]*G2)
+    // This checks that (C - value*G1) = proof * (τ - z),
+    // which verifies the polynomial quotient relationship.
+    Ok(helpers::pairings_verify(
+        commit_minus_value,     // Left side first argument
+        G2Affine::generator(),  // Left side second argument (G2 generator)
+        proof,                  // Right side first argument
+        x_minus_z,              // Right side second argument
+    ))
+}
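A minimal usage sketch of verify_proof (not part of the patch): the commitment, proof, evaluation value and point would come from the prover side, e.g. commit_eval_form and compute_proof_with_known_z_fr_index as in the benchmarks above.

    use ark_bn254::{Fr, G1Affine};
    use rust_kzg_bn254_verifier::verify::verify_proof;

    // Illustration only: treat verification errors (malformed points) as "invalid".
    fn is_valid_opening(commitment: G1Affine, proof: G1Affine, value: Fr, z: Fr) -> bool {
        verify_proof(commitment, proof, value, z).unwrap_or(false)
    }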
+
+/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_blob_kzg_proof
+/// TODO(anupsv): Accept bytes instead of Affine points. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/31
+pub fn verify_blob_kzg_proof(
+    blob: &Blob,
+    commitment: &G1Affine,
+    proof: &G1Affine,
+) -> Result<bool, KzgError> {
+    if !commitment.is_on_curve() || !commitment.is_in_correct_subgroup_assuming_on_curve() {
+        return Err(KzgError::NotOnCurveError(
+            "commitment not on curve".to_string(),
+        ));
+    }
+
+    if !proof.is_on_curve() || !proof.is_in_correct_subgroup_assuming_on_curve() {
+        return Err(KzgError::NotOnCurveError("proof not on curve".to_string()));
+    }
+
+    // Convert the blob to a polynomial in evaluation form
+    let polynomial = blob.to_polynomial_eval_form();
+
+    // Compute the Fiat-Shamir evaluation challenge for the blob and commitment
+    let evaluation_challenge = helpers::compute_challenge(blob, commitment)?;
+
+    // Evaluate the polynomial in evaluation form at the challenge point
+    let y = helpers::evaluate_polynomial_in_evaluation_form(&polynomial, &evaluation_challenge)?;
+
+    // Verify the KZG proof
+    self::verify_proof(*commitment, *proof, y, evaluation_challenge)
+}
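And a matching sketch for the blob-level API (again not part of the patch), which recomputes the Fiat-Shamir challenge internally so the caller only supplies the raw data:

    use ark_bn254::G1Affine;
    use rust_kzg_bn254_primitives::blob::Blob;
    use rust_kzg_bn254_verifier::verify::verify_blob_kzg_proof;

    // Illustration only: wrap raw bytes into a Blob and verify a single proof.
    fn check_blob(raw: &[u8], commitment: &G1Affine, proof: &G1Affine) -> bool {
        let blob = Blob::from_raw_data(raw);
        verify_blob_kzg_proof(&blob, commitment, proof).unwrap_or(false)
    }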
diff --git a/tests/kzg_test.rs b/verifier/tests/tests.rs
similarity index 51%
rename from tests/kzg_test.rs
rename to verifier/tests/tests.rs
index ca46c99..51410a8 100644
--- a/tests/kzg_test.rs
+++ b/verifier/tests/tests.rs
@@ -1,132 +1,25 @@
 #[cfg(test)]
 mod tests {
-    use ark_bn254::{Fq, Fr, G1Affine, G2Projective};
+    use ark_bn254::{Fq, G1Affine};
     use ark_ec::AffineRepr;
     use ark_ff::UniformRand;
     use lazy_static::lazy_static;
     use rand::Rng;
-    use rust_kzg_bn254::{
-        blob::Blob, consts::PRIMITIVE_ROOTS_OF_UNITY, errors::KzgError, helpers, kzg::KZG,
-        polynomial::PolynomialCoeffForm,
-    };
-    use std::{env, fs::File, io::BufReader};

     const GETTYSBURG_ADDRESS_BYTES: &[u8] = "Fourscore and seven years ago our fathers brought forth, on this continent, a new nation, conceived in liberty, and dedicated to the proposition that all men are created equal. Now we are engaged in a great civil war, testing whether that nation, or any nation so conceived, and so dedicated, can long endure. We are met on a great battle-field of that war. We have come to dedicate a portion of that field, as a final resting-place for those who here gave their lives, that that nation might live. It is altogether fitting and proper that we should do this. But, in a larger sense, we cannot dedicate, we cannot consecrate—we cannot hallow—this ground. The brave men, living and dead, who struggled here, have consecrated it far above our poor power to add or detract. The world will little note, nor long remember what we say here, but it can never forget what they did here. It is for us the living, rather, to be dedicated here to the unfinished work which they who fought here have thus far so nobly advanced. It is rather for us to be here dedicated to the great task remaining before us—that from these honored dead we take increased devotion to that cause for which they here gave the last full measure of devotion—that we here highly resolve that these dead shall not have died in vain—that this nation, under God, shall have a new birth of freedom, and that government of the people, by the people, for the people, shall not perish from the earth.".as_bytes();
     use ark_std::{str::FromStr, One};
-
-    // Function to determine the setup based on an environment variable
-    fn determine_setup() -> KZG {
-        match env::var("KZG_ENV") {
-            Ok(val) if val == "mainnet-data" => KZG::setup(
-                "tests/test-files/mainnet-data/g1.131072.point",
-                268435456,
-                131072,
-            )
-            .unwrap(),
-            _ => KZG::setup("tests/test-files/g1.point", 3000, 3000).unwrap(),
-        }
-    }
+    use rust_kzg_bn254_primitives::blob::Blob;
+    use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS};
+    use rust_kzg_bn254_verifier::{batch::verify_blob_kzg_proof_batch, verify::verify_proof};

     // Define a static variable for setup
     lazy_static! {
-        static ref KZG_INSTANCE: KZG = determine_setup();
-        static ref KZG_3000: KZG = KZG::setup("tests/test-files/g1.point", 3000, 3000,).unwrap();
-    }
-
-    #[test]
-    fn test_commit_errors() {
-        let mut coeffs = vec![];
-        for _ in 0..4000 {
-            coeffs.push(Fr::one());
-        }
-
-        let polynomial = PolynomialCoeffForm::new(coeffs);
-        let result = KZG_3000.commit_coeff_form(&polynomial);
-        assert_eq!(
-            result,
-            Err(KzgError::SerializationError(
-                "polynomial length is not correct".to_string()
-            ))
-        );
-    }
-
-    #[test]
-    fn test_kzg_setup_errors() {
-        let kzg2 = KZG::setup("tests/test-files/g1.point", 3000, 3001);
-        assert_eq!(
-            kzg2,
-            Err(KzgError::GenericError(
-                "number of points to load is more than the srs order".to_string()
-            ))
-        );
-    }
-
-    #[test]
-    fn test_blob_to_kzg_commitment() {
-        use ark_bn254::Fq;
-
-        let blob = Blob::from_raw_data(GETTYSBURG_ADDRESS_BYTES);
-        let fn_output = KZG_3000.commit_blob(&blob).unwrap();
-        let commitment_from_da = G1Affine::new_unchecked(
-            Fq::from_str(
-                "2961155957874067312593973807786254905069537311739090798303675273531563528369",
-            )
-            .unwrap(),
-            Fq::from_str(
-                "159565752702690920280451512738307422982252330088949702406468210607852362941",
-            )
-            .unwrap(),
-        );
-        assert_eq!(commitment_from_da, fn_output);
-    }
-
-    #[test]
-    fn test_compute_kzg_proof_random_100_blobs() {
-        use rand::Rng;
-
-        let mut rng = rand::thread_rng();
-        let mut kzg = KZG_INSTANCE.clone();
-
-        (0..1).for_each(|_| {
-            let blob_length = rand::thread_rng().gen_range(35..50000);
-            let random_blob: Vec<u8> = (0..blob_length)
-                .map(|_| rng.gen_range(32..=126) as u8)
-                .collect();
-            println!("generating blob of length {}", blob_length);
-
-            let input = Blob::from_raw_data(&random_blob);
-            let input_poly = input.to_polynomial_eval_form();
-            kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap())
-                .unwrap();
-
-            let index =
-                rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements());
-            let commitment = kzg.commit_eval_form(&input_poly.clone()).unwrap();
-            let proof = kzg
-                .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap())
-                .unwrap();
-            let value_fr = input_poly.get_evalualtion(index).unwrap();
-            let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
-            let pairing_result = kzg
-                .verify_proof(commitment, proof, value_fr.clone(), z_fr.clone())
-                .unwrap();
-            assert_eq!(pairing_result, true);
-
-            // take a random index, not the same index, and check
-            assert_eq!(
-                kzg.verify_proof(
-                    commitment,
-                    proof,
-                    value_fr.clone(),
-                    kzg.get_nth_root_of_unity(
-                        (index + 1) % input_poly.len_underlying_blob_field_elements()
-                    )
-                    .unwrap()
-                    .clone()
-                )
-                .unwrap(),
-                false
-            )
-        })
+    static ref KZG_INSTANCE: KZG = KZG::new();
+    static ref SRS_INSTANCE: SRS = SRS::new(
+        "../prover/tests/test-files/mainnet-data/g1.131072.point",
+        268435456,
+        131072
+    )
+    .unwrap();
 }
 
     #[test]
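The split into KZG and SRS visible in this hunk means the SRS points are loaded once and passed explicitly to the prover calls. A minimal setup sketch (not part of the patch; the numeric arguments mirror the test constants above, presumably the SRS order and the number of points to load):

    use rust_kzg_bn254_prover::{kzg::KZG, srs::SRS};

    // Illustration only: load the G1 SRS once and reuse it across prover calls.
    fn setup() -> (KZG, SRS) {
        let srs = SRS::new(
            "../prover/tests/test-files/mainnet-data/g1.131072.point",
            268435456,
            131072,
        )
        .unwrap();
        (KZG::new(), srs)
    }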
@@ -151,21 +44,24 @@ mod tests {
                 break;
             }
         }
-        let commitment = kzg.commit_eval_form(&input_poly).unwrap();
+        let commitment = kzg.commit_eval_form(&input_poly, &SRS_INSTANCE).unwrap();
         let proof = kzg
-            .compute_proof_with_known_z_fr_index(&input_poly, index.try_into().unwrap())
+            .compute_proof_with_known_z_fr_index(
+                &input_poly,
+                index.try_into().unwrap(),
+                &SRS_INSTANCE,
+            )
             .unwrap();
         let value_fr = input_poly.get_evalualtion(index).unwrap();
         let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
-        let pairing_result = kzg
-            .verify_proof(commitment, proof, value_fr.clone(), z_fr.clone())
-            .unwrap();
+        let pairing_result =
+            verify_proof(commitment, proof, value_fr.clone(), z_fr.clone()).unwrap();
         assert_eq!(pairing_result, true);
 
         assert_eq!(
-            kzg.verify_proof(
+            verify_proof(
                 commitment,
                 proof,
                 value_fr.clone(),
@@ -178,55 +74,58 @@
     }
 
     #[test]
-    fn test_g1_ifft() {
-        use ark_bn254::Fq;
-        use std::io::BufRead;
-
-        let file = File::open("tests/test-files/lagrangeG1SRS.txt").unwrap();
-        let reader = BufReader::new(file);
-
-        let kzg_g1_points = KZG_3000.g1_ifft(64).unwrap();
-
-        // Iterate over each line in the file
-        for (i, line_result) in reader.lines().enumerate() {
-            let mut line = line_result.unwrap(); // Retrieve the line, handling potential I/O errors
-            line = line.trim_end().to_string();
-
-            // Split the line at each comma and process the parts
-            let parts: Vec<&str> = line.split(',').collect();
-
-            let x = Fq::from_str(parts[0]).expect("should be fine");
-            let y = Fq::from_str(parts[1]).expect("should be fine");
-
-            let point = G1Affine::new_unchecked(x, y);
-            assert_eq!(point, kzg_g1_points[i], "failed on {i}");
-        }
-    }
-
-    #[test]
-    fn test_read_g1_point_from_bytes_be() {
-        use ark_bn254::Fq;
-        use ark_std::str::FromStr;
-        use std::io::BufRead;
+    fn test_compute_kzg_proof_random_100_blobs() {
+        use rand::Rng;
 
-        let file = File::open("tests/test-files/srs.g1.points.string").unwrap();
-        let reader = BufReader::new(file);
-        let kzg_g1_points = KZG_3000.get_g1_points();
+        let mut rng = rand::thread_rng();
+        let mut kzg = KZG_INSTANCE.clone();
 
-        // Iterate over each line in the file
-        for (i, line_result) in reader.lines().enumerate() {
-            let mut line = line_result.unwrap(); // Retrieve the line, handling potential I/O errors
-            line = line.trim_end().to_string();
+        (0..100).for_each(|_| {
+            let blob_length = rand::thread_rng().gen_range(35..50000);
+            let random_blob: Vec<u8> = (0..blob_length)
+                .map(|_| rng.gen_range(32..=126) as u8)
+                .collect();
+            println!("generating blob of length {}", blob_length);
 
-            // Split the line at each comma and process the parts
-            let parts: Vec<&str> = line.split(',').collect();
+            let input = Blob::from_raw_data(&random_blob);
+            let input_poly = input.to_polynomial_eval_form();
+            kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap())
+                .unwrap();
 
-            let x = Fq::from_str(parts[0]).expect("should be fine");
-            let y = Fq::from_str(parts[1]).expect("should be fine");
+            let index =
+                rand::thread_rng().gen_range(0..input_poly.len_underlying_blob_field_elements());
+            let commitment = kzg
+                .commit_eval_form(&input_poly.clone(), &SRS_INSTANCE)
+                .unwrap();
+            let proof = kzg
+                .compute_proof_with_known_z_fr_index(
+                    &input_poly,
+                    index.try_into().unwrap(),
+                    &SRS_INSTANCE,
+                )
+                .unwrap();
+            let value_fr = input_poly.get_evalualtion(index).unwrap();
+            let z_fr = kzg.get_nth_root_of_unity(index).unwrap();
+            let pairing_result =
+                verify_proof(commitment, proof, value_fr.clone(), z_fr.clone()).unwrap();
+            assert_eq!(pairing_result, true);
 
-            let point = G1Affine::new_unchecked(x, y);
-            assert_eq!(point, kzg_g1_points[i]);
-        }
+            // take a random index, not the same index, and check
+            assert_eq!(
+                verify_proof(
+                    commitment,
+                    proof,
+                    value_fr.clone(),
+                    kzg.get_nth_root_of_unity(
+                        (index + 1) % input_poly.len_underlying_blob_field_elements()
+                    )
+                    .unwrap()
+                    .clone()
+                )
+                .unwrap(),
+                false
+            )
+        })
     }
 
     #[test]
@@ -238,8 +137,8 @@
         let mut commitments: Vec<G1Affine> = Vec::new();
         let mut proofs: Vec<G1Affine> = Vec::new();
 
-        (0..1).for_each(|_| {
-            let blob_length = rand::thread_rng().gen_range(35..50000);
+        (0..100).for_each(|_| {
+            let blob_length = rng.gen_range(35..50000);
             let random_blob: Vec<u8> = (0..blob_length)
                 .map(|_| rng.gen_range(32..=126) as u8)
                 .collect();
@@ -249,8 +148,10 @@
             kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap())
                 .unwrap();
 
-            let commitment = kzg.commit_eval_form(&input_poly).unwrap();
-            let proof = kzg.compute_blob_proof(&input, &commitment).unwrap();
+            let commitment = kzg.commit_eval_form(&input_poly, &SRS_INSTANCE).unwrap();
+            let proof = kzg
+                .compute_blob_proof(&input, &commitment, &SRS_INSTANCE)
+                .unwrap();
 
             blobs.push(input);
             commitments.push(commitment);
@@ -261,35 +162,29 @@
         let mut bad_commitments = commitments.clone();
         let mut bad_proofs = proofs.clone();
 
-        let pairing_result = kzg
-            .verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs)
-            .unwrap();
+        let pairing_result = verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs).unwrap();
         assert_eq!(pairing_result, true);
 
         bad_blobs.pop();
         bad_blobs.push(Blob::from_raw_data(b"random"));
-        let pairing_result_bad_blobs = kzg
-            .verify_blob_kzg_proof_batch(&bad_blobs, &commitments, &proofs)
-            .unwrap();
+        let pairing_result_bad_blobs =
+            verify_blob_kzg_proof_batch(&bad_blobs, &commitments, &proofs).unwrap();
         assert_eq!(pairing_result_bad_blobs, false);
 
        bad_commitments.pop();
        bad_commitments.push(G1Affine::rand(&mut rng));
-        let pairing_result_bad_commitments = kzg
-            .verify_blob_kzg_proof_batch(&blobs, &bad_commitments, &proofs)
-            .unwrap();
+        let pairing_result_bad_commitments =
+            verify_blob_kzg_proof_batch(&blobs, &bad_commitments, &proofs).unwrap();
         assert_eq!(pairing_result_bad_commitments, false);
 
         bad_proofs.pop();
         bad_proofs.push(G1Affine::rand(&mut rng));
-        let pairing_result_bad_proofs = kzg
-            .verify_blob_kzg_proof_batch(&blobs, &commitments, &bad_proofs)
-            .unwrap();
+        let pairing_result_bad_proofs =
+            verify_blob_kzg_proof_batch(&blobs, &commitments, &bad_proofs).unwrap();
         assert_eq!(pairing_result_bad_proofs, false);
 
-        let pairing_result_everything_bad = kzg
-            .verify_blob_kzg_proof_batch(&bad_blobs, &bad_commitments, &bad_proofs)
-            .unwrap();
+        let pairing_result_everything_bad =
+            verify_blob_kzg_proof_batch(&bad_blobs, &bad_commitments, &bad_proofs).unwrap();
         assert_eq!(pairing_result_everything_bad, false);
     }
 
@@ -304,8 +199,12 @@
         let input_poly1 = input1.to_polynomial_eval_form();
 
-        let commitment1 = kzg.commit_eval_form(&input_poly1.clone()).unwrap();
-        let proof_1 = kzg.compute_blob_proof(&input1, &commitment1).unwrap();
+        let commitment1 = kzg
+            .commit_eval_form(&input_poly1.clone(), &SRS_INSTANCE)
+            .unwrap();
+        let proof_1 = kzg
+            .compute_blob_proof(&input1, &commitment1, &SRS_INSTANCE)
+            .unwrap();
 
         let mut reversed_input: Vec<u8> = vec![0; GETTYSBURG_ADDRESS_BYTES.len()];
         reversed_input.clone_from_slice(GETTYSBURG_ADDRESS_BYTES);
@@ -318,18 +217,18 @@
             .unwrap();
         let input_poly2 = input2.to_polynomial_eval_form();
 
-        let commitment2 = kzg2.commit_eval_form(&input_poly2).unwrap();
+        let commitment2 = kzg2.commit_eval_form(&input_poly2, &SRS_INSTANCE).unwrap();
 
-        let proof_2 = kzg2.compute_blob_proof(&input2, &commitment2).unwrap();
+        let proof_2 = kzg2
+            .compute_blob_proof(&input2, &commitment2, &SRS_INSTANCE)
+            .unwrap();
 
         let blobs = vec![input1, input2];
         let commitments = vec![commitment1, commitment2];
         let proofs = vec![proof_1, proof_2];
 
         // let res = kzg.verify_blob_kzg_proof(&input1, &commitment1, &auto_proof).unwrap();
-        let pairing_result = kzg
-            .verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs)
-            .unwrap();
+        let pairing_result = verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs).unwrap();
 
         assert_eq!(pairing_result, true);
     }
 
@@ -346,8 +245,10 @@
         // First blob and proof - regular case
         let input1 = Blob::from_raw_data(GETTYSBURG_ADDRESS_BYTES);
         let input_poly1 = input1.to_polynomial_eval_form();
-        let commitment1 = kzg.commit_eval_form(&input_poly1).unwrap();
-        let proof_1 = kzg.compute_blob_proof(&input1, &commitment1).unwrap();
+        let commitment1 = kzg.commit_eval_form(&input_poly1, &SRS_INSTANCE).unwrap();
+        let proof_1 = kzg
+            .compute_blob_proof(&input1, &commitment1, &SRS_INSTANCE)
+            .unwrap();
 
         // Create a proof point at infinity
         let proof_at_infinity = G1Affine::identity();
 
@@ -357,21 +258,21 @@
         let proofs = vec![proof_at_infinity];
 
         // This should fail since a proof point at infinity is invalid
-        let result = kzg.verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs);
+        let result = verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs);
 
         assert!(result.is_err());
 
         // Also test mixed case - one valid proof, one at infinity
         let input2 = Blob::from_raw_data(b"second input");
         let input_poly2 = input2.to_polynomial_eval_form();
-        let commitment2 = kzg.commit_eval_form(&input_poly2).unwrap();
+        let commitment2 = kzg.commit_eval_form(&input_poly2, &SRS_INSTANCE).unwrap();
 
         let blobs_mixed = vec![input1, input2];
         let commitments_mixed = vec![commitment1, commitment2];
         let proofs_mixed = vec![proof_1, proof_at_infinity];
 
         let result_mixed =
-            kzg.verify_blob_kzg_proof_batch(&blobs_mixed, &commitments_mixed, &proofs_mixed);
+            verify_blob_kzg_proof_batch(&blobs_mixed, &commitments_mixed, &proofs_mixed);
 
         assert!(result_mixed.is_err());
     }
 
@@ -384,8 +285,10 @@
         // Create valid inputs first
         let input = Blob::from_raw_data(GETTYSBURG_ADDRESS_BYTES);
         let input_poly = input.to_polynomial_eval_form();
-        let valid_commitment = kzg.commit_eval_form(&input_poly).unwrap();
-        let valid_proof = kzg.compute_blob_proof(&input, &valid_commitment).unwrap();
+        let valid_commitment = kzg.commit_eval_form(&input_poly, &SRS_INSTANCE).unwrap();
+        let valid_proof = kzg
+            .compute_blob_proof(&input, &valid_commitment, &SRS_INSTANCE)
+            .unwrap();
 
         // Create points not on the curve
         let invalid_point_commitment = generate_point_wrong_subgroup();
@@ -432,7 +335,7 @@
         for (commitments, proofs, case_description) in test_cases {
             let blobs = vec![input.clone(), input.clone()];
-            let result = kzg.verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs);
+            let result = verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs);
 
             assert!(
                 result.is_err(),
@@ -442,79 +345,6 @@
         }
     }
 
-    #[test]
-    fn test_evaluate_polynomial_in_evaluation_form_random_blob_all_indexes() {
-        let mut rng = rand::thread_rng();
-        let mut kzg = KZG_INSTANCE.clone();
-        let blob_length: u64 = rand::thread_rng().gen_range(35..40000);
-        let random_blob: Vec<u8> = (0..blob_length)
-            .map(|_| rng.gen_range(32..=126) as u8)
-            .collect();
-
-        let input = Blob::from_raw_data(&random_blob);
-        let input_poly = input.to_polynomial_eval_form();
-
-        for i in 0..input_poly.len_underlying_blob_field_elements() {
-            kzg.calculate_and_store_roots_of_unity(input.len().try_into().unwrap())
-                .unwrap();
-            let z_fr = kzg.get_nth_root_of_unity(i).unwrap();
-            let claimed_y_fr =
-                KZG::evaluate_polynomial_in_evaluation_form(&input_poly, z_fr, 3000).unwrap();
-            assert_eq!(claimed_y_fr, input_poly.evaluations()[i]);
-        }
-    }
-
-    #[test]
-    fn test_primitive_roots_from_bigint_to_fr() {
-        let data: [&str; 29] = [
-            "1",
-            "21888242871839275222246405745257275088548364400416034343698204186575808495616",
-            "21888242871839275217838484774961031246007050428528088939761107053157389710902",
-            "19540430494807482326159819597004422086093766032135589407132600596362845576832",
-            "14940766826517323942636479241147756311199852622225275649687664389641784935947",
-            "4419234939496763621076330863786513495701855246241724391626358375488475697872",
-            "9088801421649573101014283686030284801466796108869023335878462724291607593530",
-            "10359452186428527605436343203440067497552205259388878191021578220384701716497",
-            "3478517300119284901893091970156912948790432420133812234316178878452092729974",
-            "6837567842312086091520287814181175430087169027974246751610506942214842701774",
-            "3161067157621608152362653341354432744960400845131437947728257924963983317266",
-            "1120550406532664055539694724667294622065367841900378087843176726913374367458",
-            "4158865282786404163413953114870269622875596290766033564087307867933865333818",
-            "197302210312744933010843010704445784068657690384188106020011018676818793232",
-            "20619701001583904760601357484951574588621083236087856586626117568842480512645",
-            "20402931748843538985151001264530049874871572933694634836567070693966133783803",
-            "421743594562400382753388642386256516545992082196004333756405989743524594615",
-            "12650941915662020058015862023665998998969191525479888727406889100124684769509",
-            "11699596668367776675346610687704220591435078791727316319397053191800576917728",
-            "15549849457946371566896172786938980432421851627449396898353380550861104573629",
-            "17220337697351015657950521176323262483320249231368149235373741788599650842711",
-            "13536764371732269273912573961853310557438878140379554347802702086337840854307",
-            "12143866164239048021030917283424216263377309185099704096317235600302831912062",
-            "934650972362265999028062457054462628285482693704334323590406443310927365533",
-            "5709868443893258075976348696661355716898495876243883251619397131511003808859",
-            "19200870435978225707111062059747084165650991997241425080699860725083300967194",
-            "7419588552507395652481651088034484897579724952953562618697845598160172257810",
-            "2082940218526944230311718225077035922214683169814847712455127909555749686340",
-            "19103219067921713944291392827692070036145651957329286315305642004821462161904",
-        ];
-        let fr_s = data
-            .iter()
-            .map(|s: &&str| Fr::from_str(*s).unwrap())
-            .collect::<Vec<Fr>>();
-
-        for i in 0..PRIMITIVE_ROOTS_OF_UNITY.len() {
-            let root_of_unity_at_index = PRIMITIVE_ROOTS_OF_UNITY[i];
-            assert_eq!(root_of_unity_at_index, fr_s[i]);
-        }
-    }
-
-    #[test]
-    fn test_g2_tau_in_group() {
-        let kzg = &KZG_INSTANCE;
-        let tau = kzg.get_g2_tau();
-        assert!(helpers::is_on_curve_g2(&G2Projective::from(tau)));
-    }
-
     // Helper function to generate a point in the wrong subgroup
     fn generate_point_wrong_subgroup() -> G1Affine {
         let x = Fq::from_str(