Skip to content

Commit

Permalink
Fixing g2 tau points as Consts (#40)
Browse files Browse the repository at this point in the history
* removing reading g2, fixing g2 tau points. clean up

* fixing mainnet points

* removing unwanted files

* style+perf: clean-up and optimize remove_empty_byte_from_padded_bytes_unchecked fn (#41)

* style+perf: clean-up and optimize remove_empty_byte_from_padded_bytes_unchecked function

* ci: make cargo fmt use nightly

There were a bunch of warnings that some of our set fmt properties were not being run:
Warning: can't set `wrap_comments = true`, unstable features are only available in nightly channel.
Warning: can't set `normalize_comments = true`, unstable features are only available in nightly channel.

* style: cargo fmt

* Revert "ci: make cargo fmt use nightly"

Getting "error: toolchain 'nightly-x86_64-unknown-linux-gnu' is not installed" on github,
and don't feel like debugging. Not even sure how cargo/rust are installed.
Do they come preloaded by default?

This reverts commit 6e87e0a.

* Revert "style: cargo fmt"

This reverts commit ae70bf5.

* style: cargo fmt

* Cleanup/roots of unities setup functions (#37)

* adding bare changes for batch verification

* adding some comments

* adding more comments

* moving back to sha2

* removing a test which is no longer needed. Removing methods no longer needed

* updates to method visibility, updating tests

* fmt fixes

* clean up

* cleanup, optimization, inline docs

* removing unwanted const

* more docs and cleanup

* formatting

* removing unwanted comments

* cargo fmt and clippy

* adding test for point at infinity

* cleaner errors, cleanup

* adding another test case

* removing unwanted errors

* adding fixes per comments

* adding 4844 spec references

* comment fixes

* formatting, adding index out of bound check, removing print statement

* removing unwanted test, adding test for evaluate_polynomial_in_evaluation_form

* moving test to bottom section

* Update src/polynomial.rs

Co-authored-by: Samuel Laferriere <[email protected]>

* Update src/kzg.rs

Co-authored-by: Samuel Laferriere <[email protected]>

* Update src/kzg.rs

Co-authored-by: Samuel Laferriere <[email protected]>

* Update src/kzg.rs

Co-authored-by: Samuel Laferriere <[email protected]>

* Update src/helpers.rs

Co-authored-by: Samuel Laferriere <[email protected]>

* updating deps, and toolchain to 1.84

* removing errors test, no longer useful

* adding to_byte_array arg explanation

* fmt fixes

* fmt and clippy fixes

* fixing function names and fmt

* clippy fixes

* removing unwanted setup functions

* removing vars from struct

* fixing function name and comments

* fixing naming for tests

* fixing naming in benches

* formatting

* removing is_zero

---------

Co-authored-by: anupsv <[email protected]>
Co-authored-by: Samuel Laferriere

* fixing endianness to big across functions (#39)

* fixing endianness to big across functions

* removing todo's

* storing g2 tau as consts

* cargo fmt

* moving checker to test, adding explanation, cargo fmt

* more cargo fmt

---------

Co-authored-by: anupsv <[email protected]>
Co-authored-by: Samuel Laferriere <[email protected]>
  • Loading branch information
3 people authored Jan 19, 2025
1 parent 94f57a1 commit 089e1ac
Show file tree
Hide file tree
Showing 17 changed files with 62 additions and 3,376 deletions.
9 changes: 1 addition & 8 deletions benches/bench_g1_ifft.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,7 @@ fn generate_powers_of_2(limit: u64) -> Vec<usize> {

fn bench_g1_ifft(c: &mut Criterion) {
c.bench_function("bench_g1_ifft", |b| {
let kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
3000,
3000,
)
.unwrap();
let kzg = KZG::setup("tests/test-files/mainnet-data/g1.131072.point", 3000, 3000).unwrap();
b.iter(|| {
for power in &generate_powers_of_2(3000) {
kzg.g1_ifft(black_box(*power)).unwrap();
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_commit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_commit(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_commit_large_blobs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_commit(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.32mb.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
524288,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_proof(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
13 changes: 1 addition & 12 deletions benches/bench_kzg_setup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,11 @@ use std::time::Duration;

fn bench_kzg_setup(c: &mut Criterion) {
c.bench_function("bench_kzg_setup", |b| {
b.iter(|| {
KZG::setup(
"tests/test-files/g1.point",
"tests/test-files/g2.point",
"tests/test-files/g2.point.powerOf2",
3000,
3000,
)
.unwrap()
});
b.iter(|| KZG::setup("tests/test-files/g1.point", 3000, 3000).unwrap());

b.iter(|| {
KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_verify.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_verify(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
29 changes: 28 additions & 1 deletion src/consts.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,36 @@
use ark_bn254::{Fq2, G2Affine};
use ark_ff::MontFp;

pub const BYTES_PER_FIELD_ELEMENT: usize = 32;
pub const SIZE_OF_G1_AFFINE_COMPRESSED: usize = 32; // in bytes
pub const SIZE_OF_G2_AFFINE_COMPRESSED: usize = 64; // in bytes

/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
pub const FIAT_SHAMIR_PROTOCOL_DOMAIN: &[u8] = b"EIGENDA_FSBLOBVERIFY_V1_"; // Adapted from 4844

/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
pub const RANDOM_CHALLENGE_KZG_BATCH_DOMAIN: &[u8] = b"EIGENDA_RCKZGBATCH___V1_"; // Adapted from 4844

/// Hard-coded G2 tau point for the 3000-point *test* SRS
/// (`tests/test-files/g1.point`), stored as a constant so the G2 SRS file no
/// longer needs to be read and decompressed at setup time.
/// Coordinates are Fq2 elements given as decimal Montgomery-form literals via `MontFp!`.
pub const G2_TAU_FOR_TEST_SRS_3000: G2Affine = G2Affine::new_unchecked(
    Fq2::new(
        MontFp!("7912312892787135728292535536655271843828059318189722219035249994421084560563"),
        MontFp!("21039730876973405969844107393779063362038454413254731404052240341412356318284"),
    ),
    Fq2::new(
        MontFp!("18697407556011630376420900106252341752488547575648825575049647403852275261247"),
        MontFp!("7586489485579523767759120334904353546627445333297951253230866312564920951171"),
    ),
);

/// Hard-coded G2 tau point for the MAINNET SRS points (see commit:
/// "storing g2 tau as consts"), so no G2 file read is needed at setup time.
/// NOTE(review): `new_unchecked` skips curve/subgroup validation — these
/// literals are assumed correct; a checker was moved into the tests.
pub const G2_TAU_FOR_MAINNET_SRS: G2Affine = G2Affine::new_unchecked(
    Fq2::new(
        MontFp!("19394299006376106554626551996044114846855237028623244664226757033024550999552"),
        MontFp!("10478571113809844268398751534081669357808742555529167819607714577862447855483"),
    ),
    Fq2::new(
        MontFp!("9205262336805673656533560220225620941045451042642528799409071118332922267006"),
        MontFp!("10552783866161062341197740743287753408530108186218052255509661543860392060676"),
    ),
);

// Number of G1 points in the mainnet SRS file (131072 = 2^17).
// (Previous comment wrongly described this as the G2 tau point itself;
// the tau point is `G2_TAU_FOR_MAINNET_SRS` above.)
pub const MAINNET_SRS_G1_SIZE: usize = 131072;
73 changes: 3 additions & 70 deletions src/helpers.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Projective};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::{sbb, BigInt, BigInteger, Field, LegendreSymbol, PrimeField};
use ark_ff::{sbb, BigInt, BigInteger, Field, PrimeField};
use ark_std::{str::FromStr, vec::Vec, One, Zero};
use crossbeam_channel::Receiver;
use std::cmp;

use crate::{
arith,
consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED, SIZE_OF_G2_AFFINE_COMPRESSED},
consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED},
errors::KzgError,
traits::ReadPointFromBytes,
};
Expand Down Expand Up @@ -171,8 +171,6 @@ pub fn to_fr_array(data: &[u8]) -> Vec<Fr> {
///
/// let mut kzg = KZG::setup(
/// "tests/test-files/mainnet-data/g1.131072.point",
/// "",
/// "tests/test-files/mainnet-data/g2.point.powerOf2",
/// 268435456,
/// 131072,
/// ).unwrap();
Expand Down Expand Up @@ -274,71 +272,6 @@ pub fn lexicographically_largest(z: &Fq) -> bool {
borrow == 0
}

/// Decodes a compressed BN254 G2 point from `SIZE_OF_G2_AFFINE_COMPRESSED`
/// (64) big-endian bytes.
///
/// The two most significant bits of the first byte are compression flags
/// (gnark-style encoding — TODO confirm the exact upstream format):
/// `01` = point at infinity, `10` = y is the lexicographically smallest root,
/// `11` = y is the lexicographically largest root.
///
/// Returns an error if the slice is the wrong length, the infinity encoding
/// carries non-zero payload bytes, no square root exists for the recovered x,
/// or the final point fails validation.
pub fn read_g2_point_from_bytes_be(g2_bytes_be: &[u8]) -> Result<G2Affine, &str> {
    if g2_bytes_be.len() != SIZE_OF_G2_AFFINE_COMPRESSED {
        return Err("not enough bytes for g2 point");
    }

    // Compression flags live in the top two bits of byte 0.
    let m_mask: u8 = 0b11 << 6;
    let m_compressed_infinity: u8 = 0b01 << 6;
    let m_compressed_smallest: u8 = 0b10 << 6;
    let m_compressed_largest: u8 = 0b11 << 6;

    let m_data = g2_bytes_be[0] & m_mask;

    if m_data == m_compressed_infinity {
        // Infinity must be encoded as the flag bits followed by all-zero
        // payload; anything else is a malformed encoding.
        if !is_zeroed(
            g2_bytes_be[0] & !m_mask,
            g2_bytes_be[1..SIZE_OF_G2_AFFINE_COMPRESSED].to_vec(),
        ) {
            return Err("point at infinity not coded properly for g2");
        }
        return Ok(G2Affine::zero());
    }

    // Strip the flag bits, then split the 64 bytes into the two Fq limbs of
    // the Fq2 x-coordinate. Note the first half is c1 and the second half is
    // c0 (big-endian layout stores the "high" coefficient first).
    let mut x_bytes = [0u8; SIZE_OF_G2_AFFINE_COMPRESSED];
    x_bytes.copy_from_slice(g2_bytes_be);
    x_bytes[0] &= !m_mask;
    let half_size = SIZE_OF_G2_AFFINE_COMPRESSED / 2;

    let c1 = Fq::from_be_bytes_mod_order(&x_bytes[..half_size]);
    let c0 = Fq::from_be_bytes_mod_order(&x_bytes[half_size..]);
    let x = Fq2::new(c0, c1);
    // Curve equation y^2 = x^3 + b: start from x^3.
    let y_squared = x * x * x;

    // this is bTwistCurveCoeff
    let twist_curve_coeff = get_b_twist_curve_coeff();

    let added_result = y_squared + twist_curve_coeff;
    // A quadratic non-residue means x is not the x-coordinate of any point on
    // the twist, so the compressed bytes are invalid.
    if added_result.legendre() == LegendreSymbol::QuadraticNonResidue {
        return Err("invalid compressed coordinate: square root doesn't exist");
    }

    // The legendre check above guarantees a root exists, so this unwrap
    // cannot fire (the `ok_or` message is effectively dead).
    let mut y_sqrt = added_result.sqrt().ok_or("no square root found").unwrap();

    // Decide which of the two roots +/-y we computed: compare against the
    // lexicographically larger one, using c1 unless it is zero (then c0).
    let lexicographical_check_result = if y_sqrt.c1.0.is_zero() {
        lexicographically_largest(&y_sqrt.c0)
    } else {
        lexicographically_largest(&y_sqrt.c1)
    };

    // Negate y when the computed root does not match the root selected by the
    // flag bits (smallest vs largest).
    if lexicographical_check_result {
        if m_data == m_compressed_smallest {
            y_sqrt.neg_in_place();
        }
    } else if m_data == m_compressed_largest {
        y_sqrt.neg_in_place();
    }

    let point = G2Affine::new_unchecked(x, y_sqrt);
    // NOTE(review): this rejects only points that are ON the curve but in the
    // wrong subgroup (`!subgroup && on_curve`). Since y was derived from the
    // curve equation the point is on the curve by construction, so in practice
    // this is a subgroup check — but `||`/negation here looks worth
    // double-checking against the G1 counterpart.
    if !point.is_in_correct_subgroup_assuming_on_curve()
        && is_on_curve_g2(&G2Projective::from(point))
    {
        return Err("point couldn't be created");
    }
    Ok(point)
}

pub fn read_g1_point_from_bytes_be(g1_bytes_be: &[u8]) -> Result<G1Affine, &str> {
if g1_bytes_be.len() != SIZE_OF_G1_AFFINE_COMPRESSED {
return Err("not enough bytes for g1 point");
Expand Down
Loading

0 comments on commit 089e1ac

Please sign in to comment.