Commit

moving process_chunks
anupsv committed Jan 29, 2025
1 parent d8e2e2b · commit 8be3338
Showing 3 changed files with 23 additions and 26 deletions.
1 change: 0 additions & 1 deletion primitives/Cargo.toml
@@ -13,7 +13,6 @@ ark-serialize = "0.5.0"
 ark-std = { version = "0.5.0", features = ["parallel"] }
 ark-poly = { version = "0.5.0", features = ["parallel"] }
 sha2 = "0.10.8"
-crossbeam-channel = "0.5"
 num-traits = "0.2"
 thiserror = "2.0.11"

21 changes: 0 additions & 21 deletions primitives/src/helpers.rs
@@ -3,7 +3,6 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM};
 use ark_ff::{sbb, BigInt, BigInteger, Field, PrimeField};
 use ark_serialize::CanonicalSerialize;
 use ark_std::{str::FromStr, vec::Vec, One, Zero};
-use crossbeam_channel::Receiver;
 use num_traits::ToPrimitive;
 use sha2::{Digest, Sha256};
 use std::cmp;
@@ -17,7 +16,6 @@ use crate::{
     },
     errors::KzgError,
     polynomial::PolynomialEvalForm,
-    traits::ReadPointFromBytes,
 };
 use ark_ec::AdditiveGroup;

@@ -317,25 +315,6 @@ pub fn read_g1_point_from_bytes_be(g1_bytes_be: &[u8]) -> Result<G1Affine, &str>
     Ok(point)
 }

-pub fn process_chunks<T>(receiver: Receiver<(Vec<u8>, usize, bool)>) -> Vec<(T, usize)>
-where
-    T: ReadPointFromBytes,
-{
-    // TODO: should we use rayon to process this in parallel?
-    receiver
-        .iter()
-        .map(|(chunk, position, is_native)| {
-            let point: T = if is_native {
-                T::read_point_from_bytes_native_compressed(&chunk)
-                    .expect("Failed to read point from bytes")
-            } else {
-                T::read_point_from_bytes_be(&chunk).expect("Failed to read point from bytes")
-            };
-            (point, position)
-        })
-        .collect()
-}
-
 fn get_b_twist_curve_coeff() -> Fq2 {
     let twist_c0 = Fq::from(9);
     let twist_c1 = Fq::from(1);

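Both removed imports travel with the function: `Receiver` came from crossbeam-channel, and `ReadPointFromBytes` is the bound that lets `process_chunks` decode either encoding. As a purely hypothetical sketch, inferred from the two call sites above, the trait has roughly this shape; the real definition stays in the primitives crate and its error type may differ:

    // Hypothetical sketch only, inferred from how process_chunks calls the trait.
    // The actual ReadPointFromBytes definition lives in the primitives crate and
    // may use a different error type or extra bounds.
    use std::io;

    pub trait ReadPointFromBytes: Sized {
        /// Decode a point from arkworks' native compressed encoding.
        fn read_point_from_bytes_native_compressed(bytes: &[u8]) -> io::Result<Self>;
        /// Decode a point from big-endian bytes.
        fn read_point_from_bytes_be(bytes: &[u8]) -> io::Result<Self>;
    }
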
27 changes: 23 additions & 4 deletions prover/src/srs.rs
@@ -1,7 +1,7 @@
 use ark_bn254::G1Affine;
-use crossbeam_channel::bounded;
+use crossbeam_channel::{bounded, Receiver};
 use rust_kzg_bn254_primitives::errors::KzgError;
-use rust_kzg_bn254_primitives::helpers;
+use rust_kzg_bn254_primitives::traits::ReadPointFromBytes;
 use std::fs::File;
 use std::io::{self, BufReader, Read};

@@ -45,6 +45,25 @@ impl SRS {
         })
     }

+    pub fn process_chunks<T>(receiver: Receiver<(Vec<u8>, usize, bool)>) -> Vec<(T, usize)>
+    where
+        T: ReadPointFromBytes,
+    {
+        // TODO: should we use rayon to process this in parallel?
+        receiver
+            .iter()
+            .map(|(chunk, position, is_native)| {
+                let point: T = if is_native {
+                    T::read_point_from_bytes_native_compressed(&chunk)
+                        .expect("Failed to read point from bytes")
+                } else {
+                    T::read_point_from_bytes_be(&chunk).expect("Failed to read point from bytes")
+                };
+                (point, position)
+            })
+            .collect()
+    }
+
     /// Reads G1 points in parallel from a file.
     ///
     /// # Arguments
@@ -76,7 +95,7 @@ impl SRS {
         let workers: Vec<_> = (0..num_workers)
             .map(|_| {
                 let receiver = receiver.clone();
-                std::thread::spawn(move || helpers::process_chunks::<G1Affine>(receiver))
+                std::thread::spawn(move || Self::process_chunks::<G1Affine>(receiver))
             })
             .collect();

@@ -194,7 +213,7 @@ impl SRS {
         let workers: Vec<_> = (0..num_workers)
             .map(|_| {
                 let receiver = receiver.clone();
-                std::thread::spawn(move || helpers::process_chunks::<G1Affine>(receiver))
+                std::thread::spawn(move || Self::process_chunks::<G1Affine>(receiver))
             })
            .collect();

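For readers following the move, here is a minimal, self-contained sketch of the producer/worker pattern that drives the relocated function, mirroring the hunks above. The crate path `rust_kzg_bn254_prover::srs::SRS`, the function name `decode_points`, and the channel capacity are assumptions for illustration; note that `process_chunks` panics on a malformed chunk, so every chunk must hold a valid serialized point.

    // Sketch (not part of the commit): feed SRS::process_chunks from a bounded
    // crossbeam channel and drain it with a pool of worker threads.
    use ark_bn254::G1Affine;
    use crossbeam_channel::bounded;
    use rust_kzg_bn254_prover::srs::SRS; // assumed crate/module path

    fn decode_points(
        chunks: Vec<(Vec<u8>, usize, bool)>, // (bytes, position, is_native)
        num_workers: usize,
    ) -> Vec<(G1Affine, usize)> {
        // Bounded so the producer cannot run arbitrarily far ahead of the workers.
        let (sender, receiver) = bounded::<(Vec<u8>, usize, bool)>(1000);

        // Producer: send every chunk, then drop the sender so the workers'
        // receiver iterators terminate once the channel drains.
        let producer = std::thread::spawn(move || {
            for chunk in chunks {
                sender.send(chunk).expect("all receivers were dropped");
            }
        });

        // Workers: each clone of the receiver competes for chunks, exactly as in
        // the two hunks of this diff.
        let workers: Vec<_> = (0..num_workers)
            .map(|_| {
                let receiver = receiver.clone();
                std::thread::spawn(move || SRS::process_chunks::<G1Affine>(receiver))
            })
            .collect();

        producer.join().expect("producer thread panicked");
        let mut points = Vec::new();
        for worker in workers {
            points.extend(worker.join().expect("worker thread panicked"));
        }
        // Each entry carries its original position, so the caller can sort to
        // restore file order.
        points
    }

The TODO kept in the function hints at a rayon alternative (collect the chunks into a Vec and decode them with a parallel iterator), which would drop the channel plumbing at the cost of buffering all chunks in memory; this commit keeps the channel-based approach.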
