Skip to content

Commit

Permalink
chore: clippy fix
Browse files Browse the repository at this point in the history
  • Loading branch information
jonathanpwang committed Jul 27, 2023
1 parent 98bc83b commit 07c6a2b
Show file tree
Hide file tree
Showing 8 changed files with 40 additions and 25 deletions.
1 change: 1 addition & 0 deletions halo2_proofs/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ criterion = "0.3"
gumdrop = "0.8"
proptest = "1"
rand_core = { version = "0.6", features = ["getrandom"] }
rand_chacha = "0.3.1"

[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies]
getrandom = { version = "0.2", features = ["js"] }
Expand Down
1 change: 1 addition & 0 deletions halo2_proofs/src/plonk/circuit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ pub(crate) mod sealed {
assert!(self.0 < 2, "The API only supports three phases");
Phase(self.0 + 1)
}
// Suppresses clippy's `wrong_self_convention`: by convention `to_*` methods on
// cheap/Copy types take `self` by value, but this accessor takes `&self`.
#[allow(clippy::wrong_self_convention)]
// Returns the raw phase index stored in this tuple struct (presumably a
// `Phase(u8)` — the sibling `next()` constructs `Phase(self.0 + 1)`; confirm
// against the struct definition, which is outside this view).
pub fn to_u8(&self) -> u8 {
self.0
}
Expand Down
10 changes: 8 additions & 2 deletions halo2_proofs/src/plonk/evaluation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -303,6 +303,7 @@ impl<C: CurveAffine> Evaluator<C> {
let p = &pk.vk.cs.permutation;

// Calculate the advice and instance cosets
#[cfg(feature = "profile")]
let start = start_measure("cosets", false);
let advice: Vec<Vec<Polynomial<C::Scalar, ExtendedLagrangeCoeff>>> = advice_polys
.iter()
Expand All @@ -322,7 +323,7 @@ impl<C: CurveAffine> Evaluator<C> {
.collect()
})
.collect();
stop_measure(start);
// stop_measure(start);

let mut values = domain.empty_extended();

Expand All @@ -335,7 +336,7 @@ impl<C: CurveAffine> Evaluator<C> {
.zip(permutations.iter())
{
// Custom gates
let start = start_measure("custom gates", false);
// let start = start_measure("custom gates", false);
multicore::scope(|scope| {
let chunk_size = (size + num_threads - 1) / num_threads;
for (thread_idx, values) in values.chunks_mut(chunk_size).enumerate() {
Expand Down Expand Up @@ -363,9 +364,11 @@ impl<C: CurveAffine> Evaluator<C> {
});
}
});
#[cfg(feature = "profile")]
stop_measure(start);

// Permutations
#[cfg(feature = "profile")]
let start = start_measure("permutations", false);
let sets = &permutation.sets;
if !sets.is_empty() {
Expand Down Expand Up @@ -447,9 +450,11 @@ impl<C: CurveAffine> Evaluator<C> {
}
});
}
#[cfg(feature = "profile")]
stop_measure(start);

// Lookups
#[cfg(feature = "profile")]
let start = start_measure("lookups", false);
for (n, lookup) in lookups.iter().enumerate() {
// Polynomials required for this lookup.
Expand Down Expand Up @@ -520,6 +525,7 @@ impl<C: CurveAffine> Evaluator<C> {
}
});
}
#[cfg(feature = "profile")]
stop_measure(start);
}
values
Expand Down
4 changes: 2 additions & 2 deletions halo2_proofs/src/plonk/permutation/keygen.rs
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,7 @@ impl Assembly {
{
let omega = domain.get_omega();
parallelize(&mut omega_powers, |o, start| {
let mut cur = omega.pow_vartime(&[start as u64]);
let mut cur = omega.pow_vartime([start as u64]);
for v in o.iter_mut() {
*v = cur;
cur *= &omega;
Expand All @@ -187,7 +187,7 @@ impl Assembly {
let mut deltaomega = vec![omega_powers; p.columns.len()];
{
parallelize(&mut deltaomega, |o, start| {
let mut cur = C::Scalar::DELTA.pow_vartime(&[start as u64]);
let mut cur = C::Scalar::DELTA.pow_vartime([start as u64]);
for omega_powers in o.iter_mut() {
for v in omega_powers {
*v *= &cur;
Expand Down
2 changes: 1 addition & 1 deletion halo2_proofs/src/plonk/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ use std::marker::PhantomData;
use std::ops::RangeTo;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use std::time::Instant;
use std::{collections::HashMap, iter, mem, sync::atomic::Ordering};

use super::{
Expand Down Expand Up @@ -753,6 +752,7 @@ where
#[cfg(feature = "profile")]
let multiopen_time = start_timer!(|| "Phase 5: multiopen");
let prover = P::new(params);
#[allow(clippy::let_and_return)]
let multiopen_res = prover
.create_proof(&mut rng, transcript, instances)
.map_err(|_| Error::ConstraintSystemFailure);
Expand Down
23 changes: 14 additions & 9 deletions halo2_proofs/src/poly/domain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ struct FFTData<F: FieldExt> {
impl<F: FieldExt> FFTData<F> {
/// Create FFT data
pub fn new(n: usize, omega: F, omega_inv: F) -> Self {
let stages = get_stages(n as usize, vec![]);
let stages = get_stages(n, vec![]);
let mut f_twiddles = vec![];
let mut inv_twiddles = vec![];
let mut scratch = vec![F::zero(); n];
Expand All @@ -99,7 +99,7 @@ impl<F: FieldExt> FFTData<F> {
// Twiddles
parallelize(twiddles, |twiddles, start| {
let w_m = o;
let mut w = o.pow_vartime(&[start as u64, 0, 0, 0]);
let mut w = o.pow_vartime([start as u64]);
for value in twiddles.iter_mut() {
*value = w;
w *= w_m;
Expand Down Expand Up @@ -168,9 +168,9 @@ fn butterfly_2_parallel<F: FieldExt>(
num_threads: usize,
) {
let n = out.len();
let mut chunk = (n as usize) / num_threads;
let mut chunk = n / num_threads;
if chunk < num_threads {
chunk = n as usize;
chunk = n;
}

multicore::scope(|scope| {
Expand Down Expand Up @@ -257,9 +257,9 @@ pub fn butterfly_4_parallel<F: FieldExt>(
let j = twiddles[twiddles.len() - 1];

let n = out.len();
let mut chunk = (n as usize) / num_threads;
let mut chunk = n / num_threads;
if chunk < num_threads {
chunk = n as usize;
chunk = n;
}
multicore::scope(|scope| {
//let mut parts: Vec<&mut [F]> = out.chunks_mut(4).collect();
Expand Down Expand Up @@ -1016,7 +1016,7 @@ fn test_fft() {
}
let k = get_degree() as u32;

let mut domain = EvaluationDomain::<Scalar>::new(1, k);
let domain = EvaluationDomain::<Scalar>::new(1, k);
let n = domain.n as usize;

let input = vec![Scalar::random(OsRng); n];
Expand All @@ -1025,19 +1025,24 @@ fn test_fft() {
input[i] = Scalar::random(OsRng);
}*/

#[cfg(feature = "profile")]
let num_threads = multicore::current_num_threads();

let mut a = input.clone();
#[cfg(feature = "profile")]
let start = start_measure(format!("best fft {} ({})", a.len(), num_threads), false);
best_fft(&mut a, domain.omega, k);
#[cfg(feature = "profile")]
stop_measure(start);

let mut b = input.clone();
let mut b = input;
#[cfg(feature = "profile")]
let start = start_measure(
format!("recursive fft {} ({})", a.len(), num_threads),
false,
);
recursive_fft(&mut domain.fft_data, &mut b, false);
recursive_fft(&domain.fft_data, &mut b, false);
#[cfg(feature = "profile")]
stop_measure(start);

for i in 0..n {
Expand Down
20 changes: 10 additions & 10 deletions halo2_proofs/src/transcript.rs
Original file line number Diff line number Diff line change
Expand Up @@ -239,12 +239,12 @@ impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
for Keccak256Read<R, C, Challenge255<C>>
{
fn squeeze_challenge(&mut self) -> Challenge255<C> {
self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);
self.state.update([KECCAK256_PREFIX_CHALLENGE]);

let mut state_lo = self.state.clone();
let mut state_hi = self.state.clone();
state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

Expand All @@ -256,7 +256,7 @@ impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
}

fn common_point(&mut self, point: C) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_POINT]);
self.state.update([KECCAK256_PREFIX_POINT]);
let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
Expand All @@ -270,7 +270,7 @@ impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
}

fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_SCALAR]);
self.state.update([KECCAK256_PREFIX_SCALAR]);
self.state.update(scalar.to_repr().as_ref());

Ok(())
Expand Down Expand Up @@ -401,12 +401,12 @@ impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
for Keccak256Write<W, C, Challenge255<C>>
{
fn squeeze_challenge(&mut self) -> Challenge255<C> {
self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);
self.state.update([KECCAK256_PREFIX_CHALLENGE]);

let mut state_lo = self.state.clone();
let mut state_hi = self.state.clone();
state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
state_lo.update([KECCAK256_PREFIX_CHALLENGE_LO]);
state_hi.update([KECCAK256_PREFIX_CHALLENGE_HI]);
let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();

Expand All @@ -418,7 +418,7 @@ impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
}

fn common_point(&mut self, point: C) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_POINT]);
self.state.update([KECCAK256_PREFIX_POINT]);
let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
Expand All @@ -432,7 +432,7 @@ impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
}

fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
self.state.update(&[KECCAK256_PREFIX_SCALAR]);
self.state.update([KECCAK256_PREFIX_SCALAR]);
self.state.update(scalar.to_repr().as_ref());

Ok(())
Expand Down
4 changes: 3 additions & 1 deletion halo2_proofs/tests/plonk_api.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
#![allow(clippy::many_single_char_names)]
#![allow(clippy::op_ref)]
#![allow(unused_macros)]
#![allow(dead_code)]

use assert_matches::assert_matches;
// use assert_matches::assert_matches;
use halo2_proofs::arithmetic::{Field, FieldExt};
use halo2_proofs::circuit::{Cell, Layouter, SimpleFloorPlanner, Value};
use halo2_proofs::dev::MockProver;
Expand Down

0 comments on commit 07c6a2b

Please sign in to comment.