From 7aba62ff512dadc0f5f12e7a1b2cbf29e1b3f913 Mon Sep 17 00:00:00 2001
From: tyshko-rostyslav <122977916+tyshko-rostyslav@users.noreply.github.com>
Date: Mon, 6 Feb 2023 05:54:59 +0100
Subject: [PATCH] Add rust-clippy to CI (#108)

Convert clippy warnings to errors, fix them

---------

Co-authored-by: tyshkor
---
 .github/workflows/ci.yml                 |  8 ++--
 multiplier/Cargo.toml                    |  2 +-
 multiplier/src/public.rs                 |  2 +-
 rln/Cargo.toml                           |  4 +-
 rln/src/circuit.rs                       |  2 +-
 rln/src/protocol.rs                      | 57 ++++++++++++------
 rln/src/public.rs                        | 32 +++++++------
 rln/src/utils.rs                         |  4 +-
 semaphore/Cargo.toml                     |  6 +--
 semaphore/build.rs                       |  2 +-
 semaphore/src/circuit.rs                 |  2 +-
 utils/src/merkle_tree/merkle_tree.rs     |  2 +-
 utils/src/merkle_tree/mod.rs             |  1 +
 utils/src/poseidon/poseidon_constants.rs | 10 ++---
 utils/src/poseidon/poseidon_hash.rs      | 27 ++++++-----
 15 files changed, 80 insertions(+), 81 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a1e178f..9ba2e92 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -185,22 +185,22 @@ jobs:
       - name: multiplier - cargo clippy
         if: success() || failure()
         run: |
-          cargo clippy --release
+          cargo clippy --release -- -D warnings
         working-directory: multiplier
       - name: semaphore - cargo clippy
         if: success() || failure()
         run: |
-          cargo clippy --release
+          cargo clippy --release -- -D warnings
         working-directory: semaphore
       - name: rln - cargo clippy
         if: success() || failure()
         run: |
-          cargo clippy --release
+          cargo clippy --release -- -D warnings
         working-directory: rln
       - name: utils - cargo clippy
         if: success() || failure()
         run: |
-          cargo clippy --release
+          cargo clippy --release -- -D warnings
         working-directory: utils
       # We skip clippy on rln-wasm, since wasm target is managed by cargo make
       # Currently not treating warnings as error, too noisy
diff --git a/multiplier/Cargo.toml b/multiplier/Cargo.toml
index 90fc779..bc09f41 100644
--- a/multiplier/Cargo.toml
+++ b/multiplier/Cargo.toml
@@ -28,7 +28,7 @@ ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circo
 
 # error handling
 # thiserror = "1.0.26"
-color-eyre = "0.5"
+color-eyre = "0.6.1"
 
 # decoding of data
 # hex = "0.4.3"
diff --git a/multiplier/src/public.rs b/multiplier/src/public.rs
index a78c523..950c4f6 100644
--- a/multiplier/src/public.rs
+++ b/multiplier/src/public.rs
@@ -54,7 +54,7 @@ impl Multiplier {
         let proof = prove(circom, &params, &mut rng).unwrap();
 
         // XXX: Unclear if this is different from other serialization(s)
-        let _ = proof.serialize(result_data).unwrap();
+        proof.serialize(result_data).unwrap();
 
         Ok(())
     }
diff --git a/rln/Cargo.toml b/rln/Cargo.toml
index 3edcb32..275d89c 100644
--- a/rln/Cargo.toml
+++ b/rln/Cargo.toml
@@ -26,7 +26,7 @@ ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", de
 wasmer = { version = "2.3.0", default-features = false }
 
 # error handling
-color-eyre = "0.5.11"
+color-eyre = "0.6.1"
 thiserror = "1.0.0"
 
 # utilities
@@ -53,4 +53,4 @@ wasm = ["wasmer/js", "wasmer/std"]
 fullmerkletree = ["default"]
 
 # Note: pmtree feature is still experimental
-pmtree = ["default"]
\ No newline at end of file
+pmtree = ["default"]
diff --git a/rln/src/circuit.rs b/rln/src/circuit.rs
index 14f23a1..1e082de 100644
--- a/rln/src/circuit.rs
+++ b/rln/src/circuit.rs
@@ -136,7 +136,7 @@ pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator>
 pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
     // We read the wasm file
     let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
-    let wasm_buffer = std::fs::read(&wasm_path).unwrap();
+    let wasm_buffer = std::fs::read(wasm_path).unwrap();
     circom_from_raw(wasm_buffer)
 }
 
diff --git a/rln/src/protocol.rs b/rln/src/protocol.rs
index ed461c8..4a56269 100644
--- a/rln/src/protocol.rs
+++ b/rln/src/protocol.rs
@@ -53,42 +53,42 @@ pub struct RLNProofValues {
 }
 
 pub fn serialize_field_element(element: Fr) -> Vec<u8> {
-    return fr_to_bytes_le(&element);
+    fr_to_bytes_le(&element)
 }
 
 pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
     let (element, _) = bytes_le_to_fr(&serialized);
 
-    return element;
+    element
 }
 
 pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
     let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
-    let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
+    let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
 
-    return (identity_secret_hash, id_commitment);
+    (identity_secret_hash, id_commitment)
 }
 
 pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
     let mut all_read = 0;
 
-    let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
 
-    return (
+    (
         identity_trapdoor,
         identity_nullifier,
         identity_secret_hash,
         identity_commitment,
-    );
+    )
 }
 
 pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
@@ -107,22 +107,22 @@ pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
 pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
     let mut all_read: usize = 0;
 
-    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
+    let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
+    let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..]);
     all_read += read;
 
-    let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
     // TODO: check rln_identifier against public::RLN_IDENTIFIER
@@ -151,13 +151,13 @@ pub fn proof_inputs_to_rln_witness(
 ) -> (RLNWitnessInput, usize) {
     let mut all_read: usize = 0;
 
-    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
     let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
     all_read += 8;
 
-    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
     let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
@@ -317,22 +317,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
 pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
     let mut all_read: usize = 0;
 
-    let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
-    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
+    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
     all_read += read;
 
     (
@@ -364,9 +364,10 @@ pub fn prepare_prove_input(
     serialized.append(&mut signal_len.to_le_bytes().to_vec());
     serialized.append(&mut signal.to_vec());
 
-    return serialized;
+    serialized
 }
 
+#[allow(clippy::redundant_clone)]
 pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
     let signal_len = u64::try_from(signal.len()).unwrap();
 
@@ -376,7 +377,7 @@ pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
     serialized.append(&mut signal_len.to_le_bytes().to_vec());
     serialized.append(&mut signal.to_vec());
 
-    return serialized;
+    serialized
 }
 
 ///////////////////////////////////////////////////////
@@ -519,9 +520,9 @@ pub fn compute_id_secret(
 
     if a_1 == computed_a_1 {
         // We successfully recovered the identity secret
-        return Ok(a_0);
+        Ok(a_0)
     } else {
-        return Err("Cannot recover identity_secret_hash from provided shares".into());
+        Err("Cannot recover identity_secret_hash from provided shares".into())
     }
 }
 
diff --git a/rln/src/public.rs b/rln/src/public.rs
index f9007be..e34ddba 100644
--- a/rln/src/public.rs
+++ b/rln/src/public.rs
@@ -237,7 +237,7 @@ impl RLN<'_> {
         let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte);
 
         // We set the leaves
-        return self.tree.set_range(index, leaves);
+        self.tree.set_range(index, leaves)
     }
 
     /// Resets the tree state to default and sets multiple leaves starting from index 0.
@@ -251,7 +251,7 @@ impl RLN<'_> {
         // NOTE: this requires the tree to be initialized with the correct height initially
         // TODO: accept tree_height as a parameter and initialize the tree with that height
         self.set_tree(self.tree.depth())?;
-        return self.set_leaves_from(0, input_data);
+        self.set_leaves_from(0, input_data)
     }
 
     /// Sets a leaf value at the next available never-set leaf index.
@@ -419,7 +419,7 @@ impl RLN<'_> {
         */
 
         let proof = generate_proof(
-            &mut self.witness_calculator,
+            self.witness_calculator,
             self.proving_key.as_ref().unwrap(),
             &rln_witness,
         )
@@ -472,9 +472,9 @@ impl RLN<'_> {
         // [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
         let mut input_byte: Vec<u8> = Vec::new();
         input_data.read_to_end(&mut input_byte)?;
-        let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec())).unwrap();
+        let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128])).unwrap();
 
-        let (proof_values, _) = deserialize_proof_values(&input_byte[128..].to_vec());
+        let (proof_values, _) = deserialize_proof_values(&input_byte[128..]);
 
         let verified = verify_proof(
             self.verification_key.as_ref().unwrap(),
@@ -618,7 +618,7 @@ impl RLN<'_> {
         let mut all_read = 0;
         let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
         all_read += 128;
-        let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
+        let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
         all_read += read;
 
         let signal_len =
@@ -703,7 +703,7 @@ impl RLN<'_> {
         let mut all_read = 0;
         let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
         all_read += 128;
-        let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
+        let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
         all_read += read;
 
         let signal_len =
@@ -726,7 +726,7 @@ impl RLN<'_> {
            && (proof_values.rln_identifier == hash_to_field(RLN_IDENTIFIER));
 
         // We skip root validation if proof is already invalid
-        if partial_result == false {
+        if !partial_result {
             return Ok(partial_result);
         }
 
@@ -749,14 +749,13 @@ impl RLN<'_> {
         }
 
         // We validate the root
-        let roots_verified: bool;
-        if roots.is_empty() {
+        let roots_verified: bool = if roots.is_empty() {
             // If no root is passed in roots_buffer, we skip proof's root check
-            roots_verified = true;
+            true
         } else {
             // Otherwise we check if proof's root is contained in the passed buffer
-            roots_verified = roots.contains(&proof_values.root);
-        }
+            roots.contains(&proof_values.root)
+        };
 
         // We combine all checks
         Ok(partial_result && roots_verified)
@@ -952,14 +951,14 @@ impl RLN<'_> {
         let mut serialized: Vec<u8> = Vec::new();
         input_proof_data_1.read_to_end(&mut serialized)?;
         // We skip deserialization of the zk-proof at the beginning
-        let (proof_values_1, _) = deserialize_proof_values(&serialized[128..].to_vec());
+        let (proof_values_1, _) = deserialize_proof_values(&serialized[128..]);
         let external_nullifier_1 =
             poseidon_hash(&[proof_values_1.epoch, proof_values_1.rln_identifier]);
 
         let mut serialized: Vec<u8> = Vec::new();
         input_proof_data_2.read_to_end(&mut serialized)?;
         // We skip deserialization of the zk-proof at the beginning
-        let (proof_values_2, _) = deserialize_proof_values(&serialized[128..].to_vec());
+        let (proof_values_2, _) = deserialize_proof_values(&serialized[128..]);
         let external_nullifier_2 =
             poseidon_hash(&[proof_values_2.epoch, proof_values_2.rln_identifier]);
 
@@ -977,8 +976,7 @@ impl RLN<'_> {
             compute_id_secret(share1, share2, external_nullifier_1);
 
         // If an identity secret hash is recovered, we write it to output_data, otherwise nothing will be written.
-        if recovered_identity_secret_hash.is_ok() {
-            let identity_secret_hash = recovered_identity_secret_hash.unwrap();
+        if let Ok(identity_secret_hash) = recovered_identity_secret_hash {
             output_data.write_all(&fr_to_bytes_le(&identity_secret_hash))?;
         }
     }
diff --git a/rln/src/utils.rs b/rln/src/utils.rs
index 72bcd9b..faae931 100644
--- a/rln/src/utils.rs
+++ b/rln/src/utils.rs
@@ -145,7 +145,7 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
     let el_size = fr_byte_size();
 
     for i in 0..len {
-        let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
+        let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
         res.push(curr_el);
         read += el_size;
     }
@@ -162,7 +162,7 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
     let el_size = fr_byte_size();
 
     for i in 0..len {
-        let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
+        let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
         res.push(curr_el);
         read += el_size;
     }
diff --git a/semaphore/Cargo.toml b/semaphore/Cargo.toml
index 4818101..9f06fed 100644
--- a/semaphore/Cargo.toml
+++ b/semaphore/Cargo.toml
@@ -16,7 +16,7 @@ ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"]
 ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
 ark-relations = { version = "0.3.0", default-features = false }
 ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
-color-eyre = "0.5"
+color-eyre = "0.6.1"
 num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
 once_cell = "1.8"
 rand = "0.8.4"
@@ -32,10 +32,10 @@ rand_chacha = "0.3.1"
 serde_json = "1.0.79"
 
 [build-dependencies]
-color-eyre = "0.5"
+color-eyre = "0.6.1"
 wasmer = { version = "2.0" }
 wasmer-engine-dylib = { version = "2.2.1", optional = true }
-wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
+wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
 
 [profile.release]
 codegen-units = 1
diff --git a/semaphore/build.rs b/semaphore/build.rs
index 9840b01..fb469fd 100644
--- a/semaphore/build.rs
+++ b/semaphore/build.rs
@@ -37,7 +37,7 @@ fn build_circuit() -> Result<()> {
             .current_dir("./vendor/semaphore")
             .status()?
            .success()
-            .then(|| ())
+            .then_some(())
             .ok_or(eyre!("procees returned failure"))?;
         Ok(())
     };
diff --git a/semaphore/src/circuit.rs b/semaphore/src/circuit.rs
index 2fa5118..58dfcf5 100644
--- a/semaphore/src/circuit.rs
+++ b/semaphore/src/circuit.rs
@@ -50,7 +50,7 @@ fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
 
 #[must_use]
 pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
-    &*ZKEY
+    &ZKEY
 }
 
 #[cfg(feature = "dylib")]
diff --git a/utils/src/merkle_tree/merkle_tree.rs b/utils/src/merkle_tree/merkle_tree.rs
index bb3c8e5..3e1c89f 100644
--- a/utils/src/merkle_tree/merkle_tree.rs
+++ b/utils/src/merkle_tree/merkle_tree.rs
@@ -86,7 +86,7 @@ impl OptimalMerkleTree {
         cached_nodes.reverse();
         OptimalMerkleTree {
             cached_nodes: cached_nodes.clone(),
-            depth: depth,
+            depth,
             nodes: HashMap::new(),
             next_index: 0,
         }
diff --git a/utils/src/merkle_tree/mod.rs b/utils/src/merkle_tree/mod.rs
index 1b9547c..5dd9401 100644
--- a/utils/src/merkle_tree/mod.rs
+++ b/utils/src/merkle_tree/mod.rs
@@ -1,2 +1,3 @@
+#[allow(clippy::module_inception)]
 pub mod merkle_tree;
 pub use self::merkle_tree::*;
diff --git a/utils/src/poseidon/poseidon_constants.rs b/utils/src/poseidon/poseidon_constants.rs
index 612f870..1bbda7c 100644
--- a/utils/src/poseidon/poseidon_constants.rs
+++ b/utils/src/poseidon/poseidon_constants.rs
@@ -88,8 +88,8 @@ impl PoseidonGrainLFSR {
         }
 
         // b50, ..., b79 are set to 1
-        for i in 50..=79 {
-            state[i] = true;
+        for item in state.iter_mut().skip(50) {
+            *item = true;
         }
 
         let head = 0;
@@ -111,7 +111,7 @@ impl PoseidonGrainLFSR {
             let mut new_bit = self.update();
 
             // Loop until the first bit is true
-            while new_bit == false {
+            while !new_bit {
                 // Discard the second bit
                 let _ = self.update();
                 // Obtain another first bit
@@ -263,8 +263,8 @@ pub fn find_poseidon_ark_and_mds<F: PrimeField>(
         let ys = lfsr.get_field_elements_mod_p::<F>(rate);
 
         for i in 0..(rate) {
-            for j in 0..(rate) {
-                mds[i][j] = (xs[i] + &ys[j]).inverse().unwrap();
+            for (j, ys_item) in ys.iter().enumerate().take(rate) {
+                mds[i][j] = (xs[i] + ys_item).inverse().unwrap();
             }
         }
 
diff --git a/utils/src/poseidon/poseidon_hash.rs b/utils/src/poseidon/poseidon_hash.rs
index 4c62a41..9104e22 100644
--- a/utils/src/poseidon/poseidon_hash.rs
+++ b/utils/src/poseidon/poseidon_hash.rs
@@ -28,8 +28,7 @@ impl Poseidon {
     pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
         let mut read_params = Vec::<RoundParamenters<F>>::new();
 
-        for i in 0..poseidon_params.len() {
-            let (t, n_rounds_f, n_rounds_p, skip_matrices) = poseidon_params[i];
+        for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
             let (ark, mds) = find_poseidon_ark_and_mds::<F>(
                 1, // is_field = 1
                 0, // is_sbox_inverse = 0
@@ -40,10 +39,10 @@ impl Poseidon {
                 skip_matrices,
             );
             let rp = RoundParamenters {
-                t: t,
-                n_rounds_p: n_rounds_p,
-                n_rounds_f: n_rounds_f,
-                skip_matrices: skip_matrices,
+                t,
+                n_rounds_p,
+                n_rounds_f,
+                skip_matrices,
                 c: ark,
                 m: mds,
             };
@@ -67,11 +66,11 @@ impl Poseidon {
 
     pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
         if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
-            for j in 0..state.len() {
-                let aux = state[j];
-                state[j] *= state[j];
-                state[j] *= state[j];
-                state[j] *= aux;
+            for current_state in &mut state.iter_mut() {
+                let aux = *current_state;
+                *current_state *= *current_state;
+                *current_state *= *current_state;
+                *current_state *= aux;
             }
         } else {
             let aux = state[0];
@@ -85,9 +84,9 @@ impl Poseidon {
         let mut new_state: Vec<F> = Vec::new();
         for i in 0..state.len() {
             new_state.push(F::zero());
-            for j in 0..state.len() {
+            for (j, state_item) in state.iter().enumerate() {
                 let mut mij = m[i][j];
-                mij *= state[j];
+                mij *= state_item;
                 new_state[i] += mij;
             }
         }
@@ -116,7 +115,7 @@ impl Poseidon {
             self.ark(
                 &mut state,
                 &self.round_params[param_index].c,
-                (i as usize) * self.round_params[param_index].t,
+                i * self.round_params[param_index].t,
             );
             self.sbox(
                 self.round_params[param_index].n_rounds_f,