Add rust-clippy to CI (#108)

Convert clippy warnings to errors and fix the existing warnings

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
This commit is contained in:
tyshko-rostyslav 2023-02-06 05:54:59 +01:00 committed by GitHub
parent cbf8c541c2
commit 7aba62ff51
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 80 additions and 81 deletions

View File

@ -185,22 +185,22 @@ jobs:
- name: multiplier - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
cargo clippy --release -- -D warnings
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
cargo clippy --release -- -D warnings
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
cargo clippy --release -- -D warnings
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
cargo clippy --release -- -D warnings
working-directory: utils
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy

View File

@ -28,7 +28,7 @@ ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circo
# error handling
# thiserror = "1.0.26"
color-eyre = "0.5"
color-eyre = "0.6.1"
# decoding of data
# hex = "0.4.3"

View File

@ -54,7 +54,7 @@ impl Multiplier {
let proof = prove(circom, &params, &mut rng).unwrap();
// XXX: Unclear if this is different from other serialization(s)
let _ = proof.serialize(result_data).unwrap();
proof.serialize(result_data).unwrap();
Ok(())
}

View File

@ -26,7 +26,7 @@ ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", de
wasmer = { version = "2.3.0", default-features = false }
# error handling
color-eyre = "0.5.11"
color-eyre = "0.6.1"
thiserror = "1.0.0"
# utilities
@ -53,4 +53,4 @@ wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
# Note: pmtree feature is still experimental
pmtree = ["default"]
pmtree = ["default"]

View File

@ -136,7 +136,7 @@ pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
// We read the wasm file
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(&wasm_path).unwrap();
let wasm_buffer = std::fs::read(wasm_path).unwrap();
circom_from_raw(wasm_buffer)
}

View File

@ -53,42 +53,42 @@ pub struct RLNProofValues {
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
return fr_to_bytes_le(&element);
fr_to_bytes_le(&element)
}
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
let (element, _) = bytes_le_to_fr(&serialized);
return element;
element
}
pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
return (identity_secret_hash, id_commitment);
(identity_secret_hash, id_commitment)
}
pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
let mut all_read = 0;
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
return (
(
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_commitment,
);
)
}
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
@ -107,22 +107,22 @@ pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..]);
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..]);
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
@ -151,13 +151,13 @@ pub fn proof_inputs_to_rln_witness(
) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
@ -317,22 +317,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;
let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
(
@ -364,9 +364,10 @@ pub fn prepare_prove_input(
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
return serialized;
serialized
}
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
@ -376,7 +377,7 @@ pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
return serialized;
serialized
}
///////////////////////////////////////////////////////
@ -519,9 +520,9 @@ pub fn compute_id_secret(
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
return Ok(a_0);
Ok(a_0)
} else {
return Err("Cannot recover identity_secret_hash from provided shares".into());
Err("Cannot recover identity_secret_hash from provided shares".into())
}
}

View File

@ -237,7 +237,7 @@ impl RLN<'_> {
let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte);
// We set the leaves
return self.tree.set_range(index, leaves);
self.tree.set_range(index, leaves)
}
/// Resets the tree state to default and sets multiple leaves starting from index 0.
@ -251,7 +251,7 @@ impl RLN<'_> {
// NOTE: this requires the tree to be initialized with the correct height initially
// TODO: accept tree_height as a parameter and initialize the tree with that height
self.set_tree(self.tree.depth())?;
return self.set_leaves_from(0, input_data);
self.set_leaves_from(0, input_data)
}
/// Sets a leaf value at the next available never-set leaf index.
@ -419,7 +419,7 @@ impl RLN<'_> {
*/
let proof = generate_proof(
&mut self.witness_calculator,
self.witness_calculator,
self.proving_key.as_ref().unwrap(),
&rln_witness,
)
@ -472,9 +472,9 @@ impl RLN<'_> {
// [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
let mut input_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input_byte)?;
let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec())).unwrap();
let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128])).unwrap();
let (proof_values, _) = deserialize_proof_values(&input_byte[128..].to_vec());
let (proof_values, _) = deserialize_proof_values(&input_byte[128..]);
let verified = verify_proof(
self.verification_key.as_ref().unwrap(),
@ -618,7 +618,7 @@ impl RLN<'_> {
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
let signal_len =
@ -703,7 +703,7 @@ impl RLN<'_> {
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
let signal_len =
@ -726,7 +726,7 @@ impl RLN<'_> {
&& (proof_values.rln_identifier == hash_to_field(RLN_IDENTIFIER));
// We skip root validation if proof is already invalid
if partial_result == false {
if !partial_result {
return Ok(partial_result);
}
@ -749,14 +749,13 @@ impl RLN<'_> {
}
// We validate the root
let roots_verified: bool;
if roots.is_empty() {
let roots_verified: bool = if roots.is_empty() {
// If no root is passed in roots_buffer, we skip proof's root check
roots_verified = true;
true
} else {
// Otherwise we check if proof's root is contained in the passed buffer
roots_verified = roots.contains(&proof_values.root);
}
roots.contains(&proof_values.root)
};
// We combine all checks
Ok(partial_result && roots_verified)
@ -952,14 +951,14 @@ impl RLN<'_> {
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_1.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
let (proof_values_1, _) = deserialize_proof_values(&serialized[128..].to_vec());
let (proof_values_1, _) = deserialize_proof_values(&serialized[128..]);
let external_nullifier_1 =
poseidon_hash(&[proof_values_1.epoch, proof_values_1.rln_identifier]);
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_2.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
let (proof_values_2, _) = deserialize_proof_values(&serialized[128..].to_vec());
let (proof_values_2, _) = deserialize_proof_values(&serialized[128..]);
let external_nullifier_2 =
poseidon_hash(&[proof_values_2.epoch, proof_values_2.rln_identifier]);
@ -977,8 +976,7 @@ impl RLN<'_> {
compute_id_secret(share1, share2, external_nullifier_1);
// If an identity secret hash is recovered, we write it to output_data, otherwise nothing will be written.
if recovered_identity_secret_hash.is_ok() {
let identity_secret_hash = recovered_identity_secret_hash.unwrap();
if let Ok(identity_secret_hash) = recovered_identity_secret_hash {
output_data.write_all(&fr_to_bytes_le(&identity_secret_hash))?;
}
}

View File

@ -145,7 +145,7 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
res.push(curr_el);
read += el_size;
}
@ -162,7 +162,7 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
res.push(curr_el);
read += el_size;
}

View File

@ -16,7 +16,7 @@ ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"]
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.5"
color-eyre = "0.6.1"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
rand = "0.8.4"
@ -32,10 +32,10 @@ rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.5"
color-eyre = "0.6.1"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
[profile.release]
codegen-units = 1

View File

@ -37,7 +37,7 @@ fn build_circuit() -> Result<()> {
.current_dir("./vendor/semaphore")
.status()?
.success()
.then(|| ())
.then_some(())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};

View File

@ -50,7 +50,7 @@ fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&*ZKEY
&ZKEY
}
#[cfg(feature = "dylib")]

View File

@ -86,7 +86,7 @@ impl<H: Hasher> OptimalMerkleTree<H> {
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth: depth,
depth,
nodes: HashMap::new(),
next_index: 0,
}

View File

@ -1,2 +1,3 @@
#[allow(clippy::module_inception)]
pub mod merkle_tree;
pub use self::merkle_tree::*;

View File

@ -88,8 +88,8 @@ impl PoseidonGrainLFSR {
}
// b50, ..., b79 are set to 1
for i in 50..=79 {
state[i] = true;
for item in state.iter_mut().skip(50) {
*item = true;
}
let head = 0;
@ -111,7 +111,7 @@ impl PoseidonGrainLFSR {
let mut new_bit = self.update();
// Loop until the first bit is true
while new_bit == false {
while !new_bit {
// Discard the second bit
let _ = self.update();
// Obtain another first bit
@ -263,8 +263,8 @@ pub fn find_poseidon_ark_and_mds<F: PrimeField>(
let ys = lfsr.get_field_elements_mod_p::<F>(rate);
for i in 0..(rate) {
for j in 0..(rate) {
mds[i][j] = (xs[i] + &ys[j]).inverse().unwrap();
for (j, ys_item) in ys.iter().enumerate().take(rate) {
mds[i][j] = (xs[i] + ys_item).inverse().unwrap();
}
}

View File

@ -28,8 +28,7 @@ impl<F: PrimeField> Poseidon<F> {
pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
let mut read_params = Vec::<RoundParamenters<F>>::new();
for i in 0..poseidon_params.len() {
let (t, n_rounds_f, n_rounds_p, skip_matrices) = poseidon_params[i];
for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
let (ark, mds) = find_poseidon_ark_and_mds::<F>(
1, // is_field = 1
0, // is_sbox_inverse = 0
@ -40,10 +39,10 @@ impl<F: PrimeField> Poseidon<F> {
skip_matrices,
);
let rp = RoundParamenters {
t: t,
n_rounds_p: n_rounds_p,
n_rounds_f: n_rounds_f,
skip_matrices: skip_matrices,
t,
n_rounds_p,
n_rounds_f,
skip_matrices,
c: ark,
m: mds,
};
@ -67,11 +66,11 @@ impl<F: PrimeField> Poseidon<F> {
pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
for j in 0..state.len() {
let aux = state[j];
state[j] *= state[j];
state[j] *= state[j];
state[j] *= aux;
for current_state in &mut state.iter_mut() {
let aux = *current_state;
*current_state *= *current_state;
*current_state *= *current_state;
*current_state *= aux;
}
} else {
let aux = state[0];
@ -85,9 +84,9 @@ impl<F: PrimeField> Poseidon<F> {
let mut new_state: Vec<F> = Vec::new();
for i in 0..state.len() {
new_state.push(F::zero());
for j in 0..state.len() {
for (j, state_item) in state.iter().enumerate() {
let mut mij = m[i][j];
mij *= state[j];
mij *= state_item;
new_state[i] += mij;
}
}
@ -116,7 +115,7 @@ impl<F: PrimeField> Poseidon<F> {
self.ark(
&mut state,
&self.round_params[param_index].c,
(i as usize) * self.round_params[param_index].t,
i * self.round_params[param_index].t,
);
self.sbox(
self.round_params[param_index].n_rounds_f,