Da: verifier bench (#672)

* Use reference for DaBlob on verify

* Add verifier bench

* Fix references missed in the refactor

* Make clippy happy
Daniel Sanchez 2024-07-23 12:43:54 +00:00 committed by GitHub
parent fc815069a2
commit f58c296959
5 changed files with 69 additions and 5 deletions

View File

@@ -39,3 +39,6 @@ parallel = [
name = "encoder"
harness = false
+[[bench]]
+name = "verifier"
+harness = false
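
Note: `harness = false` disables the default libtest harness so that the `fn main` provided by divan in the new bench file (below) drives the run. Assuming the standard cargo layout, the suite can then be invoked with something like `cargo bench --bench verifier` from the crate root.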

View File

@@ -0,0 +1,61 @@
use blst::min_sig::SecretKey;
use divan::counter::BytesCount;
use divan::Bencher;
use kzgrs_backend::common::blob::DaBlob;
use kzgrs_backend::encoder::{DaEncoder, DaEncoderParams};
use kzgrs_backend::verifier::DaVerifier;
use rand::{thread_rng, RngCore};
use std::hint::black_box;

fn main() {
divan::main()
}
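
// NOTE: despite the name, this is 1024, so `SIZE` below scales the input in KiB.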
const MB: usize = 1024;
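
/// Random bytes spanning `elements_count` chunks of
/// `MAX_BLS12_381_ENCODING_CHUNK_SIZE` bytes each.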
pub fn rand_data(elements_count: usize) -> Vec<u8> {
let mut buff = vec![0; elements_count * DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE];
rand::thread_rng().fill_bytes(&mut buff);
buff
}
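
// `consts` fixes the data size (SIZE, in KiB) at compile time and `args`
// sweeps the column count, so divan runs every SIZE x column_size pair.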
#[divan::bench(consts = [32, 64, 128, 256, 512, 1024], args = [128, 256, 512, 1024, 2048, 4096], sample_count = 1, sample_size = 30)]
fn verify<const SIZE: usize>(bencher: Bencher, column_size: usize) {
bencher
.with_inputs(|| {
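            // Setup (excluded from timing): encode SIZE KiB of random data,
            // then derive a verifier and a DaBlob for the first column.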
let params = DaEncoderParams::new(column_size, true);
let encoder = DaEncoder::new(params);
let data = rand_data(SIZE * MB / DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE);
let encoded_data = encoder.encode(&data).unwrap();
let mut buff = [0u8; 32];
let mut rng = thread_rng();
rng.fill_bytes(&mut buff);
let sk = SecretKey::key_gen(&buff, &[]).unwrap();
let verifier = DaVerifier::new(sk.clone(), &[sk.sk_to_pk()]);
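            // Assemble the blob from the first column together with its
            // commitments and proofs.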
let da_blob = DaBlob {
column: encoded_data.extended_data.columns().next().unwrap(),
column_commitment: encoded_data
.column_commitments
.iter()
.next()
.copied()
.unwrap(),
aggregated_column_commitment: encoded_data.aggregated_column_commitment.clone(),
aggregated_column_proof: encoded_data
.aggregated_column_proofs
.iter()
.next()
.copied()
.unwrap(),
rows_commitments: encoded_data.row_commitments.clone(),
rows_proofs: encoded_data
.rows_proofs
.iter()
.map(|row| row.iter().next().copied().unwrap())
.collect(),
};
(verifier, da_blob)
})
.input_counter(|_| BytesCount::new(SIZE))
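        // Timed section: `bench_refs` lends the input by reference, and the
        // new by-reference `verify` accepts it without cloning the blob.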
.bench_refs(|(verifier, blob)| black_box(verifier.verify(blob, column_size).unwrap()));
}
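
For context, a minimal sketch of the divan pattern the bench relies on (assuming divan's 0.1 API; the vector input is a placeholder, not the kzgrs types): setup built in `with_inputs` stays outside the measured section, and `bench_refs` lends each input to the closure by mutable reference, which is exactly why `verify` accepting `&DaBlob` keeps the timed loop clone-free.

use divan::Bencher;
use std::hint::black_box;

fn main() {
    divan::main()
}

#[divan::bench]
fn pattern(bencher: Bencher) {
    bencher
        // Generated outside the timed section.
        .with_inputs(|| vec![0u8; 1024])
        // Timed body: the input arrives as `&mut Vec<u8>`, so a by-value
        // API here would force a clone inside the loop.
        .bench_refs(|data| black_box(data.len()));
}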

View File

@@ -283,7 +283,7 @@ mod tests {
.map(|proofs| proofs.get(i).cloned().unwrap())
.collect(),
};
-attestations.push(verifier.verify(da_blob, domain_size).unwrap());
+attestations.push(verifier.verify(&da_blob, domain_size).unwrap());
}
attestations
}

View File

@@ -134,7 +134,7 @@ impl DaVerifier {
}
}
-pub fn verify(&self, blob: DaBlob, rows_domain_size: usize) -> Option<Attestation> {
+pub fn verify(&self, blob: &DaBlob, rows_domain_size: usize) -> Option<Attestation> {
let rows_domain = PolynomialEvaluationDomain::new(rows_domain_size)
.expect("Domain should be able to build");
let is_column_verified = DaVerifier::verify_column(
@@ -159,7 +159,7 @@ impl DaVerifier {
if !are_chunks_verified {
return None;
}
-Some(self.build_attestation(&blob))
+Some(self.build_attestation(blob))
}
}
@@ -251,7 +251,7 @@ mod test {
.map(|proofs| proofs.get(i).cloned().unwrap())
.collect(),
};
-assert!(verifier.verify(da_blob, domain_size).is_some());
+assert!(verifier.verify(&da_blob, domain_size).is_some());
}
}
}
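
With `verify` taking `&DaBlob`, call sites keep ownership of the blob after verification: the tests above simply borrow `da_blob`, and the benchmark's `bench_refs` closure can forward its borrowed input without a per-iteration clone.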

View File

@@ -48,7 +48,7 @@ impl DaVerifier for KzgrsDaVerifier {
// TODO: Prepare the domain depending on the size: if it is fixed, use a fixed
// domain; otherwise the size needs to come with some metadata.
let domain_size = 2usize;
-match self.verifier.verify(blob, domain_size) {
+match self.verifier.verify(&blob, domain_size) {
Some(attestation) => Ok(attestation),
None => Err(KzgrsDaVerifierError::VerificationError),
}