CI: kzg bench update (#813)

* Update kzg benches

* Add bench check to CI
Daniel Sanchez 2024-10-08 18:39:38 +02:00 committed by GitHub
parent c983eb2260
commit 11ddb23795
3 changed files with 19 additions and 4 deletions

View File

@@ -12,7 +12,7 @@ name: PR check
 jobs:
   check:
     name: Check
-    runs-on: ['self-hosted']
+    runs-on: [ 'self-hosted' ]
     strategy:
       fail-fast: true
       matrix:
@@ -112,6 +112,15 @@ jobs:
         with:
           command: test
           args: --no-default-features --features ${{ matrix.feature }}
+      - name: Cargo bench (no-run)
+        if: matrix.os != 'windows-latest'
+        uses: actions-rs/cargo@v1
+        env:
+          RISC0_DEV_MODE: true
+          CONSENSUS_SLOT_TIME: 5
+        with:
+          command: bench
+          args: --no-run
       - uses: actions/upload-artifact@v3
         if: failure()
         with:
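The new step runs cargo bench with --no-run, which compiles every benchmark target without executing it, so a bench that has drifted out of sync with the crate APIs fails the PR check instead of breaking silently; the two kzg bench fixes below are exactly that kind of drift. The step is skipped on Windows runners, and the RISC0_DEV_MODE / CONSENSUS_SLOT_TIME variables are presumably needed by other workspace crates at build time.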

View File

@@ -1,6 +1,7 @@
 use divan::counter::BytesCount;
 use divan::Bencher;
 use kzgrs_backend::encoder::{DaEncoder, DaEncoderParams};
+use kzgrs_backend::global::GLOBAL_PARAMETERS;
 use rand::RngCore;
 use std::hint::black_box;
@@ -19,7 +20,7 @@ pub fn rand_data(elements_count: usize) -> Vec<u8> {
 fn encode<const SIZE: usize>(bencher: Bencher, column_size: usize) {
     bencher
         .with_inputs(|| {
-            let params = DaEncoderParams::new(column_size, true);
+            let params = DaEncoderParams::new(column_size, true, GLOBAL_PARAMETERS.clone());
             (
                 DaEncoder::new(params),
                 rand_data(SIZE * MB / DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE),
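
The changed line sits inside the divan with_inputs setup closure; the rest of the bench body is outside this hunk. A minimal sketch of how the updated encode bench presumably reads, assuming the usual divan scaffolding (input_counter, bench_values) and an encode method from the nomos_core::da::DaEncoder trait, neither of which is shown in the diff:

    fn encode<const SIZE: usize>(bencher: Bencher, column_size: usize) {
        bencher
            .with_inputs(|| {
                // The encoder params now take an explicit copy of the shared KZG setup
                // exported as kzgrs_backend::global::GLOBAL_PARAMETERS.
                let params = DaEncoderParams::new(column_size, true, GLOBAL_PARAMETERS.clone());
                (
                    DaEncoder::new(params),
                    rand_data(SIZE * MB / DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE),
                )
            })
            // Assumed scaffolding: count input bytes for throughput and bench by value.
            .input_counter(|(_, data)| BytesCount::new(data.len()))
            .bench_values(|(encoder, data)| black_box(encoder.encode(&data)));
    }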

View File

@@ -3,6 +3,7 @@ use divan::counter::BytesCount;
 use divan::Bencher;
 use kzgrs_backend::common::blob::DaBlob;
 use kzgrs_backend::encoder::{DaEncoder, DaEncoderParams};
+use kzgrs_backend::global::GLOBAL_PARAMETERS;
 use kzgrs_backend::verifier::DaVerifier;
 use nomos_core::da::DaEncoder as _;
 use rand::{thread_rng, RngCore};
@@ -23,7 +24,7 @@ pub fn rand_data(elements_count: usize) -> Vec<u8> {
 fn verify<const SIZE: usize>(bencher: Bencher, column_size: usize) {
     bencher
         .with_inputs(|| {
-            let params = DaEncoderParams::new(column_size, true);
+            let params = DaEncoderParams::new(column_size, true, GLOBAL_PARAMETERS.clone());
             let encoder = DaEncoder::new(params);
             let data = rand_data(SIZE * MB / DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE);
@@ -32,7 +33,11 @@ fn verify<const SIZE: usize>(bencher: Bencher, column_size: usize) {
             let mut rng = thread_rng();
             rng.fill_bytes(&mut buff);
             let sk = SecretKey::key_gen(&buff, &[]).unwrap();
-            let verifier = DaVerifier::new(sk.clone(), &[sk.sk_to_pk()]);
+            let verifier = DaVerifier::new(
+                sk.clone(),
+                (0..column_size as u32).collect(),
+                GLOBAL_PARAMETERS.clone(),
+            );
             let da_blob = DaBlob {
                 column: encoded_data.extended_data.columns().next().unwrap(),
                 column_idx: 0,
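
The verifier constructor changes shape as well: instead of a secret key plus a slice of public keys, DaVerifier::new is now given the secret key, the column indices this verifier is responsible for (here all of 0..column_size), and a clone of the same GLOBAL_PARAMETERS used by the encoder. The concrete collection type produced by .collect() is not visible in this hunk; it is whatever the new second parameter expects.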