Da: v1 verifier (#635)

* Fix encoding and test

* Implement commit row commitments

* Implemented DaBlob

* Implement verifier new
Implement verifier check column

* Clippy cleanup

* Implement verifier

* Implemented verify column test

* Implemented full verify test
Daniel Sanchez 2024-04-21 08:05:59 +02:00 committed by GitHub
parent 52ac43c53c
commit e1ebcd8c36
7 changed files with 322 additions and 34 deletions
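
Taken together, the changes below make the encoder's outputs public and add a DaVerifier that checks a single column of the extended data against the published commitments, returning a signed Attestation on success. A rough sketch of the intended flow, pieced together from the code in this diff (`encoder`, `data`, `index`, `sk` and `nodes_public_keys` are placeholders, set up roughly as in the tests further down):

    // Encode the data; all EncodedData fields are public after this commit.
    let encoded_data = encoder.encode(&data).unwrap();
    // Each DA node derives its column index from its position in the sorted public-key set.
    let verifier = DaVerifier::new(sk, &nodes_public_keys);
    // Build the per-column blob this node is responsible for (index must match the verifier's own index).
    let column = encoded_data.extended_data.columns().nth(index).unwrap();
    let blob = DaBlob {
        column,
        column_commitment: encoded_data.column_commitments[index].clone(),
        aggregated_column_commitment: encoded_data.aggregated_column_commitment.clone(),
        aggregated_column_proof: encoded_data.aggregated_column_proofs[index].clone(),
        rows_commitments: encoded_data.row_commitments.clone(),
        rows_proofs: encoded_data.rows_proofs.iter().map(|p| p[index].clone()).collect(),
    };
    // Some(Attestation) only if the column commitment, the aggregated-column proof
    // and every per-row chunk proof verify.
    let attestation = verifier.verify(blob);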

View File

@@ -7,14 +7,17 @@ edition = "2021"
[dependencies]
ark-ff = "0.4"
blake2 = "0.10"
ark-serialize = "0.4.2"
ark-poly = "0.4.2"
blake2 = "0.10"
blst = "0.3.11"
itertools = "0.12"
kzgrs = { path = "../kzgrs" }
num-bigint = "0.4.4"
rand = "0.8.5"
once_cell = "1.19"
sha3 = "0.10"
[dev-dependencies]
rand = "0.8"
itertools = "0.12"
num-bigint = "0.4.4"

View File

@@ -1,6 +1,12 @@
use blake2::digest::{Update, VariableOutput};
use kzgrs::Commitment;
// std
use ark_serialize::CanonicalSerialize;
use std::io::Cursor;
// crates
use blake2::digest::{Update, VariableOutput};
use blst::min_sig::Signature;
use sha3::{Digest, Sha3_256};
// internal
use kzgrs::Commitment;
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Chunk(pub Vec<u8>);
@@ -65,6 +71,18 @@ impl FromIterator<Chunk> for Column {
}
}
impl AsRef<[Chunk]> for Row {
fn as_ref(&self) -> &[Chunk] {
&self.0
}
}
impl AsRef<[Chunk]> for Column {
fn as_ref(&self) -> &[Chunk] {
&self.0
}
}
impl ChunksMatrix {
pub fn len(&self) -> usize {
self.0.len()
@@ -101,18 +119,40 @@ pub fn hash_column_and_commitment<const HASH_SIZE: usize>(
column: &Column,
commitment: &Commitment,
) -> [u8; HASH_SIZE] {
use ark_serialize::CanonicalSerialize;
let mut hasher = blake2::Blake2bVar::new(HASH_SIZE)
.unwrap_or_else(|e| panic!("Blake2b should work for size {HASH_SIZE}, {e}"));
hasher.update(column.as_bytes().as_ref());
let mut buff = Cursor::new(vec![]);
commitment
.serialize_uncompressed(&mut buff)
.expect("Serialization of commitment should work");
hasher.update(buff.into_inner().as_ref());
hasher.update(commitment_to_bytes(commitment).as_ref());
hasher
.finalize_boxed()
.to_vec()
.try_into()
.unwrap_or_else(|_| panic!("Size is guaranteed by constant {HASH_SIZE:?}"))
}
pub fn build_attestation_message(
aggregated_column_commitment: &Commitment,
rows_commitments: &[Commitment],
) -> Vec<u8> {
let mut hasher = Sha3_256::new();
Digest::update(
&mut hasher,
commitment_to_bytes(aggregated_column_commitment),
);
for c in rows_commitments {
Digest::update(&mut hasher, commitment_to_bytes(c));
}
hasher.finalize().to_vec()
}
pub fn commitment_to_bytes(commitment: &Commitment) -> Vec<u8> {
let mut buff = Cursor::new(vec![]);
commitment
.serialize_uncompressed(&mut buff)
.expect("Serialization of commitment should work");
buff.into_inner()
}
pub struct Attestation {
pub signature: Signature,
}
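
The two helpers above serve different purposes; a sketch of how they are consumed later in this diff (variable names are illustrative): hash_column_and_commitment produces the 31-byte per-column digest that the aggregated column commitment is built over and later opened against, while build_attestation_message is the byte string a verifier signs once a blob checks out.

    // 31-byte Blake2b digest of (column bytes || uncompressed commitment); 31 bytes keeps
    // the value below the BLS12-381 scalar modulus when read as a little-endian integer.
    let digest = hash_column_and_commitment::<{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE }>(
        &column,
        &column_commitment,
    );
    // Sha3-256 over the uncompressed aggregated column commitment followed by each row commitment.
    let message = build_attestation_message(&aggregated_column_commitment, &rows_commitments);
    let attestation = Attestation { signature: sk.sign(&message, b"", b"") };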

View File

@@ -1,8 +1,7 @@
// std
use ark_ff::{BigInteger, PrimeField};
use std::ops::Div;
// crates
use ark_ff::{BigInteger, PrimeField};
// internal
use crate::common::{hash_column_and_commitment, Chunk, ChunksMatrix, Row};
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
@@ -18,7 +17,7 @@ pub struct DaEncoderParams {
}
impl DaEncoderParams {
const MAX_BLS12_381_ENCODING_CHUNK_SIZE: usize = 31;
pub const MAX_BLS12_381_ENCODING_CHUNK_SIZE: usize = 31;
const fn default_with(column_count: usize) -> Self {
Self { column_count }
@@ -26,14 +25,14 @@ impl DaEncoderParams {
}
pub struct EncodedData {
data: Vec<u8>,
chunked_data: ChunksMatrix,
extended_data: ChunksMatrix,
row_commitments: Vec<Commitment>,
rows_proofs: Vec<Vec<Proof>>,
column_commitments: Vec<Commitment>,
aggregated_column_commitment: Commitment,
aggregated_column_proofs: Vec<Proof>,
pub data: Vec<u8>,
pub chunked_data: ChunksMatrix,
pub extended_data: ChunksMatrix,
pub row_commitments: Vec<Commitment>,
pub rows_proofs: Vec<Vec<Proof>>,
pub column_commitments: Vec<Commitment>,
pub aggregated_column_commitment: Commitment,
pub aggregated_column_proofs: Vec<Proof>,
}
pub struct DaEncoder {
@@ -63,6 +62,7 @@ impl DaEncoder {
.collect()
}
#[allow(clippy::type_complexity)]
fn compute_kzg_row_commitments(
matrix: &ChunksMatrix,
) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {
@@ -106,6 +106,7 @@ impl DaEncoder {
.collect()
}
#[allow(clippy::type_complexity)]
fn compute_kzg_column_commitments(
matrix: &ChunksMatrix,
) -> Result<Vec<((Evaluations, Polynomial), Commitment)>, KzgRsError> {

View File

@@ -1,3 +1,4 @@
mod common;
mod encoder;
mod global;
mod verifier;

View File

@@ -0,0 +1,241 @@
// std
// crates
use blst::min_sig::{PublicKey, SecretKey, Signature};
use itertools::{izip, Itertools};
use num_bigint::BigUint;
use sha3::{Digest, Sha3_256};
// internal
use crate::common::{
build_attestation_message, hash_column_and_commitment, Attestation, Chunk, Column,
};
use crate::encoder::DaEncoderParams;
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use kzgrs::common::field_element_from_bytes_le;
use kzgrs::{
bytes_to_polynomial, commit_polynomial, verify_element_proof, Commitment, FieldElement, Proof,
BYTES_PER_FIELD_ELEMENT,
};
pub struct DaBlob {
column: Column,
column_commitment: Commitment,
aggregated_column_commitment: Commitment,
aggregated_column_proof: Proof,
rows_commitments: Vec<Commitment>,
rows_proofs: Vec<Proof>,
}
impl DaBlob {
pub fn id(&self) -> Vec<u8> {
build_attestation_message(&self.aggregated_column_commitment, &self.rows_commitments)
}
pub fn column_id(&self) -> Vec<u8> {
let mut hasher = Sha3_256::new();
hasher.update(self.column.as_bytes());
hasher.finalize().as_slice().to_vec()
}
}
pub struct DaVerifier {
// TODO: replace this with an abstraction to sign things over
sk: SecretKey,
index: usize,
}
impl DaVerifier {
pub fn new(sk: SecretKey, nodes_public_keys: &[PublicKey]) -> Self {
// TODO: `is_sorted` is experimental, and by contract `nodes_public_keys` should be sorted
// but not sure how we could enforce it here without re-sorting anyway.
// assert!(nodes_public_keys.is_sorted());
let self_pk = sk.sk_to_pk();
let (index, _) = nodes_public_keys
.iter()
.find_position(|&pk| pk == &self_pk)
.expect("Self pk should be registered");
Self { sk, index }
}
fn verify_column(
column: &Column,
column_commitment: &Commitment,
aggregated_column_commitment: &Commitment,
aggregated_column_proof: &Proof,
index: usize,
) -> bool {
// 1. compute commitment for column
let Ok((_, polynomial)) =
bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), *DOMAIN)
else {
return false;
};
let Ok(computed_column_commitment) = commit_polynomial(&polynomial, &GLOBAL_PARAMETERS)
else {
return false;
};
// 2. if computed column commitment != column commitment, fail
if &computed_column_commitment != column_commitment {
return false;
}
// 3. compute column hash
let column_hash = hash_column_and_commitment::<
{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE },
>(column, column_commitment);
// 4. check proof with commitment and proof over the aggregated column commitment
let element = field_element_from_bytes_le(column_hash.as_slice());
verify_element_proof(
index,
&element,
aggregated_column_commitment,
aggregated_column_proof,
*DOMAIN,
&GLOBAL_PARAMETERS,
)
}
fn verify_chunk(chunk: &Chunk, commitment: &Commitment, proof: &Proof, index: usize) -> bool {
let element = field_element_from_bytes_le(chunk.as_bytes().as_slice());
verify_element_proof(
index,
&element,
commitment,
proof,
*DOMAIN,
&GLOBAL_PARAMETERS,
)
}
fn verify_chunks(
chunks: &[Chunk],
commitments: &[Commitment],
proofs: &[Proof],
index: usize,
) -> bool {
if ![chunks.len(), commitments.len(), proofs.len()]
.iter()
.all_equal()
{
return false;
}
for (chunk, commitment, proof) in izip!(chunks, commitments, proofs) {
if !DaVerifier::verify_chunk(chunk, commitment, proof, index) {
return false;
}
}
true
}
fn build_attestation(&self, blob: &DaBlob) -> Attestation {
let message =
build_attestation_message(&blob.aggregated_column_commitment, &blob.rows_commitments);
let signature = self.sk.sign(&message, b"", b"");
Attestation { signature }
}
pub fn verify(&self, blob: DaBlob) -> Option<Attestation> {
let is_column_verified = DaVerifier::verify_column(
&blob.column,
&blob.column_commitment,
&blob.aggregated_column_commitment,
&blob.aggregated_column_proof,
self.index,
);
if !is_column_verified {
return None;
}
let are_chunks_verified = DaVerifier::verify_chunks(
blob.column.as_ref(),
&blob.rows_commitments,
&blob.rows_proofs,
self.index,
);
if !are_chunks_verified {
return None;
}
Some(self.build_attestation(&blob))
}
}
#[cfg(test)]
mod test {
use crate::common::{hash_column_and_commitment, Chunk, Column};
use crate::encoder::test::{rand_data, ENCODER};
use crate::encoder::{DaEncoder, DaEncoderParams};
use crate::global::{DOMAIN, GLOBAL_PARAMETERS};
use crate::verifier::{DaBlob, DaVerifier};
use blst::min_sig::{PublicKey, SecretKey};
use kzgrs::{
bytes_to_polynomial, commit_polynomial, generate_element_proof, BYTES_PER_FIELD_ELEMENT,
};
use rand::{thread_rng, RngCore};
#[test]
fn test_verify_column() {
let column: Column = (0..10).map(|i| Chunk(vec![i; 32])).collect();
let (_, column_poly) =
bytes_to_polynomial::<BYTES_PER_FIELD_ELEMENT>(column.as_bytes().as_slice(), *DOMAIN)
.unwrap();
let column_commitment = commit_polynomial(&column_poly, &GLOBAL_PARAMETERS).unwrap();
let (_, aggregated_poly) = bytes_to_polynomial::<
{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE },
>(
hash_column_and_commitment::<{ DaEncoderParams::MAX_BLS12_381_ENCODING_CHUNK_SIZE }>(
&column,
&column_commitment,
)
.as_slice(),
*DOMAIN,
)
.unwrap();
let aggregated_commitment =
commit_polynomial(&aggregated_poly, &GLOBAL_PARAMETERS).unwrap();
let column_proof =
generate_element_proof(0, &aggregated_poly, &GLOBAL_PARAMETERS, *DOMAIN).unwrap();
assert!(DaVerifier::verify_column(
&column,
&column_commitment,
&aggregated_commitment,
&column_proof,
0
));
}
#[test]
fn test_verify() {
let encoder = &ENCODER;
let data = rand_data(8);
let mut rng = thread_rng();
let sks: Vec<SecretKey> = (0..16)
.map(|_| {
let mut buff = [0u8; 32];
rng.fill_bytes(&mut buff);
SecretKey::key_gen(&buff, &[]).unwrap()
})
.collect();
let verifiers: Vec<DaVerifier> = sks
.into_iter()
.enumerate()
.map(|(index, sk)| DaVerifier { sk, index })
.collect();
let encoded_data = encoder.encode(&data).unwrap();
for (i, column) in encoded_data.extended_data.columns().enumerate() {
let verifier = &verifiers[i];
let da_blob = DaBlob {
column,
column_commitment: encoded_data.column_commitments[i].clone(),
aggregated_column_commitment: encoded_data.aggregated_column_commitment.clone(),
aggregated_column_proof: encoded_data.aggregated_column_proofs[i].clone(),
rows_commitments: encoded_data.row_commitments.clone(),
rows_proofs: encoded_data
.rows_proofs
.iter()
.map(|proofs| proofs.get(i).cloned().unwrap())
.collect(),
};
assert!(verifier.verify(da_blob).is_some());
}
}
}
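
The commit stops at producing the Attestation; nothing here consumes it yet. For completeness, checking such a signature on the receiving side with the same blst API would look roughly like the sketch below (not part of this diff; `attestation`, `blob` and `node_public_key` are assumed to be in scope, and the empty DST/aug arguments mirror the `sign(&message, b"", b"")` call above):

    use blst::BLST_ERROR;

    // DaBlob::id() is exactly the attestation message (see verifier.rs above), so the
    // receiver can recompute it and check the BLS signature against the node's public key.
    let message = blob.id();
    let accepted = attestation
        .signature
        .verify(true, &message, b"", b"", &node_public_key, true)
        == BLST_ERROR::BLST_SUCCESS;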

View File

@@ -1,6 +1,6 @@
// std
// crates
use crate::BYTES_PER_FIELD_ELEMENT;
use crate::{FieldElement, BYTES_PER_FIELD_ELEMENT};
use ark_bls12_381::fr::Fr;
use ark_ff::Zero;
use ark_poly::domain::general::GeneralEvaluationDomain;
@@ -32,12 +32,11 @@ pub fn bytes_to_evaluations<const CHUNK_SIZE: usize>(
assert!((data.len() % CHUNK_SIZE).is_zero());
Evaluations::from_vec_and_domain(
data.chunks(CHUNK_SIZE)
.map(|e| {
.map(
// use little endian for convenience, as shortening by 1 byte (<32 bytes supported)
// does not matter in this endianness
let bui = BigUint::from_bytes_le(e);
Fr::from(bui)
})
field_element_from_bytes_le,
)
.collect(),
domain,
)
@@ -78,6 +77,15 @@ pub fn bytes_to_polynomial_unchecked<const CHUNK_SIZE: usize>(
(evals, coefficients)
}
/// Transform arbitrary bytes into a field element.
/// This transformation is bounds-unchecked; it is up to the caller to ensure the
/// data fits within the BLS modulus.
/// The data length cannot exceed `BYTES_PER_FIELD_ELEMENT`.
pub fn field_element_from_bytes_le(b: &[u8]) -> FieldElement {
assert!(b.len() <= BYTES_PER_FIELD_ELEMENT);
FieldElement::from(BigUint::from_bytes_le(b))
}
#[cfg(test)]
mod test {
use super::{bytes_to_evaluations, bytes_to_polynomial, KzgRsError};

View File

@@ -36,13 +36,7 @@ pub fn decode(
let (points, roots_of_unity): (Vec<Fr>, Vec<Fr>) = points
.iter()
.enumerate()
.flat_map(|(i, e)| {
if let Some(e) = e {
Some((*e, domain.element(i)))
} else {
None
}
})
.flat_map(|(i, e)| e.map(|e| (e, domain.element(i))))
.unzip();
let coeffs = lagrange_interpolate(&points, &roots_of_unity);
Evaluations::from_vec_and_domain(