mirror of https://github.com/logos-storage/dynamic-data-experiments.git
synced 2026-01-02 13:13:08 +00:00

initial impl

This commit is contained in:
parent c314dacf08
commit c43a18c373
1 .gitignore vendored
@@ -1 +1,2 @@
/target
*.bak
170 Cargo.lock generated
@@ -2,6 +2,17 @@
# It is not intended for manual editing.
version = 4

[[package]]
name = "ahash"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
dependencies = [
 "getrandom 0.2.16",
 "once_cell",
 "version_check",
]

[[package]]
name = "ahash"
version = "0.8.12"
@@ -105,7 +116,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e0c292754729c8a190e50414fd1a37093c786c709899f29c9f7daccecfa855e"
dependencies = [
 "ahash",
 "ahash 0.8.12",
 "ark-crypto-primitives-macros",
 "ark-ec",
 "ark-ff",
@@ -139,7 +150,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce"
dependencies = [
 "ahash",
 "ahash 0.8.12",
 "ark-ff",
 "ark-poly",
 "ark-serialize",
@@ -205,7 +216,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27"
dependencies = [
 "ahash",
 "ahash 0.8.12",
 "ark-ff",
 "ark-serialize",
 "ark-std",
@@ -221,7 +232,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d68a105d915bcde6c0687363591c97e72d2d3758f3532d48fd0bf21a3261ce7"
dependencies = [
 "ahash",
 "ahash 0.8.12",
 "ark-crypto-primitives",
 "ark-ec",
 "ark-ff",
@@ -311,6 +322,12 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"

[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

[[package]]
name = "bitflags"
version = "2.9.1"
@@ -341,6 +358,15 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"

[[package]]
name = "cc"
version = "1.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d487aa071b5f64da6f19a3e848e3578944b726ee5a4854b82172f02aa876bfdc"
dependencies = [
 "shlex",
]

[[package]]
name = "cfg-if"
version = "1.0.1"
@@ -466,6 +492,7 @@ dependencies = [
 "anyhow",
 "ark-bls12-381",
 "ark-bn254",
 "ark-crypto-primitives",
 "ark-ec",
 "ark-ff",
 "ark-poly",
@@ -474,6 +501,7 @@ dependencies = [
 "clap",
 "itertools 0.14.0",
 "rand 0.9.1",
 "reed-solomon-erasure",
]

[[package]]
@@ -553,6 +581,15 @@ dependencies = [
 "wasi 0.14.2+wasi-0.2.4",
]

[[package]]
name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
 "ahash 0.7.8",
]

[[package]]
name = "hashbrown"
version = "0.14.5"
@@ -577,6 +614,15 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"

[[package]]
name = "instant"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
dependencies = [
 "cfg-if",
]

[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@@ -622,6 +668,25 @@ version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"

[[package]]
name = "lock_api"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
dependencies = [
 "autocfg",
 "scopeguard",
]

[[package]]
name = "lru"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a"
dependencies = [
 "hashbrown 0.12.3",
]

[[package]]
name = "merlin"
version = "3.0.0"
@@ -675,6 +740,31 @@ version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"

[[package]]
name = "parking_lot"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
 "instant",
 "lock_api",
 "parking_lot_core",
]

[[package]]
name = "parking_lot_core"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
 "cfg-if",
 "instant",
 "libc",
 "redox_syscall",
 "smallvec",
 "winapi",
]

[[package]]
name = "paste"
version = "1.0.15"
@@ -799,6 +889,36 @@ dependencies = [
 "crossbeam-utils",
]

[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
 "bitflags 1.3.2",
]

[[package]]
name = "reed-solomon-erasure"
version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7263373d500d4d4f505d43a2a662d475a894aa94503a1ee28e9188b5f3960d4f"
dependencies = [
 "cc",
 "libc",
 "libm",
 "lru",
 "parking_lot",
 "smallvec",
 "spin",
]

[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"

[[package]]
name = "sha2"
version = "0.10.9"
@@ -810,6 +930,24 @@ dependencies = [
 "digest",
]

[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"

[[package]]
name = "smallvec"
version = "1.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"

[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"

[[package]]
name = "strsim"
version = "0.11.1"
@@ -918,6 +1056,28 @@ dependencies = [
 "wit-bindgen-rt",
]

[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
 "winapi-i686-pc-windows-gnu",
 "winapi-x86_64-pc-windows-gnu",
]

[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"

[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

[[package]]
name = "windows-sys"
version = "0.59.0"
@@ -997,7 +1157,7 @@ version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
 "bitflags",
 "bitflags 2.9.1",
]

[[package]]
Cargo.toml
@@ -17,4 +17,6 @@ ark-bls12-381 = "0.5.0"
ark-bn254 = "0.5.0"
ark-poly = "0.5.0"
ark-poly-commit = "0.5.0"
anyhow = "1.0.95"
anyhow = "1.0.95"
reed-solomon-erasure = { version = "6.0.0", features = [ "simd-accel" ] }
ark-crypto-primitives = "0.5.0"
38 README.md
@@ -1,5 +1,43 @@
Dynamic Data Experiments
================================
This is a prototype implementation of the proposed Codex storage proofs for dynamic data.

### Erasure Coding & Commitment
- [x] Organize data as a byte Matrix with `k` rows and `m` columns
- [x] Convert the byte Matrix to a Field Matrix with `k` rows and `m` columns
- [ ] Each cell in the Field matrix is "fat" (fat cell = `z` field elements) -> end up with a `(k/z)`*`m` Matrix
- [x] Erasure code the columns -> end up with an `n`*`m` Matrix
- [ ] Commit to each "fat" cell in each row independently with KZG
- [x] Commit to each row independently with KZG
- [ ] Build a Merkle tree with the KZG commitments

**Note:** in the above I switched the directions of the encoding and commitment (opposite of the [proposal](https://hackmd.io/kPGC3VIZSaWj8DBYOjd4vA?view)) just because it was easier to implement, but it is basically the same thing.

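The code added in this commit already covers the checked items above. For orientation, here is a minimal sketch of that part of the pipeline (random byte matrix → Reed-Solomon encode → field matrix → per-row KZG commitments), written as it would appear in an in-crate test and mirroring `src/test.rs::test_commit_rows`; the "fat cell" and Merkle-tree steps are not implemented yet, and `commit_pipeline` is just an illustrative wrapper, not part of the crate.

```rust
// Sketch only, mirroring `src/test.rs::test_commit_rows`.
use crate::byte_data::{Data, Params};
use crate::field_matrix::Matrix;
use crate::kzg::KZGPolyComm;
use crate::traits::{DataMatrix, Encoder, PolynomialCommitmentScheme};

fn commit_pipeline() -> anyhow::Result<()> {
    // k data rows, n total rows (n - k parity rows), m columns
    let params = Params { k: 4, n: 8, m: 8 };

    // 1. random byte matrix, then Reed-Solomon encode the columns in place
    let mut data = Data::new_random(params.clone());
    data.encode()?;

    // 2. lift the bytes into field elements
    let matrix = Matrix::from_data(&data);

    // 3. commit to each row independently with KZG
    let kzg = KZGPolyComm::new(params);
    let srs = kzg.setup()?;
    let commitments = kzg.commit(&srs, &matrix)?;

    // one labeled commitment per row of the encoded matrix
    assert_eq!(commitments.comm.len(), matrix.rows);
    Ok(())
}
```
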
### Sampling
- [ ] Select a set of columns randomly
- [ ] Generate a KZG evaluation proof at a random point for each column
- [ ] Aggregate the KZG evaluation proofs

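Sampling is not implemented yet. The closest primitive this commit provides is the per-cell `open`/`verify` pair on the row commitments (see `src/kzg.rs` and `test_open_commitments`); below is a rough, illustrative sketch of column sampling built on that primitive, still without the aggregation step. `sample_columns` is a hypothetical helper, not part of the crate.

```rust
// Illustrative sketch only: column sampling via per-cell openings.
// Proof aggregation is still TODO.
use rand::Rng;
use crate::field_matrix::Matrix;
use crate::kzg::{KZGCommitments, KZGPolyComm, KZGSRS, F};
use crate::traits::PolynomialCommitmentScheme;

fn sample_columns(
    comms: &KZGCommitments,
    srs: &KZGSRS,
    matrix: &Matrix<F>,
    samples: usize,
) -> anyhow::Result<bool> {
    let mut rng = rand::rng();
    for _ in 0..samples {
        // pick a column at random ...
        let col = rng.random_range(0..matrix.cols);
        // ... and open every row commitment at that column's domain point
        for row in 0..matrix.rows {
            let proof = KZGPolyComm::open(comms, srs, row, col)?;
            let value = matrix.row(row)[col];
            if !KZGPolyComm::verify(comms, srs, row, col, value, &proof)? {
                return Ok(false);
            }
        }
    }
    Ok(true)
}
```
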
### Updating the Data
- [x] Select a row (or multiple)
- [x] Query the original row
- [x] Update the cells in that row
- [x] Erasure code the updated row

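In the code the encoding direction is switched (see the note above), so this update path currently operates on a *column* of the byte matrix via `update_col` and `encode_col`. A minimal sketch mirroring `src/test.rs::test_update_col` follows; `update_one_column` is just an illustrative wrapper.

```rust
// Sketch only, mirroring `src/test.rs::test_update_col`.
use crate::byte_data::{Data, Params};
use crate::traits::{DataMatrix, Encoder};

fn update_one_column() -> anyhow::Result<()> {
    let params = Params { k: 4, n: 8, m: 8 };
    let mut data = Data::new_random(params);
    data.encode()?;

    // overwrite column 5 of the k data rows with fresh bytes ...
    let c = 5;
    let new_col: Vec<u8> = (0..4).map(|i| i as u8).collect();
    data.update_col(c, &new_col);

    // ... and re-encode just that column to refresh its parity
    let full_col = data.encode_col(c)?;
    assert_eq!(full_col.len(), 8); // k data bytes + (n - k) parity bytes
    Ok(())
}
```
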
### Updating the Commitments
- [x] Query the old row and receive the new row
- [ ] Compute the `delta` = `r'` - `r`
- [ ] Query the old "fat" cell commitment and compute the new one
- [ ] Compute the `delta_comm` = `fat_comm'` - `fat_comm`
- [ ] Compute the new row commitment `row_comm'` = `row_comm` + `delta_comm`

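The row-commitment part of this is the homomorphic step implemented by `KZGPolyComm::update_commitments` in this commit: commit to the polynomial interpolating `new_row - old_row` and add that commitment to the stored row commitment, i.e. `row_comm' = row_comm + delta_comm` (the "fat cell" granularity is still missing). A minimal sketch following `src/test.rs::test_update_commitments`; `update_row_commitment` is a hypothetical wrapper around the real API.

```rust
// Sketch only, mirroring `src/test.rs::test_update_commitments`.
use crate::field_matrix::Matrix;
use crate::kzg::{KZGCommitments, KZGPolyComm, KZGSRS, F};
use crate::traits::PolynomialCommitmentScheme;

fn update_row_commitment(
    srs: &KZGSRS,
    comms: &mut KZGCommitments,
    matrix: &mut Matrix<F>,
    row_idx: usize,
) -> anyhow::Result<()> {
    // the update: add a constant to every cell of the row (any change works)
    let old_row = matrix.row(row_idx);
    let new_row: Vec<F> = old_row.iter().map(|v| *v + F::from(10u64)).collect();

    // keep the in-memory matrix in sync with the update
    matrix.row_mut(row_idx).copy_from_slice(&new_row);

    // internally this commits to the interpolation of (new_row - old_row) and
    // adds it to the stored row commitment: row_comm' = row_comm + delta_comm
    KZGPolyComm::update_commitments(srs, comms, row_idx, &old_row, &new_row)
}
```
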
### Prove Data Update
- [ ] TODO...

### TODO:
- [ ] Clean up and optimize
- [ ] Simulate interactions between Client (Data Owner) and SP (Storage Provider)
- [ ] Add details and write-up & experimentation/benchmark results

**WARNING**: This repository contains work-in-progress prototypes, and has not received careful code review. It is NOT ready for production use.

80 src/byte_data.rs Normal file
@@ -0,0 +1,80 @@
use rand::Rng;
use crate::traits::DataMatrix;

/// parameters for the data
/// - k: number of data rows
/// - n: number of data + parity rows
/// - m: number of columns
#[derive(Clone, Debug)]
pub struct Params {
    pub k: usize,
    pub n: usize,
    pub m: usize,
}
/// data struct containing a shards matrix where each "shard" is a row;
/// the matrix contains n rows, k of which are source data and the rest (n-k) are parity
#[derive(Clone, Debug)]
pub struct Data<T> {
    pub params: Params,
    pub matrix: Vec<Vec<T>>,
}

impl DataMatrix<u8> for Data<u8> {
    type Params = Params;

    /// new from random
    fn new_random(params: Self::Params) -> Self {
        let mut rng = rand::rng();
        // generate random data shards
        let matrix: Vec<Vec<u8>> = (0..params.n)
            .map(|i| {
                if i < params.k {
                    // data shard: random u8
                    (0..params.m).map(|_| rng.random::<u8>()).collect()
                } else {
                    // parity shard: zero
                    vec![0u8; params.m]
                }
            })
            .collect();
        Self {
            params,
            matrix,
        }
    }

    /// Update column `c` in the shards.
    /// The given `new_col` replaces column `c`, i.e. `shards[0..k][c]`
    fn update_col(&mut self, c: usize, new_col: &[u8]) {
        // sanity checks
        assert!(
            new_col.len() == self.params.k,
            "new_col length ({}) must equal k ({})",
            new_col.len(),
            self.params.k
        );
        assert!(
            c < self.params.m,
            "column index {} out of bounds; must be < {}",
            c,
            self.params.m
        );

        // write into each of the k data shards at position c
        for i in 0..self.params.k {
            self.matrix[i][c] = new_col[i];
        }
    }

    /// Print all shards
    fn pretty_print(&self) {
        for (i, shard) in self.matrix.iter().enumerate() {
            print!("Row {:>2}: ", i);
            for &b in shard {
                print!("{:>3} ", b);
            }
            println!();
        }
    }
}
117 src/dynamic_data.rs
@@ -1,117 +0,0 @@
use ark_bls12_381::Bls12_381;
use ark_ec::pairing::Pairing;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial, Evaluations, EvaluationDomain, GeneralEvaluationDomain};
use ark_poly_commit::{Polynomial, marlin_pc::MarlinKZG10, LabeledPolynomial, PolynomialCommitment, QuerySet, LabeledCommitment};
use ark_poly_commit::marlin_pc::{Commitment, Randomness,};
use anyhow::{anyhow, Result};
use ark_poly_commit::sonic_pc::UniversalParams;
use ark_std::{end_timer, start_timer, test_rng};
use crate::matrix::Matrix;

type E = Bls12_381;
type F = <E as Pairing>::ScalarField;
type UniPoly381 = DensePolynomial<F>;
type PCS = MarlinKZG10<E, UniPoly381>;

// struct for the dynamic data scheme,
// contains the params and functions needed for the dynamic data scheme
pub struct DynamicData {
    n: usize, // the row size of the data matrix - un-coded
    k: usize, // the row size of the erasure coded data matrix
    m: usize, // the column size of the matrix

    ploycommit_domain: GeneralEvaluationDomain<F>,
    encoding_domain: GeneralEvaluationDomain<F>,

    pp: UniversalParams<Bls12_381>
}

impl DynamicData {
    // setup the dynamic data scheme
    pub fn setup(n: usize, k: usize, m: usize) -> Result<Self> {
        let rng = &mut test_rng();
        let pp = PCS::setup(m, None, rng)?;
        let ploycommit_domain = EvaluationDomain::<F>::new(m).ok_or(anyhow!("polycommit domain error"))?;
        let encoding_domain = EvaluationDomain::<F>::new(n).ok_or(anyhow!("encoding domain error"))?;
        Ok(Self {
            n,
            k,
            m,
            ploycommit_domain,
            encoding_domain,
            pp,
        })
    }

    pub fn commit_columns(&self, matrix: Matrix<F>) -> Result<(
        Vec<LabeledPolynomial<F, UniPoly381>>,
        Vec<LabeledCommitment<Commitment<E>>>,
        Vec<Randomness<F, UniPoly381>>,
    )> {
        let rng = &mut test_rng();
        let degree = self.m;
        let (ck, vk) = PCS::trim(&self.pp, degree, 2, Some(&[degree])).unwrap();
        let mut col_polynomials = vec![];
        let timer = start_timer!(|| format!("Poly evaluations and interpolation for {} columns", degree));
        for i in 0..matrix.cols() {
            let poly_evals = Evaluations::from_vec_and_domain(matrix.column(i), self.ploycommit_domain.clone());
            let col_poly = poly_evals.interpolate();
            let label = String::from(format!("column_poly_{}", i));
            let labeled_poly = LabeledPolynomial::new(
                label,
                col_poly,
                Some(degree),
                Some(2),
            );
            col_polynomials.push(labeled_poly);
        }

        let timer = start_timer!(|| format!("KZG commitment for {} columns", degree));
        let (labeled_comms, states) = PCS::commit(&ck, &col_polynomials, Some(rng)).unwrap();
        end_timer!(timer);
        Ok((col_polynomials, labeled_comms, states))
    }

}

#[cfg(test)]
mod tests {
    use super::*;
    use ark_std::test_rng;

    #[test]
    fn test_commit_columns_roundtrip() {
        // dimensions: 3 rows, 2 columns
        let n = 8;
        let k = 4;
        let m = 8;

        // setup
        let rng = &mut test_rng();
        let dd = DynamicData::setup(n, k, m).expect("setup should succeed");

        // make a random n×m matrix
        let matrix = Matrix::new_random(n, m, rng);

        // commit to its columns
        let (col_polys, commitments, randomness) =
            dd.commit_columns(matrix.clone()).expect("commit_columns should succeed");

        // we produced exactly one polynomial, one comm, one rand per column
        assert_eq!(col_polys.len(), m);
        assert_eq!(commitments.len(), m);
        assert_eq!(randomness.len(), m);

        // check that each polynomial really interpolates its original column
        for (i, poly) in col_polys.iter().enumerate() {
            let col = matrix.column(i);
            // evaluate poly at each domain point and collect
            let evals: Vec<_> = dd
                .ploycommit_domain
                .elements()
                .map(|x| poly.polynomial().evaluate(&x))
                .collect();
            assert_eq!(evals, col);
        }
    }
}
76 src/encoder.rs Normal file
@@ -0,0 +1,76 @@
use anyhow::{anyhow, Result};
use reed_solomon_erasure::galois_8::ReedSolomon;
use crate::byte_data::Data;
use crate::traits::Encoder;


impl Encoder<u8> for Data<u8> {
    /// encode the columns of the data matrix in place
    fn encode(&mut self) -> Result<()> {
        let n = self.params.n;
        assert!(self.params.k < n, "k must be less than total shards");
        let p = n - self.params.k;

        // ensure all shards are the same length
        let shard_size = self.matrix[0].len();
        for shard in &self.matrix[1..] {
            assert_eq!(shard.len(), shard_size, "all shards must have equal length");
        }

        // build the encoder
        let rse = ReedSolomon::new(self.params.k, p)?;

        // prepare mutable slice references for in-place encode
        let mut shards_refs: Vec<&mut [u8]> = self.matrix.iter_mut()
            .map(|v| v.as_mut_slice())
            .collect();

        // encode
        rse.encode(&mut shards_refs)?;
        Ok(())
    }

    fn encode_col(&mut self, c: usize) -> Result<Vec<u8>> {
        // bounds check
        if c >= self.params.m {
            return Err(anyhow!("column index {} out of bounds (< {})", c, self.params.m));
        }

        let n = self.params.n;
        let k = self.params.k;
        let p = n - k;

        // Build the column: data = existing byte, parity = zero
        let mut temp: Vec<Vec<u8>> = (0..n)
            .map(|i| {
                let byte = self.matrix[i][c];
                if i < k {
                    vec![byte]
                } else {
                    vec![0u8]
                }
            })
            .collect();
        let mut refs: Vec<&mut [u8]> = temp.iter_mut().map(|v| v.as_mut_slice()).collect();

        // Encode that stripe
        let rse = ReedSolomon::new(k, p)?;
        rse.encode(&mut refs)?;

        // Write back parity and collect the full column
        let mut full_col = Vec::with_capacity(n);
        for i in 0..n {
            let b = refs[i][0];
            if i >= k {
                self.matrix[i][c] = b;
            }
            full_col.push(b);
        }

        Ok(full_col)
    }

    fn reconstruct(&mut self) -> Result<()> {
        todo!()
    }
}
81 src/field_matrix.rs Normal file
@@ -0,0 +1,81 @@
use ark_ff::Field;
use ark_std::rand::Rng;
use ark_std::UniformRand;
use crate::byte_data::Data;


/// a Field matrix with `rows` number of rows and `cols` number of columns
#[derive(Clone, Debug, PartialEq)]
pub struct Matrix<F: Field + Clone> {
    pub rows: usize,
    pub cols: usize,
    pub data: Vec<Vec<F>>,
}

impl<F: Field + Clone> Matrix<F> {
    /// Creates a new matrix from given field data.
    pub fn new(rows: usize, cols: usize, data: Vec<Vec<F>>) -> Self {
        assert!(data.len() == rows, "number of rows must match");
        for row in &data {
            assert!(row.len() == cols, "each row must have `cols` elements");
        }
        Matrix { rows, cols, data }
    }

    /// Generates a random matrix with the given dimensions, using the given rng for randomness.
    pub fn new_random<R: Rng + ?Sized>(rows: usize, cols: usize, rng: &mut R) -> Self
    where
        F: UniformRand,
    {
        let mut data = Vec::with_capacity(rows);
        for _ in 0..rows {
            let mut row = Vec::with_capacity(cols);
            for _ in 0..cols {
                row.push(F::rand(rng));
            }
            data.push(row);
        }
        Matrix { rows, cols, data }
    }

    /// Creates a new matrix from the given data struct
    pub fn from_data(data: &Data<u8>) -> Self {
        let rows = data.params.n;
        let cols = data.params.m;

        let mut field_data = Vec::with_capacity(rows);
        for i in 0..rows {
            let mut row = Vec::with_capacity(cols);
            for j in 0..cols {
                row.push(F::from(data.matrix[i][j]));
            }
            field_data.push(row);
        }
        Matrix { rows, cols, data: field_data }
    }

    /// get the row at index `idx` (0 <= idx < rows)
    pub fn row(&self, idx: usize) -> Vec<F> {
        assert!(idx < self.rows, "Row index out of bounds");
        self.data[idx].to_vec()
    }

    /// get a mutable reference to the row at index `idx` (0 <= idx < rows)
    pub fn row_mut(&mut self, idx: usize) -> &mut Vec<F> {
        assert!(idx < self.rows, "Row index out of bounds");
        &mut self.data[idx]
    }

    /// Print the matrix
    pub fn pretty_print(&self) {
        for (i, shard) in self.data.iter().enumerate() {
            print!("row {:>2}: ", i);
            for &b in shard {
                print!("{:>3} ", b);
            }
            println!();
        }
    }
}
262 src/kzg.rs Normal file
@@ -0,0 +1,262 @@
use ark_ff::{One, PrimeField};
use std::ops::{Add, AddAssign};
use ark_poly::univariate::DensePolynomial;
use ark_poly::{EvaluationDomain, Evaluations, GeneralEvaluationDomain};
use ark_poly_commit::{
    PolynomialCommitment,
    LabeledPolynomial,
    LabeledCommitment,
    marlin_pc::{Commitment, Randomness},
};
use ark_std::{end_timer, start_timer, test_rng};
use anyhow::{anyhow, Result};
use ark_bls12_381::Bls12_381;
use ark_crypto_primitives::sponge::CryptographicSponge;
use ark_ec::CurveGroup;
use ark_ec::pairing::Pairing;
use ark_poly_commit::marlin_pc::MarlinKZG10;
use ark_poly_commit::sonic_pc::UniversalParams;
use crate::byte_data::Params;
use crate::field_matrix::Matrix;
use crate::traits::PolynomialCommitmentScheme;
use ark_crypto_primitives::sponge::poseidon::{PoseidonSponge, PoseidonConfig};
use ark_poly_commit::kzg10::Proof;

pub type E = Bls12_381;
pub type F = <E as Pairing>::ScalarField;
pub type UniPoly381 = DensePolynomial<F>;
pub type PCS = MarlinKZG10<E, UniPoly381>;

pub struct KZGSRS {
    pub ploycommit_domain: GeneralEvaluationDomain<F>,
    pub pp: UniversalParams<E>
}

pub struct KZGPolyComm {
    params: Params,
}

impl PolynomialCommitmentScheme for KZGPolyComm {
    type Params = Params;
    type Field = F;
    type FieldMatrix<F> = Matrix<Self::Field>;
    type SRS = KZGSRS;
    type Commitment = KZGCommitments;
    type Proof = Proof<E>;

    fn new(params: Params) -> Self {
        Self {
            params,
        }
    }

    fn setup(&self) -> Result<Self::SRS> {
        let rng = &mut test_rng();
        let pp = PCS::setup(self.params.m, None, rng)?;
        let ploycommit_domain = EvaluationDomain::<F>::new(self.params.m).ok_or(anyhow!("polycommit domain error"))?;
        Ok(KZGSRS {
            ploycommit_domain,
            pp,
        })
    }

    fn commit(&self, srs: &Self::SRS, matrix: &Self::FieldMatrix<F>) -> Result<Self::Commitment> {
        let rng = &mut test_rng();
        let degree = self.params.m;
        let (ck, _vk) = PCS::trim(&srs.pp, degree, degree, Some(&[degree]))?;
        let mut row_polynomials = vec![];
        let timer = start_timer!(|| format!("Poly evaluations and interpolation for {} rows", degree));
        for i in 0..matrix.rows {
            let poly_evals = Evaluations::from_vec_and_domain(matrix.row(i), srs.ploycommit_domain.clone());
            let row_poly = poly_evals.interpolate();
            let label = String::from(format!("row_poly_{}", i));
            let labeled_poly = LabeledPolynomial::new(
                label,
                row_poly,
                Some(degree),
                Some(degree),
            );
            row_polynomials.push(labeled_poly);
        }
        end_timer!(timer);
        let timer = start_timer!(|| format!("KZG commitment for {} rows", degree));
        let (labeled_comms, states) = PCS::commit(&ck, &row_polynomials, Some(rng)).unwrap();
        end_timer!(timer);
        Ok(
            KZGCommitments::new(row_polynomials, labeled_comms, states)
        )
    }

    fn open(comms: &Self::Commitment, srs: &Self::SRS, row: usize, col: usize) -> Result<Self::Proof> {
        // point
        let z = srs.ploycommit_domain.element(col);

        // trim the srs
        let m = srs.ploycommit_domain.size();
        let (ck, _vk) = PCS::trim(&srs.pp, m, m, Some(&[m]))?;

        let (polys, comms_vec, states) = comms.get_refs();
        let poly = &polys[row];
        let commit = &comms_vec[row];
        let state = &states[row];

        let mut sponge = test_sponge::<F>();

        let proof = PCS::open(
            &ck,
            std::iter::once(poly),
            std::iter::once(commit),
            &z,
            &mut sponge,
            &mut std::iter::once(state),
            None,
        )?;

        Ok(proof)
    }

    fn batch_open(_: &Self::Commitment, _: &Self::SRS, _rows: Vec<usize>, _cols: Vec<usize>) -> Result<Vec<Self::Proof>> {
        todo!()
    }

    fn verify(
        comms: &Self::Commitment,
        srs: &Self::SRS,
        row: usize,
        col: usize,
        value: F,
        proof: &Self::Proof,
    ) -> Result<bool> {
        let z = srs.ploycommit_domain.element(col);

        let m = srs.ploycommit_domain.size();
        let (_ck, vk) = PCS::trim(&srs.pp, m, m, Some(&[m]))?;

        // get labeled commitment
        let (_polys, commits, _states) = comms.get_refs();
        let commit = &commits[row];

        let mut sponge = test_sponge::<F>();
        Ok(PCS::check(
            &vk,
            std::iter::once(commit),
            &z,
            std::iter::once(value),
            proof,
            &mut sponge,
            None,
        )?)
    }

    fn batch_verify(_comms: &Self::Commitment, _srs: &Self::SRS, _rows: Vec<usize>, _cols: Vec<usize>, _values: Vec<F>, _proof: &Vec<Self::Proof>) -> Result<bool> {
        todo!()
    }

    fn update_commitments(
        srs: &KZGSRS,
        comm: &mut KZGCommitments,
        row_idx: usize,
        old_row: &[F],
        new_row: &[F],
    ) -> Result<()> {

        let n = comm.poly.len();
        let domain = &srs.ploycommit_domain;
        let m = domain.size();
        let (ck, _vk) = PCS::trim(&srs.pp, m, 2, Some(&[m]))?;
        // Bounds and length checks
        assert!(row_idx < n, "row_idx {} out of bounds ({} rows)", row_idx, n);
        assert_eq!(old_row.len(), m, "old_row must have length {}", m);
        assert_eq!(new_row.len(), m, "new_row must have length {}", m);

        let deltas: Vec<F> = old_row.iter()
            .zip(new_row.iter())
            .map(|(o, n)| *n - *o)
            .collect();

        let delta_poly: DensePolynomial<F> =
            Evaluations::from_vec_and_domain(deltas, domain.clone())
                .interpolate();

        let label = format!("row_diff_{}", row_idx);
        let labeled = LabeledPolynomial::new(label, delta_poly.clone(), Some(m), None);
        let rng = &mut test_rng();
        let (diff_comms, diff_rands) = PCS::commit(&ck, std::iter::once(&labeled), Some(rng))?;
        let diff_comm = &diff_comms[0];
        let diff_rand = &diff_rands[0];

        let f_row = comm.poly[row_idx].polynomial_mut();
        f_row.add_assign(&delta_poly);

        let mut cmt = comm.comm[row_idx].commitment().clone();
        let main_patch = diff_comm.commitment().comm.0;
        cmt.comm.0 = cmt.comm.0.add(&main_patch).into_affine();
        if let (Some(mut shifted), Some(diff_shifted)) = (
            cmt.shifted_comm.clone(),
            diff_comm.commitment().shifted_comm.clone(),
        ) {
            shifted.0 = shifted.0.add(&diff_shifted.0).into_affine();
            cmt.shifted_comm = Some(shifted);
        }
        let lbl = comm.comm[row_idx].label().to_string();
        let dgb = comm.comm[row_idx].degree_bound();
        comm.comm[row_idx] = LabeledCommitment::new(lbl, cmt, dgb);

        comm.rand[row_idx].add_assign((F::one(), diff_rand));

        Ok(())
    }
}

fn test_sponge<F: PrimeField>() -> PoseidonSponge<F> {
    let full_rounds = 8;
    let partial_rounds = 31;
    let alpha = 17;

    let mds = vec![
        vec![F::one(), F::zero(), F::one()],
        vec![F::one(), F::one(), F::zero()],
        vec![F::zero(), F::one(), F::one()],
    ];

    let mut v = Vec::new();
    let mut ark_rng = test_rng();

    for _ in 0..(full_rounds + partial_rounds) {
        let mut res = Vec::new();

        for _ in 0..3 {
            res.push(F::rand(&mut ark_rng));
        }
        v.push(res);
    }
    let config = PoseidonConfig::new(full_rounds, partial_rounds, alpha, mds, v, 2, 1);
    PoseidonSponge::new(&config)
}

pub struct KZGCommitments {
    pub poly: Vec<LabeledPolynomial<F, UniPoly381>>,
    pub comm: Vec<LabeledCommitment<Commitment<E>>>,
    pub rand: Vec<Randomness<F, UniPoly381>>,
}

impl KZGCommitments {
    pub fn new(
        poly: Vec<LabeledPolynomial<F, UniPoly381>>,
        comm: Vec<LabeledCommitment<Commitment<E>>>,
        rand: Vec<Randomness<F, UniPoly381>>,
    ) -> Self {
        Self {
            poly,
            comm,
            rand,
        }
    }
    pub fn get_refs(&self) -> (
        &Vec<LabeledPolynomial<F, UniPoly381>>,
        &Vec<LabeledCommitment<Commitment<E>>>,
        &Vec<Randomness<F, UniPoly381>>,
    ) {
        (&self.poly, &self.comm, &self.rand)
    }
}
src/lib.rs
@@ -1,2 +1,6 @@
pub mod matrix;
pub mod dynamic_data;
pub mod byte_data;
pub mod kzg;
pub mod field_matrix;
pub mod test;
pub mod traits;
pub mod encoder;
207 src/matrix.rs
@@ -1,207 +0,0 @@
use ark_ff::Field;
use std::ops::{Index, IndexMut};
use ark_std::rand::Rng;
use ark_std::UniformRand;


/// a generic dense matrix stored in row-major order.
#[derive(Clone, Debug, PartialEq)]
pub struct Matrix<T: Field + Clone> {
    rows: usize,
    cols: usize,
    data: Vec<T>,
}

impl<T: Field + Clone> Matrix<T> {
    /// Creates a new matrix from raw data.
    pub fn new(rows: usize, cols: usize, data: Vec<T>) -> Self {
        assert!(data.len() == rows * cols, "Data length must equal rows*cols");
        Matrix { rows, cols, data }
    }

    /// Generates a random matrix with given dimensions, uses given rng for randomness.
    pub fn new_random<R: Rng + ?Sized>(rows: usize, cols: usize, rng: &mut R) -> Self
    where
        T: UniformRand,
    {
        let mut data = Vec::with_capacity(rows * cols);
        for _ in 0..rows * cols {
            data.push(T::rand(rng));
        }
        Matrix { rows, cols, data }
    }

    /// Creates a zero matrix (rows x cols).
    pub fn zeros(rows: usize, cols: usize) -> Self {
        Matrix { rows, cols, data: vec![T::zero(); rows * cols] }
    }

    /// Creates an identity matrix of size n x n.
    pub fn identity(n: usize) -> Self {
        let mut m = Self::zeros(n, n);
        for i in 0..n {
            m[(i, i)] = T::one();
        }
        m
    }

    /// Constructs from a nested Vec
    pub fn from_nested_vec(nested: Vec<Vec<T>>) -> Self {
        let rows = nested.len();
        assert!(rows > 0, "must have at least one row");
        let cols = nested[0].len();
        let mut data = Vec::with_capacity(rows * cols);
        for row in nested.into_iter() {
            assert!(row.len() == cols, "all rows must have the same length");
            data.extend(row);
        }
        Matrix { rows, cols, data }
    }

    /// Returns the number of rows.
    #[inline]
    pub fn rows(&self) -> usize { self.rows }

    /// Returns the number of columns.
    #[inline]
    pub fn cols(&self) -> usize { self.cols }

    /// Returns both dimensions (rows, cols).
    #[inline]
    pub fn dims(&self) -> (usize, usize) { (self.rows, self.cols) }

    /// Returns a reference to the element at (row, col). Panics if out of bounds.
    #[inline]
    pub fn get(&self, row: usize, col: usize) -> &T {
        assert!(row < self.rows && col < self.cols, "Index out of bounds");
        &self.data[row * self.cols + col]
    }

    /// Returns a mutable reference to the element at (row, col).
    #[inline]
    pub fn get_mut(&mut self, row: usize, col: usize) -> &mut T {
        assert!(row < self.rows && col < self.cols, "Index out of bounds");
        &mut self.data[row * self.cols + col]
    }

    /// Returns a slice for the given row.
    pub fn row(&self, row: usize) -> &[T] {
        assert!(row < self.rows, "Row index out of bounds");
        let start = row * self.cols;
        &self.data[start..start + self.cols]
    }

    /// Returns a mutable slice for the given row.
    pub fn row_mut(&mut self, row: usize) -> &mut [T] {
        assert!(row < self.rows, "Row index out of bounds");
        let start = row * self.cols;
        &mut self.data[start..start + self.cols]
    }

    /// Swaps two rows in-place.
    pub fn swap_rows(&mut self, i: usize, j: usize) {
        assert!(i < self.rows && j < self.rows, "Row index out of bounds");
        for col in 0..self.cols {
            let a = i * self.cols + col;
            let b = j * self.cols + col;
            self.data.swap(a, b);
        }
    }

    /// Horizontal concatenation: [self | other].
    pub fn hcat(&self, other: &Self) -> Self {
        assert!(self.rows == other.rows, "Row counts must match");
        let mut result = Self::zeros(self.rows, self.cols + other.cols);
        for r in 0..self.rows {
            // copy self
            let src = r * self.cols;
            let dst = r * result.cols;
            result.data[dst..dst + self.cols]
                .copy_from_slice(&self.data[src..src + self.cols]);
            // copy other
            let src2 = r * other.cols;
            result.data[dst + self.cols..dst + self.cols + other.cols]
                .copy_from_slice(&other.data[src2..src2 + other.cols]);
        }
        result
    }

    /// Selects a subset of columns by index.
    pub fn select_columns(&self, cols_idx: &[usize]) -> Self {
        let mut result = Self::zeros(self.rows, cols_idx.len());
        for r in 0..self.rows {
            for (j, &c) in cols_idx.iter().enumerate() {
                result.data[r * result.cols + j] = self.get(r, c).clone();
            }
        }
        result
    }

    /// Returns a Vec of all elements in the given column.
    pub fn column(&self, col: usize) -> Vec<T> {
        assert!(col < self.cols, "Column index out of bounds");
        let mut v = Vec::with_capacity(self.rows);
        for r in 0..self.rows {
            v.push(self.get(r, col).clone());
        }
        v
    }

    /// Computes the inverse via in-place Gauss–Jordan; returns None if singular.
    pub fn invert(&self) -> Option<Self> {
        assert!(self.rows == self.cols, "Can only invert square matrices");
        let n = self.rows;
        let mut aug = self.hcat(&Self::identity(n));

        for i in 0..n {
            // pivot check and swap if zero
            if aug[(i, i)].is_zero() {
                if let Some(k) = (i + 1..n).find(|&k| !aug[(k, i)].is_zero()) {
                    aug.swap_rows(i, k);
                } else {
                    return None;
                }
            }
            // normalize pivot row
            let inv_pivot = aug[(i, i)].inverse().unwrap();
            for col in i..2 * n {
                let idx = i * aug.cols + col;
                aug.data[idx] = aug.data[idx].clone() * inv_pivot.clone();
            }
            // Clone pivot row slice
            let pivot_start = i * aug.cols + i;
            let pivot_len = 2 * n - i;
            let pivot_row: Vec<T> = aug.data[pivot_start..pivot_start + pivot_len].to_vec();

            // remove other rows
            for r in 0..n {
                if r != i {
                    let factor = aug[(r, i)].clone();
                    if !factor.is_zero() {
                        let row_offset = r * aug.cols;
                        for k in 0..pivot_len {
                            let idx = row_offset + i + k;
                            aug.data[idx] = aug.data[idx].clone() - factor.clone() * pivot_row[k].clone();
                        }
                    }
                }
            }
        }
        Some(aug.select_columns(&(n..2 * n).collect::<Vec<_>>()))
    }
}

// indexing with (row, col)
impl<T: Field + Clone> Index<(usize, usize)> for Matrix<T> {
    type Output = T;
    fn index(&self, (row, col): (usize, usize)) -> &Self::Output {
        self.get(row, col)
    }
}

// mutable indexing with (row, col)
impl<T: Field + Clone> IndexMut<(usize, usize)> for Matrix<T> {
    fn index_mut(&mut self, (row, col): (usize, usize)) -> &mut Self::Output {
        self.get_mut(row, col)
    }
}
265 src/test.rs Normal file
@@ -0,0 +1,265 @@
#[cfg(test)]
mod tests {
    use crate::byte_data::{Data, Params};
    use ark_poly::{EvaluationDomain};
    use reed_solomon_erasure::galois_8::ReedSolomon;
    use crate::kzg::{F, KZGPolyComm};
    use crate::field_matrix::Matrix;
    use ark_poly_commit::{Polynomial};
    use crate::traits::{DataMatrix, Encoder, PolynomialCommitmentScheme};

    #[test]
    fn test_encode_columns() {
        // test parameters
        let k = 4;
        let p = 4;
        let n = k + p;
        let m = 8;

        // generate Data with random content
        let params = Params {
            k,
            n,
            m,
        };
        let mut data = Data::new_random(params);
        println!("data #row ={}", data.matrix.len());
        println!("data #col ={}", data.matrix[0].len());
        println!("data before encoding:");
        data.pretty_print();
        // original data matrix
        let original: Vec<Vec<u8>> = data.matrix[..k].to_vec();

        // encode
        data.encode().expect("encode failed");
        println!("data after encoding:");
        data.pretty_print();

        // verify the data rows are unchanged
        assert_eq!(data.matrix[..k], original[..]);

        // simulate the loss of one data row and one parity row
        let rse = ReedSolomon::new(k, p).unwrap();
        let mut matrix_opts: Vec<_> = data.matrix.iter().cloned().map(Some).collect();
        matrix_opts[1] = None;
        matrix_opts[k] = None;

        // reconstruct missing rows
        rse.reconstruct(&mut matrix_opts).expect("reconstruct failed");

        // verify reconstruction for data shards
        for i in 0..k {
            let recovered = matrix_opts[i].clone().unwrap();
            assert_eq!(recovered, &original[i][..]);
        }
    }

    #[test]
    fn test_commit_rows() {
        // dimensions: 8 rows (4 parity), 8 columns
        let n = 8;
        let k = 4;
        let m = 8;

        // generate Data with random content
        let params = Params {
            k,
            n,
            m,
        };
        let mut data = Data::new_random(params.clone());
        data.encode().expect("encode failed");

        // lift the byte data into an n×m field matrix
        let matrix = Matrix::from_data(&data);

        // new kzg
        let kzg = KZGPolyComm::new(params);
        // setup kzg
        let srs = kzg.setup().expect("setup should succeed");

        // commit to its rows
        let kzg_comm = kzg.commit(&srs, &matrix).expect("commit_rows should succeed");


        let (row_polys, commitments, randomness) =
            kzg_comm.get_refs();

        // we produced exactly one polynomial, one comm, one rand per row
        assert_eq!(row_polys.len(), m);
        assert_eq!(commitments.len(), m);
        assert_eq!(randomness.len(), m);

        // check that each polynomial really interpolates its original row
        for (i, poly) in row_polys.iter().enumerate() {
            let row = matrix.row(i);
            // evaluate poly at each domain point and collect
            let evals: Vec<_> = srs
                .ploycommit_domain
                .elements()
                .map(|x| poly.polynomial().evaluate(&x))
                .collect();
            assert_eq!(evals, row);
        }
    }

    #[test]
    fn test_open_commitments() {
        // dimensions: 8 rows (4 parity), 8 columns
        let n = 8;
        let k = 4;
        let m = 8;

        // generate Data with random content
        let params = Params {
            k,
            n,
            m,
        };
        let mut data = Data::new_random(params.clone());
        data.encode().expect("encode failed");

        // lift the byte data into an n×m field matrix
        let matrix = Matrix::from_data(&data);

        // new kzg
        let kzg = KZGPolyComm::new(params);
        // setup kzg
        let srs = kzg.setup().expect("setup should succeed");

        // commit to its rows
        let kzg_comm = kzg.commit(&srs, &matrix).expect("commit_rows should succeed");
        // check all cells
        for row in 0..n {
            for col in 0..m {
                let proof = KZGPolyComm::open(&kzg_comm, &srs, row, col)
                    .expect("open should succeed");
                let expected: F = matrix.row(row)[col].clone();

                assert!(
                    KZGPolyComm::verify(&kzg_comm, &srs, row, col, expected, &proof)
                        .expect("verify should succeed"),
                    "KZG open/verify failed for row={}, col={}",
                    row,
                    col
                );
            }
        }
    }

    #[test]
    fn test_update_col() {
        // dimensions: 8 rows (4 parity), 8 columns
        let n = 8;
        let k = 4;
        let m = 8;

        // generate Data with random content
        let params = Params {
            k,
            n,
            m,
        };
        // snapshot of original
        let mut data = Data::new_random(params);
        data.encode().expect("encode failed");
        println!("original data:");
        data.pretty_print();

        // pick a col and a new data col
        let c = 5;
        let new_col: Vec<u8> = (0..k)
            .map(|i| i as u8)
            .collect();

        // apply update
        data.update_col(c, &new_col);
        println!("data after update:");
        data.pretty_print();

        // data matrix rows [0..k) at col c must match new_col
        for i in 0..k {
            assert_eq!(
                data.matrix[i][c],
                new_col[i],
                "data matrix row {} at column {} should be updated", i, c
            );
        }

        let _coded_row = data.encode_col(c).unwrap();
        println!("data after encoding update:");
        data.pretty_print();
    }

    #[test]
    fn test_update_commitments() -> anyhow::Result<()> {
        // dimensions: 8 rows (4 parity), 8 columns
        let n = 8;
        let k = 4;
        let m = 8;

        // generate Data with random content
        let params = Params {
            k,
            n,
            m,
        };
        // snapshot of original
        let mut data = Data::new_random(params.clone());
        data.encode().expect("encode failed");

        // lift the byte data into a field matrix
        let mut matrix = Matrix::<F>::from_data(&data);
        matrix.pretty_print();

        // new kzg
        let kzg = KZGPolyComm::new(params);
        // setup kzg
        let srs = kzg.setup().expect("setup should succeed");

        // commit to its rows
        let mut kzg_comm = kzg.commit(&srs, &matrix).expect("commit_rows should succeed");

        // a row to update
        let row_idx = 1;
        let old_row = matrix.row(row_idx);

        // a new row by adding a constant to each element
        let new_row: Vec<_> = old_row.iter()
            .map(|v| *v + F::from(10u64))
            .collect();

        // Apply the change to the in-memory matrix
        {
            let row_slice = matrix.row_mut(row_idx);
            for (j, val) in new_row.iter().enumerate() {
                row_slice[j] = *val;
            }
        }
        matrix.pretty_print();

        // do the comm update
        KZGPolyComm::update_commitments(&srs, &mut kzg_comm, row_idx, &old_row, &new_row)?;

        // Verify that each row polynomial now evaluates to the updated matrix
        for (i, poly) in kzg_comm.get_refs().0.iter().enumerate() {
            let evals: Vec<F> = srs.ploycommit_domain
                .elements()
                .map(|x| poly.polynomial().evaluate(&x))
                .collect();
            assert_eq!(evals, matrix.row(i));
        }

        // === new fresh commit on updated matrix ===
        let kzg_comm_fresh = kzg.commit(&srs, &matrix)?;
        // Compare each row commitment
        for (i, old_lbl_comm) in kzg_comm.get_refs().1.iter().enumerate() {
            let updated_comm = old_lbl_comm.commitment();
            let fresh_comm = kzg_comm_fresh.get_refs().1[i].commitment();
            assert_eq!(updated_comm, fresh_comm, "Row commitment mismatch at row {}", i);
        }

        Ok(())
    }
}
68 src/traits.rs Normal file
@@ -0,0 +1,68 @@
use anyhow::Result;

pub trait DataMatrix<T> {
    type Params;
    fn new_random(_: Self::Params) -> Self;
    fn update_col(&mut self, c: usize, new_col: &[T]);
    fn pretty_print(&self);
}

/// Encoder trait
pub trait Encoder<T> {

    /// encode in place the input data matrix
    fn encode(&mut self) -> Result<()>;
    /// encode a single column in place
    fn encode_col(&mut self, c: usize) -> Result<Vec<T>>;
    /// reconstruct in place
    fn reconstruct(&mut self) -> Result<()>;
}

/// Polynomial Commitment scheme (e.g. KZG) trait
pub trait PolynomialCommitmentScheme {
    type Params;
    type Field;
    type FieldMatrix<F>;
    type SRS;
    type Commitment;
    type Proof;

    fn new(_params: Self::Params) -> Self;
    fn setup(&self) -> Result<Self::SRS>;
    fn commit(&self, _srs: &Self::SRS, _matrix: &Self::FieldMatrix<Self::Field>) -> Result<Self::Commitment>;
    fn update_commitments(
        srs: &Self::SRS,
        comm: &mut Self::Commitment,
        row_idx: usize,
        old_row: &[Self::Field],
        new_row: &[Self::Field],
    ) -> Result<()>;
    fn open(
        _: &Self::Commitment,
        _: &Self::SRS,
        _row: usize,
        _col: usize,
    ) -> Result<Self::Proof>;
    fn batch_open(
        _: &Self::Commitment,
        _: &Self::SRS,
        _rows: Vec<usize>,
        _cols: Vec<usize>,
    ) -> Result<Vec<Self::Proof>>;
    fn verify(
        comms: &Self::Commitment,
        srs: &Self::SRS,
        row: usize,
        col: usize,
        value: Self::Field,
        proof: &Self::Proof,
    ) -> Result<bool>;
    fn batch_verify(
        comms: &Self::Commitment,
        srs: &Self::SRS,
        rows: Vec<usize>,
        cols: Vec<usize>,
        values: Vec<Self::Field>,
        proof: &Vec<Self::Proof>,
    ) -> Result<bool>;
}