mirror of https://github.com/logos-storage/plonky2.git, synced 2026-01-12 10:43:07 +00:00

memory row generation

This commit is contained in:
parent 7ed2aa6315
commit fcf216012c
@@ -162,6 +162,10 @@ mod tests {
                .unwrap()
            })
            .collect();
        let memory_trace = memory_stark.generate_trace(keccak_inputs);
        let column_to_copy: Vec<_> = keccak_trace[keccak_looked_col].values[..].into();

        let default = vec![F::ONE; 1];

        let mut cpu_trace_rows = vec![];
        for i in 0..num_keccak_perms {
@@ -152,7 +152,34 @@ pub fn generate_range_check_value<F: RichField>(
}

impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
    fn generate_trace_rows(
        &self,
        memory_ops: Vec<(F, F, F, [F; 8], F, F)>,
    ) -> Vec<[F; NUM_REGISTERS]> {
        let num_ops = memory_ops.len();

        // Fill the trace column-major: one `Vec` per register column.
        let mut trace_cols: [Vec<F>; NUM_REGISTERS] =
            vec![vec![F::ZERO; num_ops]; NUM_REGISTERS].try_into().unwrap();
        for i in 0..num_ops {
            let (context, segment, virt, values, is_read, timestamp) = memory_ops[i];
            trace_cols[MEMORY_ADDR_CONTEXT][i] = context;
            trace_cols[MEMORY_ADDR_SEGMENT][i] = segment;
            trace_cols[MEMORY_ADDR_VIRTUAL][i] = virt;
            for j in 0..8 {
                trace_cols[memory_value_limb(j)][i] = values[j];
            }
            trace_cols[MEMORY_IS_READ][i] = is_read;
            trace_cols[MEMORY_TIMESTAMP][i] = timestamp;
        }

        self.generate_memory(&mut trace_cols);

        // Transpose the column-major data into one row per memory operation.
        let mut trace_rows = vec![[F::ZERO; NUM_REGISTERS]; num_ops];
        for (i, col) in trace_cols.iter().enumerate() {
            for (j, &val) in col.iter().enumerate() {
                trace_rows[j][i] = val;
            }
        }
        trace_rows
    }

    fn generate_memory(&self, trace_cols: &mut [Vec<F>]) {
        let context = &trace_cols[MEMORY_ADDR_CONTEXT];
        let segment = &trace_cols[MEMORY_ADDR_SEGMENT];
        let virtuals = &trace_cols[MEMORY_ADDR_VIRTUAL];
@@ -441,8 +468,13 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use anyhow::Result;
    use plonky2::field::field_types::Field;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use rand::{thread_rng, Rng};

    use crate::memory::memory_stark::MemoryStark;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
@@ -472,4 +504,59 @@ mod tests {
        };
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }

    #[test]
    fn test_memory_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = MemoryStark<F, D>;

        let stark = S {
            f: Default::default(),
        };

        const MAX_CONTEXT: usize = 256;
        const MAX_SEGMENT: usize = 8;
        const MAX_VIRTUAL: usize = 1 << 12;

        let mut rng = thread_rng();

        let num_ops = 20;
        let mut memory_ops = Vec::new();
        let mut current_memory_values: HashMap<(F, F, F), [F; 8]> = HashMap::new();
        let mut cur_timestamp = 0;
        for i in 0..num_ops {
            // The first operation must be a write, since there is nothing to read yet.
            let is_read = if i == 0 { false } else { rng.gen() };
            let is_read_field = F::from_bool(is_read);

            let (context, segment, virt, vals) = if is_read {
                // Read back the current value of an address that was already written.
                let written: Vec<_> = current_memory_values.keys().collect();
                let &(context, segment, virt) = written[rng.gen_range(0..written.len())];
                let &vals = current_memory_values.get(&(context, segment, virt)).unwrap();

                (context, segment, virt, vals)
            } else {
                // Write a random value to a random address.
                let context = F::from_canonical_usize(rng.gen_range(0..MAX_CONTEXT));
                let segment = F::from_canonical_usize(rng.gen_range(0..MAX_SEGMENT));
                let virt = F::from_canonical_usize(rng.gen_range(0..MAX_VIRTUAL));

                let val: [u32; 8] = rng.gen();
                let vals: [F; 8] = val
                    .iter()
                    .map(|&x| F::from_canonical_u32(x))
                    .collect::<Vec<_>>()
                    .try_into()
                    .unwrap();

                current_memory_values.insert((context, segment, virt), vals);

                (context, segment, virt, vals)
            };

            let timestamp = F::from_canonical_usize(cur_timestamp);
            cur_timestamp += 1;

            // Field order matches the `(context, segment, virt, values, is_read, timestamp)`
            // tuples expected by `generate_trace_rows`.
            memory_ops.push((context, segment, virt, vals, is_read_field, timestamp))
        }

        let _rows = stark.generate_trace_rows(memory_ops);

        Ok(())
    }
}
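
Memory arguments of this shape typically require the operations to be reordered by address before the per-row helper columns are generated, which is presumably what `generate_memory` sets up from the address columns. A minimal non-circuit sketch of that ordering step, using a hypothetical plain-data `MemOp` alias in place of the field-element tuples above:

// Hypothetical sketch, not the crate's API: sort memory operations by
// (context, segment, virtual address), breaking ties by timestamp, which is
// the order a memory STARK usually expects before building helper columns.
type MemOp = (u64, u64, u64, [u32; 8], bool, u64);

fn sort_memory_ops(ops: &mut [MemOp]) {
    ops.sort_by_key(|&(context, segment, virt, _values, _is_read, timestamp)| {
        (context, segment, virt, timestamp)
    });
}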

plonky2/src/gadgets/curve_msm.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
use plonky2_field::extension_field::Extendable;
use plonky2_field::field_types::Field;

use crate::curve::curve_types::{AffinePoint, Curve, CurveScalar, ProjectivePoint};
use crate::gadgets::curve::AffinePointTarget;
use crate::gadgets::nonnative::NonNativeTarget;
use crate::hash::hash_types::RichField;
use crate::plonk::circuit_builder::CircuitBuilder;

const DIGITS_PER_CHUNK: usize = 80;

const WINDOW_SIZE: usize = 4;

pub struct MsmPrecomputationTarget<C: Curve> {
    /// For each generator (in the order they were passed to `msm_precompute`), contains a vector
    /// of powers, i.e. [(2^w)^i] for i < DIGITS.
    powers_per_generator: Vec<Vec<AffinePointTarget<C>>>,
}

impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
    pub fn precompute_single_generator<C: Curve>(
        &mut self,
        g: AffinePointTarget<C>,
    ) -> Vec<AffinePointTarget<C>> {
        let digits = (C::ScalarField::BITS + WINDOW_SIZE - 1) / WINDOW_SIZE;
        let mut powers: Vec<AffinePointTarget<C>> = Vec::with_capacity(digits);
        powers.push(g);
        for i in 1..digits {
            // Each power is the previous one doubled WINDOW_SIZE times,
            // i.e. multiplied by 2^WINDOW_SIZE.
            let mut power_i_proj = powers[i - 1].clone();
            for _j in 0..WINDOW_SIZE {
                power_i_proj = self.curve_double(&power_i_proj);
            }
            powers.push(power_i_proj);
        }
        powers
    }

    pub fn msm_precompute<C: Curve>(
        &mut self,
        generators: &[AffinePointTarget<C>],
    ) -> MsmPrecomputationTarget<C> {
        MsmPrecomputationTarget {
            powers_per_generator: generators
                .iter()
                .map(|g| self.precompute_single_generator(g.clone()))
                .collect(),
        }
    }

    pub fn msm_execute<C: Curve>(
        &mut self,
        precomputation: &MsmPrecomputationTarget<C>,
        scalars: &[NonNativeTarget<C::ScalarField>],
    ) -> AffinePointTarget<C> {
        debug_assert_eq!(precomputation.powers_per_generator.len(), scalars.len());

        let digits = (C::ScalarField::BITS + WINDOW_SIZE - 1) / WINDOW_SIZE;
        let base = 1 << WINDOW_SIZE;

        // Decompose each scalar into 4-bit limbs (digits).
        for (_i, scalar) in scalars.iter().enumerate() {
            let _limbs = self.split_nonnative_to_4_bit_limbs(scalar);
        }

        // Work in progress (Yao's method): constants for each possible digit
        // value and one accumulator per digit value, still to be wired up to
        // the decompositions above.
        let _digit_constants: Vec<_> = (0..base)
            .map(|i| self.constant(F::from_canonical_usize(i)))
            .collect();
        let _digit_acc: Vec<ProjectivePoint<C>> = Vec::new();
        for _i in 0..base {
            // TODO: accumulate the points whose current digit equals `_i`.
        }

        todo!("combine the per-digit accumulators into the final point")
    }
}
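
For intuition, the windowed method that `msm_execute` is working toward can be modeled outside the circuit with plain integers standing in for curve points, so that scalar multiplication becomes ordinary multiplication. This is an illustrative sketch only; the real gadget operates on `AffinePointTarget`s and nonnative scalar targets:

// Toy model of the 4-bit windowed MSM: powers[i] plays the role of
// (2^WINDOW_SIZE)^i * g, as produced by `precompute_single_generator`.
const WINDOW_SIZE: usize = 4;

fn precompute(g: i128, num_digits: usize) -> Vec<i128> {
    let mut powers = vec![g];
    for i in 1..num_digits {
        powers.push(powers[i - 1] * (1 << WINDOW_SIZE));
    }
    powers
}

// scalar = sum_i digit_i * 16^i, so scalar * g = sum_i digit_i * (16^i * g).
fn msm(precomputations: &[Vec<i128>], scalars: &[u64]) -> i128 {
    let num_digits = (64 + WINDOW_SIZE - 1) / WINDOW_SIZE;
    let mut acc = 0i128;
    for (powers, &scalar) in precomputations.iter().zip(scalars) {
        for i in 0..num_digits {
            let digit = ((scalar >> (i * WINDOW_SIZE)) & 0xf) as i128;
            acc += digit * powers[i];
        }
    }
    acc
}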

starky/src/factorial_stark.rs (new file, 244 lines)
@@ -0,0 +1,244 @@
use std::marker::PhantomData;

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Toy STARK system used for testing.
/// Computes a factorial sequence with state `[fact, n]` using the state transition
/// `fact' <- fact * (n + 1), n' <- n + 1`.
#[derive(Copy, Clone)]
struct FactorialStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> FactorialStark<F, D> {
    // The first public input is `x0`.
    const PI_INDEX_X0: usize = 0;
    // The second public input is the first element of the last row, which should be equal to
    // `x0 * num_rows!`.
    const PI_INDEX_RES: usize = 1;

    fn new(num_rows: usize) -> Self {
        Self {
            num_rows,
            _phantom: PhantomData,
        }
    }

    /// Generate the trace using `x0, 1` as initial state values.
    fn generate_trace(&self, x0: F) -> Vec<PolynomialValues<F>> {
        let trace_rows = (0..self.num_rows)
            .scan([x0, F::ONE], |acc, _| {
                let tmp = *acc;
                acc[0] = tmp[0] * (tmp[1] + F::ONE);
                acc[1] = tmp[1] + F::ONE;
                Some(tmp)
            })
            .collect::<Vec<_>>();
        trace_rows_to_poly_values(trace_rows)
    }
}
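
As a sanity check on the transition, the first rows of the trace for `x0 = 1` are `[1, 1], [2, 2], [6, 3], [24, 4], ...`: column 1 counts n = 1, 2, 3, ... and column 0 holds `x0 * n!`. The same recurrence over plain integers:

// Plain-integer version of FactorialStark's [fact, n] state transition:
// fact' = fact * (n + 1), n' = n + 1, starting from [x0, 1].
fn factorial_trace(x0: u64, num_rows: usize) -> Vec<[u64; 2]> {
    let mut rows = Vec::with_capacity(num_rows);
    let mut state = [x0, 1];
    for _ in 0..num_rows {
        rows.push(state);
        state = [state[0] * (state[1] + 1), state[1] + 1];
    }
    rows
}
// factorial_trace(1, 4) == [[1, 1], [2, 2], [6, 3], [24, 4]]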

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FactorialStark<F, D> {
    const COLUMNS: usize = 2;
    const PUBLIC_INPUTS: usize = 2;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // Check public inputs.
        yield_constr
            .constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
        yield_constr
            .constraint_last_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_RES]);

        // x0' <- x0 * (x1 + 1)
        yield_constr.constraint_transition(
            vars.next_values[0] - vars.local_values[0] * (vars.local_values[1] + FE::ONE),
        );
        // x1' <- x1 + 1
        yield_constr.constraint_transition(vars.next_values[1] - vars.local_values[1] - FE::ONE);
    }

    fn eval_ext_recursively(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // Check public inputs.
        let pis_constraints = [
            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_RES]),
        ];
        yield_constr.constraint_first_row(builder, pis_constraints[0]);
        yield_constr.constraint_last_row(builder, pis_constraints[1]);

        let one = builder.one_extension();
        // x0' <- x0 * (x1 + 1)
        let first_col_constraint = {
            let tmp1 = builder.add_extension(vars.local_values[1], one);
            let tmp2 = builder.mul_extension(vars.local_values[0], tmp1);
            builder.sub_extension(vars.next_values[0], tmp2)
        };
        yield_constr.constraint_transition(builder, first_col_constraint);
        // x1' <- x1 + 1
        let second_col_constraint = {
            let tmp = builder.add_extension(vars.local_values[1], one);
            builder.sub_extension(vars.next_values[1], tmp)
        };
        yield_constr.constraint_transition(builder, second_col_constraint);
    }

    fn constraint_degree(&self) -> usize {
        2
    }
}
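
The two transition constraints vanish on every consecutive pair of rows produced by the `factorial_trace` sketch above; a direct integer check:

// Check FactorialStark's transition constraints on consecutive rows:
// next[0] - local[0] * (local[1] + 1) == 0 and next[1] - local[1] - 1 == 0.
fn transitions_hold(rows: &[[u64; 2]]) -> bool {
    rows.windows(2).all(|w| {
        let (local, next) = (w[0], w[1]);
        next[0] == local[0] * (local[1] + 1) && next[1] == local[1] + 1
    })
}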

#[cfg(test)]
mod tests {
    use anyhow::Result;
    use plonky2::field::extension_field::Extendable;
    use plonky2::field::field_types::Field;
    use plonky2::hash::hash_types::RichField;
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{
        AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
    };
    use plonky2::util::timing::TimingTree;

    use crate::config::StarkConfig;
    use crate::factorial_stark::FactorialStark;
    use crate::proof::StarkProofWithPublicInputs;
    use crate::prover::prove;
    use crate::recursive_verifier::{
        add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
        set_stark_proof_with_pis_target,
    };
    use crate::stark::Stark;
    use crate::stark_testing::test_stark_low_degree;
    use crate::verifier::verify_stark_proof;

    fn factorial<F: Field>(n: usize, x0: F) -> F {
        (0..n)
            .fold((x0, F::ONE), |x, _| (x.0 * (x.1 + F::ONE), x.1 + F::ONE))
            .0
    }

    #[test]
    fn test_factorial_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FactorialStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 3;
        let public_inputs = [F::ONE, factorial(num_rows - 1, F::ONE)];
        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0]);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            public_inputs,
            &mut TimingTree::default(),
        )?;

        verify_stark_proof(stark, proof, &config)
    }

    #[test]
    fn test_factorial_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FactorialStark<F, D>;

        let num_rows = 1 << 3;
        let stark = S::new(num_rows);
        test_stark_low_degree(stark)
    }

    #[test]
    fn test_recursive_stark_verifier() -> Result<()> {
        init_logger();
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = FactorialStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_inputs = [F::ONE, factorial(num_rows - 1, F::ONE)];
        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0]);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            public_inputs,
            &mut TimingTree::default(),
        )?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
    }

    fn recursive_proof<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        S: Stark<F, D> + Copy,
        InnerC: GenericConfig<D, F = F>,
        const D: usize,
    >(
        stark: S,
        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
        inner_config: &StarkConfig,
        print_gate_counts: bool,
    ) -> Result<()>
    where
        InnerC::Hasher: AlgebraicHasher<F>,
        [(); S::COLUMNS]:,
        [(); S::PUBLIC_INPUTS]:,
        [(); C::Hasher::HASH_SIZE]:,
    {
        let circuit_config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
        let mut pw = PartialWitness::new();
        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
        let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);

        recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);

        if print_gate_counts {
            builder.print_gate_counts(0);
        }

        let data = builder.build::<C>();
        let proof = data.prove(pw)?;
        data.verify(proof)
    }

    fn init_logger() {
        let _ = env_logger::builder().format_timestamp(None).try_init();
    }
}

starky/src/permutation_stark.rs (new file, 253 lines)
@@ -0,0 +1,253 @@
use std::marker::PhantomData;

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Toy STARK system used for testing.
/// Computes a Fibonacci sequence extended with two index columns, using the state transition
/// `x0' <- x1, x1' <- x0 + x1, i' <- i + 1, j' <- j + 1`.
/// Note: The `i, j` columns are only used to test the permutation argument.
#[derive(Copy, Clone)]
struct SortingStark<F: RichField + Extendable<D>, const D: usize> {
    num_rows: usize,
    _phantom: PhantomData<F>,
}

impl<F: RichField + Extendable<D>, const D: usize> SortingStark<F, D> {
    // The first public input is `x0`.
    const PI_INDEX_X0: usize = 0;
    // The second public input is `x1`.
    const PI_INDEX_X1: usize = 1;
    // The third public input is the second element of the last row, which should be equal to the
    // `num_rows`-th Fibonacci number.
    const PI_INDEX_RES: usize = 2;

    fn new(num_rows: usize) -> Self {
        Self {
            num_rows,
            _phantom: PhantomData,
        }
    }

    /// Generate the trace using `x0, x1, 0, 1` as initial state values.
    fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
        let mut trace_rows = (0..self.num_rows)
            .scan([x0, x1, F::ZERO, F::ONE], |acc, _| {
                let tmp = *acc;
                acc[0] = tmp[1];
                acc[1] = tmp[0] + tmp[1];
                acc[2] = tmp[2] + F::ONE;
                acc[3] = tmp[3] + F::ONE;
                Some(tmp)
            })
            .collect::<Vec<_>>();
        // Wrap the last value of column 3 back to zero so that columns 2 and 3 are
        // permutations of one another.
        trace_rows[self.num_rows - 1][3] = F::ZERO;
        trace_rows_to_poly_values(trace_rows)
    }
}
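
The wrap-around makes the permutation argument declared below satisfiable: column 2 contains `0, 1, ..., num_rows - 1` and column 3 contains `1, ..., num_rows - 1` followed by `0`, the same multiset. A standalone check of that property:

// Verify that the two index columns of the toy trace are permutations of
// each other, mirroring what `PermutationPair::singletons(2, 3)` enforces.
fn index_columns_are_permutations(num_rows: u64) -> bool {
    let mut col2: Vec<u64> = (0..num_rows).collect();
    let mut col3: Vec<u64> = (1..num_rows).chain(std::iter::once(0)).collect();
    col2.sort();
    col3.sort();
    col2 == col3
}
// index_columns_are_permutations(1 << 5) == true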

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SortingStark<F, D> {
    const COLUMNS: usize = 4;
    const PUBLIC_INPUTS: usize = 3;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // Check public inputs.
        yield_constr
            .constraint_first_row(vars.local_values[0] - vars.public_inputs[Self::PI_INDEX_X0]);
        yield_constr
            .constraint_first_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_X1]);
        yield_constr
            .constraint_last_row(vars.local_values[1] - vars.public_inputs[Self::PI_INDEX_RES]);

        // x0' <- x1
        yield_constr.constraint_transition(vars.next_values[0] - vars.local_values[1]);
        // x1' <- x0 + x1
        yield_constr.constraint_transition(
            vars.next_values[1] - vars.local_values[0] - vars.local_values[1],
        );
    }

    fn eval_ext_recursively(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // Check public inputs.
        let pis_constraints = [
            builder.sub_extension(vars.local_values[0], vars.public_inputs[Self::PI_INDEX_X0]),
            builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_X1]),
            builder.sub_extension(vars.local_values[1], vars.public_inputs[Self::PI_INDEX_RES]),
        ];
        yield_constr.constraint_first_row(builder, pis_constraints[0]);
        yield_constr.constraint_first_row(builder, pis_constraints[1]);
        yield_constr.constraint_last_row(builder, pis_constraints[2]);

        // x0' <- x1
        let first_col_constraint = builder.sub_extension(vars.next_values[0], vars.local_values[1]);
        yield_constr.constraint_transition(builder, first_col_constraint);
        // x1' <- x0 + x1
        let second_col_constraint = {
            let tmp = builder.sub_extension(vars.next_values[1], vars.local_values[0]);
            builder.sub_extension(tmp, vars.local_values[1])
        };
        yield_constr.constraint_transition(builder, second_col_constraint);
    }

    fn constraint_degree(&self) -> usize {
        2
    }

    fn permutation_pairs(&self) -> Vec<PermutationPair> {
        vec![PermutationPair::singletons(2, 3)]
    }
}

#[cfg(test)]
mod tests {
    use anyhow::Result;
    use plonky2::field::extension_field::Extendable;
    use plonky2::field::field_types::Field;
    use plonky2::hash::hash_types::RichField;
    use plonky2::iop::witness::PartialWitness;
    use plonky2::plonk::circuit_builder::CircuitBuilder;
    use plonky2::plonk::circuit_data::CircuitConfig;
    use plonky2::plonk::config::{
        AlgebraicHasher, GenericConfig, Hasher, PoseidonGoldilocksConfig,
    };
    use plonky2::util::timing::TimingTree;

    use crate::config::StarkConfig;
    use crate::permutation_stark::SortingStark;
    use crate::proof::StarkProofWithPublicInputs;
    use crate::prover::prove;
    use crate::recursive_verifier::{
        add_virtual_stark_proof_with_pis, recursively_verify_stark_proof,
        set_stark_proof_with_pis_target,
    };
    use crate::stark::Stark;
    use crate::stark_testing::test_stark_low_degree;
    use crate::verifier::verify_stark_proof;

    fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
        (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
    }

    #[test]
    fn test_sorting_stark() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = SortingStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            public_inputs,
            &mut TimingTree::default(),
        )?;

        verify_stark_proof(stark, proof, &config)
    }

    #[test]
    fn test_sorting_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = SortingStark<F, D>;

        let num_rows = 1 << 5;
        let stark = S::new(num_rows);
        test_stark_low_degree(stark)
    }

    #[test]
    fn test_recursive_stark_verifier() -> Result<()> {
        init_logger();
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = SortingStark<F, D>;

        let config = StarkConfig::standard_fast_config();
        let num_rows = 1 << 5;
        let public_inputs = [F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)];
        let stark = S::new(num_rows);
        let trace = stark.generate_trace(public_inputs[0], public_inputs[1]);
        let proof = prove::<F, C, S, D>(
            stark,
            &config,
            trace,
            public_inputs,
            &mut TimingTree::default(),
        )?;
        verify_stark_proof(stark, proof.clone(), &config)?;

        recursive_proof::<F, C, S, C, D>(stark, proof, &config, true)
    }

    fn recursive_proof<
        F: RichField + Extendable<D>,
        C: GenericConfig<D, F = F>,
        S: Stark<F, D> + Copy,
        InnerC: GenericConfig<D, F = F>,
        const D: usize,
    >(
        stark: S,
        inner_proof: StarkProofWithPublicInputs<F, InnerC, D>,
        inner_config: &StarkConfig,
        print_gate_counts: bool,
    ) -> Result<()>
    where
        InnerC::Hasher: AlgebraicHasher<F>,
        [(); S::COLUMNS]:,
        [(); S::PUBLIC_INPUTS]:,
        [(); C::Hasher::HASH_SIZE]:,
    {
        let circuit_config = CircuitConfig::standard_recursion_config();
        let mut builder = CircuitBuilder::<F, D>::new(circuit_config);
        let mut pw = PartialWitness::new();
        let degree_bits = inner_proof.proof.recover_degree_bits(inner_config);
        let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits);
        set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof);

        recursively_verify_stark_proof::<F, InnerC, S, D>(&mut builder, stark, pt, inner_config);

        if print_gate_counts {
            builder.print_gate_counts(0);
        }

        let data = builder.build::<C>();
        let proof = data.prove(pw)?;
        data.verify(proof)
    }

    fn init_logger() {
        let _ = env_logger::builder().format_timestamp(None).try_init();
    }
}