Beginning of STARK implementation (#413)

* Beginning of STARK implementation

* PR feedback

* minor

* Suppress warnings for now
Daniel Lubarov 2022-01-26 00:09:29 -08:00 committed by GitHub
parent 483799746b
commit c0ac79e2e1
23 changed files with 808 additions and 11 deletions

Cargo.toml

@@ -1,5 +1,5 @@
[workspace]
members = ["field", "insertion", "plonky2", "util", "waksman"]
members = ["field", "insertion", "plonky2", "starky", "system_zero", "util", "waksman"]
[profile.release]
opt-level = 3

plonky2/src/fri/oracle.rs

@@ -36,7 +36,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
PolynomialBatch<F, C, D>
{
/// Creates a list polynomial commitment for the polynomials interpolating the values in `values`.
-pub(crate) fn from_values(
+pub fn from_values(
values: Vec<PolynomialValues<F>>,
rate_bits: usize,
blinding: bool,
@@ -61,7 +61,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
}
/// Creates a list polynomial commitment for the polynomials `polynomials`.
-pub(crate) fn from_coeffs(
+pub fn from_coeffs(
polynomials: Vec<PolynomialCoeffs<F>>,
rate_bits: usize,
blinding: bool,

plonky2/src/fri/reduction_strategies.rs

@@ -22,7 +22,7 @@ pub enum FriReductionStrategy {
impl FriReductionStrategy {
/// The arity of each FRI reduction step, expressed as the log2 of the actual arity.
-pub(crate) fn reduction_arity_bits(
+pub fn reduction_arity_bits(
&self,
mut degree_bits: usize,
rate_bits: usize,

plonky2/src/hash/poseidon.rs

@@ -21,10 +21,10 @@ use crate::plonk::config::{AlgebraicHasher, Hasher};
//
// NB: Changing any of these values will require regenerating all of
// the precomputed constant arrays in this file.
-pub(crate) const HALF_N_FULL_ROUNDS: usize = 4;
+pub const HALF_N_FULL_ROUNDS: usize = 4;
pub(crate) const N_FULL_ROUNDS_TOTAL: usize = 2 * HALF_N_FULL_ROUNDS;
-pub(crate) const N_PARTIAL_ROUNDS: usize = 22;
-pub(crate) const N_ROUNDS: usize = N_FULL_ROUNDS_TOTAL + N_PARTIAL_ROUNDS;
+pub const N_PARTIAL_ROUNDS: usize = 22;
+pub const N_ROUNDS: usize = N_FULL_ROUNDS_TOTAL + N_PARTIAL_ROUNDS;
const MAX_WIDTH: usize = 12; // we only have width 8 and 12, and 12 is bigger. :)
#[inline(always)]

plonky2/src/iop/mod.rs

@@ -1,5 +1,5 @@
//! Logic common to multiple IOPs.
-pub(crate) mod challenger;
+pub mod challenger;
pub mod ext_target;
pub mod generator;
pub mod target;

plonky2/src/plonk/circuit_builder.rs

@@ -389,7 +389,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
let fri_config = &self.config.fri_config;
let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
degree_bits,
-self.config.fri_config.rate_bits,
+fri_config.rate_bits,
fri_config.num_query_rounds,
);
FriParams {

plonky2/src/util/mod.rs

@@ -6,8 +6,8 @@ pub(crate) mod marking;
pub(crate) mod partial_products;
pub mod reducing;
pub mod serialization;
-pub(crate) mod strided_view;
-pub(crate) mod timing;
+pub mod strided_view;
+pub mod timing;
pub(crate) fn transpose_poly_values<F: Field>(polys: Vec<PolynomialValues<F>>) -> Vec<Vec<F>> {
let poly_values = polys.into_iter().map(|p| p.values).collect::<Vec<_>>();

starky/Cargo.toml (new file)

@@ -0,0 +1,14 @@
[package]
name = "starky"
description = "Implementation of STARKs"
version = "0.1.0"
edition = "2021"
[dependencies]
plonky2 = { path = "../plonky2" }
plonky2_util = { path = "../util" }
anyhow = "1.0.40"
env_logger = "0.9.0"
itertools = "0.10.0"
log = "0.4.14"
rayon = "1.5.1"

starky/src/config.rs (new file)

@@ -0,0 +1,45 @@
use plonky2::fri::reduction_strategies::FriReductionStrategy;
use plonky2::fri::{FriConfig, FriParams};
pub struct StarkConfig {
pub security_bits: usize,
/// The number of challenge points to generate, for IOPs that have soundness errors of (roughly)
/// `degree / |F|`.
pub num_challenges: usize,
pub fri_config: FriConfig,
}
impl StarkConfig {
/// A typical configuration with rate 1/2 (`rate_bits = 1`), resulting in fast but large proofs.
/// Targets ~100 bit conjectured security.
pub fn standard_fast_config() -> Self {
Self {
security_bits: 100,
num_challenges: 2,
fri_config: FriConfig {
rate_bits: 1,
cap_height: 4,
proof_of_work_bits: 10,
reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
num_query_rounds: 90,
},
}
}
pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
let fri_config = &self.fri_config;
let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
degree_bits,
fri_config.rate_bits,
fri_config.num_query_rounds,
);
FriParams {
config: fri_config.clone(),
hiding: false,
degree_bits,
reduction_arity_bits,
}
}
}
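
The ~100-bit target can be sanity-checked with the usual conjectured FRI soundness estimate: each query contributes roughly `rate_bits` bits of security, and grinding adds `proof_of_work_bits`. A back-of-the-envelope check for `standard_fast_config` (an illustrative approximation, not the precise bound):

fn main() {
    // rate_bits * num_query_rounds + proof_of_work_bits
    let conjectured_bits = 1 * 90 + 10;
    assert_eq!(conjectured_bits, 100); // matches `security_bits: 100`
}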

starky/src/constraint_consumer.rs (new file)

@@ -0,0 +1,110 @@
use std::marker::PhantomData;
use plonky2::field::extension_field::Extendable;
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
pub struct ConstraintConsumer<P: PackedField> {
/// A random value used to combine multiple constraints into one.
alpha: P::Scalar,
/// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
constraint_acc: P,
/// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
/// with the first trace row, and zero at other points in the subgroup.
lagrange_basis_first: P::Scalar,
/// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
/// with the last trace row, and zero at other points in the subgroup.
lagrange_basis_last: P::Scalar,
}
impl<P: PackedField> ConstraintConsumer<P> {
/// Add one constraint.
pub fn one(&mut self, constraint: P) {
self.constraint_acc *= self.alpha;
self.constraint_acc += constraint;
}
/// Add a series of constraints.
pub fn many(&mut self, constraints: impl IntoIterator<Item = P>) {
constraints
.into_iter()
.for_each(|constraint| self.one(constraint));
}
/// Add one constraint, but first multiply it by a filter such that it will only apply to the
/// first row of the trace.
pub fn one_first_row(&mut self, constraint: P) {
self.one(constraint * self.lagrange_basis_first);
}
/// Add one constraint, but first multiply it by a filter such that it will only apply to the
/// last row of the trace.
pub fn one_last_row(&mut self, constraint: P) {
self.one(constraint * self.lagrange_basis_last);
}
}
pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
/// A random value used to combine multiple constraints into one.
alpha: Target,
/// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
constraint_acc: ExtensionTarget<D>,
/// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
/// with the first trace row, and zero at other points in the subgroup.
lagrange_basis_first: ExtensionTarget<D>,
/// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
/// with the last trace row, and zero at other points in the subgroup.
lagrange_basis_last: ExtensionTarget<D>,
_phantom: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
/// Add one constraint.
pub fn one(&mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>) {
self.constraint_acc =
builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint);
}
/// Add a series of constraints.
pub fn many(
&mut self,
builder: &mut CircuitBuilder<F, D>,
constraints: impl IntoIterator<Item = ExtensionTarget<D>>,
) {
constraints
.into_iter()
.for_each(|constraint| self.one(builder, constraint));
}
/// Add one constraint, but first multiply it by a filter such that it will only apply to the
/// first row of the trace.
pub fn one_first_row(
&mut self,
builder: &mut CircuitBuilder<F, D>,
constraint: ExtensionTarget<D>,
) {
let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
self.one(builder, filtered_constraint);
}
/// Add one constraint, but first multiply it by a filter such that it will only apply to the
/// last row of the trace.
pub fn one_last_row(
&mut self,
builder: &mut CircuitBuilder<F, D>,
constraint: ExtensionTarget<D>,
) {
let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
self.one(builder, filtered_constraint);
}
}
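
The fold in `one` is Horner's rule: after feeding constraints c_0, ..., c_{n-1}, `constraint_acc` holds sum_i alpha^(n-1-i) * c_i, a random linear combination that is nonzero with high probability whenever any single constraint is violated. A tiny standalone check over plain integers (illustrative only; the real accumulator works over a packed field):

fn fold(alpha: u64, constraints: &[u64]) -> u64 {
    let mut acc = 0;
    for &c in constraints {
        acc = acc * alpha + c; // the same recurrence as `ConstraintConsumer::one`
    }
    acc
}

fn main() {
    // 7 * alpha^2 + 11 * alpha + 13 at alpha = 3 is 63 + 33 + 13 = 109.
    assert_eq!(fold(3, &[7, 11, 13]), 109);
}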

starky/src/lib.rs (new file)

@@ -0,0 +1,14 @@
// TODO: Remove these when crate is closer to being finished.
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unreachable_code)]
#![allow(clippy::diverging_sub_expression)]
#![allow(incomplete_features)]
#![feature(generic_const_exprs)]
pub mod config;
pub mod constraint_consumer;
pub mod proof;
pub mod prover;
pub mod stark;
pub mod vars;

starky/src/proof.rs (new file)

@@ -0,0 +1,35 @@
use plonky2::field::extension_field::Extendable;
use plonky2::fri::proof::{CompressedFriProof, FriProof};
use plonky2::hash::hash_types::RichField;
use plonky2::hash::merkle_tree::MerkleCap;
use plonky2::plonk::config::GenericConfig;
pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: FriProof<F, C::Hasher, D>,
}
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}
/// Purported values of each polynomial at the challenge point.
pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
/// Openings of the trace polynomials at the challenge point `zeta`.
pub local_values: Vec<F::Extension>,
/// Openings of the trace polynomials at `g * zeta`, i.e. the next row.
pub next_values: Vec<F::Extension>,
/// Openings of the permutation argument's `Z` polynomials.
pub permutation_zs: Vec<F::Extension>,
/// Openings of the chunks of the quotient polynomial.
pub quotient_polys: Vec<F::Extension>,
}

starky/src/prover.rs (new file)

@@ -0,0 +1,83 @@
use itertools::Itertools;
use plonky2::field::extension_field::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::prover::fri_proof;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use plonky2::util::transpose;
use plonky2_util::log2_strict;
use rayon::prelude::*;
use crate::config::StarkConfig;
use crate::proof::StarkProof;
use crate::stark::Stark;
pub fn prove<F, C, S, const D: usize>(
stark: S,
config: StarkConfig,
trace: Vec<[F; S::COLUMNS]>,
timing: &mut TimingTree,
) -> StarkProof<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
{
let degree_bits = log2_strict(trace.len());
let trace_vecs = trace.into_iter().map(|row| row.to_vec()).collect_vec();
let trace_col_major: Vec<Vec<F>> = transpose(&trace_vecs);
let trace_poly_values: Vec<PolynomialValues<F>> = timed!(
timing,
"compute trace polynomials",
trace_col_major
.par_iter()
.map(|column| PolynomialValues::new(column.clone()))
.collect()
);
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
let trace_commitment = timed!(
timing,
"compute trace commitment",
PolynomialBatch::<F, C, D>::from_values(
trace_poly_values,
rate_bits,
false,
cap_height,
timing,
None,
)
);
let trace_cap = trace_commitment.merkle_tree.cap;
// TODO: The opening set, quotient polynomial, and batch FRI oracles are not yet computed.
let openings = todo!();
let initial_merkle_trees = todo!();
let lde_polynomial_coeffs = todo!();
let lde_polynomial_values = todo!();
let mut challenger = Challenger::new();
let fri_params = config.fri_params(degree_bits);
let opening_proof = fri_proof::<F, C, D>(
initial_merkle_trees,
lde_polynomial_coeffs,
lde_polynomial_values,
&mut challenger,
&fri_params,
timing,
);
StarkProof {
trace_cap,
openings,
opening_proof,
}
}
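
Note that `prove` calls `log2_strict(trace.len())`, which panics unless the trace length is a power of two, so callers are expected to pad their traces first. A minimal padding helper (hypothetical, not part of this commit; repeating the last row may not satisfy a given system's constraints):

use plonky2::field::field_types::Field;

fn pad_trace<F: Field, const COLUMNS: usize>(trace: &mut Vec<[F; COLUMNS]>) {
    // Repeat the final row until the length is a power of two.
    let target_len = trace.len().next_power_of_two();
    let last_row = *trace.last().expect("trace must be non-empty");
    trace.resize(target_len, last_row);
}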

starky/src/stark.rs (new file)

@@ -0,0 +1,62 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
/// Represents a STARK system.
pub trait Stark<F: RichField + Extendable<D>, const D: usize> {
/// The total number of columns in the trace.
const COLUMNS: usize;
/// The number of public inputs.
const PUBLIC_INPUTS: usize;
/// Evaluate constraints at a vector of points.
///
/// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us
/// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`
/// and `D2 = 1`, in which case we are using the trivial extension, i.e. just evaluating
/// constraints over `F`.
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;
/// Evaluate constraints at a vector of points from the base field `F`.
fn eval_packed_base<P: PackedField<Scalar = F>>(
&self,
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut ConstraintConsumer<P>,
) {
self.eval_packed_generic(vars, yield_constr)
}
/// Evaluate constraints at a single point from the degree `D` extension field.
fn eval_ext(
&self,
vars: StarkEvaluationVars<
F::Extension,
F::Extension,
{ Self::COLUMNS },
{ Self::PUBLIC_INPUTS },
>,
yield_constr: &mut ConstraintConsumer<F::Extension>,
) {
self.eval_packed_generic(vars, yield_constr)
}
/// Evaluate constraints at a vector of points from the degree `D` extension field. This is like
/// `eval_ext`, except in the context of a recursive circuit.
fn eval_ext_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
);
}
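
As a concrete illustration of the trait, here is a minimal hypothetical implementation (not part of this commit) for a two-column Fibonacci trace, with transition constraints x0' = x1 and x1' = x0 + x1; boundary constraints (e.g. via `one_first_row`) are omitted, and the consuming crate needs the same `generic_const_exprs` nightly feature that starky enables:

use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::stark::Stark;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

struct FibonacciStark;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark {
    const COLUMNS: usize = 2;
    const PUBLIC_INPUTS: usize = 0;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // x0' = x1
        yield_constr.one(vars.next_values[0] - vars.local_values[1]);
        // x1' = x0 + x1
        yield_constr.one(vars.next_values[1] - vars.local_values[0] - vars.local_values[1]);
    }

    fn eval_ext_recursively(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // x0' = x1
        let c0 = builder.sub_extension(vars.next_values[0], vars.local_values[1]);
        yield_constr.one(builder, c0);
        // x1' = x0 + x1
        let sum = builder.add_extension(vars.local_values[0], vars.local_values[1]);
        let c1 = builder.sub_extension(vars.next_values[1], sum);
        yield_constr.one(builder, c1);
    }
}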

starky/src/vars.rs (new file)

@@ -0,0 +1,26 @@
use plonky2::field::field_types::Field;
use plonky2::field::packed_field::PackedField;
use plonky2::iop::ext_target::ExtensionTarget;
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize>
where
F: Field,
P: PackedField<Scalar = F>,
{
pub local_values: &'a [P; COLUMNS],
pub next_values: &'a [P; COLUMNS],
pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS],
}
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationTargets<
'a,
const D: usize,
const COLUMNS: usize,
const PUBLIC_INPUTS: usize,
> {
pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
pub public_inputs: &'a [ExtensionTarget<D>; PUBLIC_INPUTS],
}
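
Since these are just references to arrays, evaluation vars can be constructed by hand, e.g. to unit-test a constraint evaluator without running the prover. A sketch (assuming a hypothetical two-column, zero-public-input system, with the base field itself serving as the packed type):

use plonky2::field::field_types::Field;
use plonky2::field::goldilocks_field::GoldilocksField;
use starky::vars::StarkEvaluationVars;

#[test]
fn build_vars_by_hand() {
    type F = GoldilocksField;
    // Rows (1, 1) -> (1, 2) of a Fibonacci-style trace.
    let local = [F::ONE, F::ONE];
    let next = [F::ONE, F::TWO];
    let public_inputs: [F; 0] = [];
    let vars = StarkEvaluationVars::<F, F, 2, 0> {
        local_values: &local,
        next_values: &next,
        public_inputs: &public_inputs,
    };
    // `vars` can now be passed to an `eval_packed_base` implementation.
    let _ = vars;
}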

system_zero/Cargo.toml (new file)

@@ -0,0 +1,12 @@
[package]
name = "system_zero"
description = "A VM whose execution can be verified with STARKs; designed for proving Ethereum transactions"
version = "0.1.0"
edition = "2021"
[dependencies]
plonky2 = { path = "../plonky2" }
starky = { path = "../starky" }
anyhow = "1.0.40"
env_logger = "0.9.0"
log = "0.4.14"

system_zero/src/column_layout.rs (new file)

@@ -0,0 +1,86 @@
use plonky2::hash::hashing::SPONGE_WIDTH;
use plonky2::hash::poseidon;
//// CORE REGISTERS
/// A cycle counter. Starts at 0; increments by 1.
pub(crate) const COL_CLOCK: usize = 0;
/// A column which contains the values `[0, ... 2^16 - 1]`, potentially with duplicates. Used for
/// 16-bit range checks.
///
/// For ease of verification, we enforce that it must begin with 0 and end with `2^16 - 1`, and each
/// delta must be either 0 or 1.
pub(crate) const COL_RANGE_16: usize = COL_CLOCK + 1;
/// Pointer to the current instruction.
pub(crate) const COL_INSTRUCTION_PTR: usize = COL_RANGE_16 + 1;
/// Pointer to the base of the current call's stack frame.
pub(crate) const COL_FRAME_PTR: usize = COL_INSTRUCTION_PTR + 1;
/// Pointer to the tip of the current call's stack frame.
pub(crate) const COL_STACK_PTR: usize = COL_FRAME_PTR + 1;
//// PERMUTATION UNIT
const START_PERMUTATION_UNIT: usize = COL_STACK_PTR + 1;
pub(crate) const fn col_permutation_full_first(round: usize, i: usize) -> usize {
debug_assert!(round < poseidon::HALF_N_FULL_ROUNDS);
debug_assert!(i < SPONGE_WIDTH);
START_PERMUTATION_UNIT + round * SPONGE_WIDTH + i
}
const START_PERMUTATION_PARTIAL: usize =
col_permutation_full_first(poseidon::HALF_N_FULL_ROUNDS - 1, SPONGE_WIDTH - 1) + 1;
pub(crate) const fn col_permutation_partial(round: usize) -> usize {
debug_assert!(round < poseidon::N_PARTIAL_ROUNDS);
START_PERMUTATION_PARTIAL + round
}
// The second bank of full-round columns starts after the partial-round columns.
const START_PERMUTATION_FULL_SECOND: usize =
col_permutation_partial(poseidon::N_PARTIAL_ROUNDS - 1) + 1;
pub(crate) const fn col_permutation_full_second(round: usize, i: usize) -> usize {
// `round == HALF_N_FULL_ROUNDS` addresses the permutation outputs; see `col_permutation_output`.
debug_assert!(round <= poseidon::HALF_N_FULL_ROUNDS);
debug_assert!(i < SPONGE_WIDTH);
START_PERMUTATION_FULL_SECOND + round * SPONGE_WIDTH + i
}
pub(crate) const fn col_permutation_input(i: usize) -> usize {
col_permutation_full_first(0, i)
}
pub(crate) const fn col_permutation_output(i: usize) -> usize {
debug_assert!(i < SPONGE_WIDTH);
col_permutation_full_second(poseidon::HALF_N_FULL_ROUNDS, i)
}
const END_PERMUTATION_UNIT: usize = col_permutation_output(SPONGE_WIDTH - 1);
//// MEMORY UNITS
//// DECOMPOSITION UNITS
const START_DECOMPOSITION_UNITS: usize = END_PERMUTATION_UNIT + 1;
const NUM_DECOMPOSITION_UNITS: usize = 4;
/// The number of bits associated with a single decomposition unit.
const DECOMPOSITION_UNIT_BITS: usize = 32;
/// One column for the value being decomposed, plus one column per bit.
const DECOMPOSITION_UNIT_COLS: usize = 1 + DECOMPOSITION_UNIT_BITS;
pub(crate) const fn col_decomposition_input(unit: usize) -> usize {
debug_assert!(unit < NUM_DECOMPOSITION_UNITS);
START_DECOMPOSITION_UNITS + unit * DECOMPOSITION_UNIT_COLS
}
pub(crate) const fn col_decomposition_bit(unit: usize, bit: usize) -> usize {
debug_assert!(unit < NUM_DECOMPOSITION_UNITS);
debug_assert!(bit < DECOMPOSITION_UNIT_BITS);
START_DECOMPOSITION_UNITS + unit * DECOMPOSITION_UNIT_COLS + 1 + bit
}
const END_DECOMPOSITION_UNITS: usize =
START_DECOMPOSITION_UNITS + DECOMPOSITION_UNIT_COLS * NUM_DECOMPOSITION_UNITS;
pub(crate) const NUM_COLUMNS: usize = END_DECOMPOSITION_UNITS;
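
Because these offsets are chained together by hand, overlap bugs are easy to introduce; a small test (a sketch, not part of this commit) can check that each bank of columns starts strictly after the previous one ends:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn column_banks_do_not_overlap() {
        assert!(START_PERMUTATION_PARTIAL
            > col_permutation_full_first(poseidon::HALF_N_FULL_ROUNDS - 1, SPONGE_WIDTH - 1));
        assert!(START_PERMUTATION_FULL_SECOND
            > col_permutation_partial(poseidon::N_PARTIAL_ROUNDS - 1));
        assert!(START_DECOMPOSITION_UNITS > END_PERMUTATION_UNIT);
    }
}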

system_zero/src/core_registers.rs (new file)

@@ -0,0 +1,80 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use crate::column_layout::{
COL_CLOCK, COL_FRAME_PTR, COL_INSTRUCTION_PTR, COL_RANGE_16, COL_STACK_PTR, NUM_COLUMNS,
};
use crate::public_input_layout::NUM_PUBLIC_INPUTS;
use crate::system_zero::SystemZero;
impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
pub(crate) fn generate_first_row_core_registers(&self, first_values: &mut [F; NUM_COLUMNS]) {
first_values[COL_CLOCK] = F::ZERO;
first_values[COL_RANGE_16] = F::ZERO;
first_values[COL_INSTRUCTION_PTR] = F::ZERO;
first_values[COL_FRAME_PTR] = F::ZERO;
first_values[COL_STACK_PTR] = F::ZERO;
}
pub(crate) fn generate_next_row_core_registers(
&self,
local_values: &[F; NUM_COLUMNS],
next_values: &mut [F; NUM_COLUMNS],
) {
// We increment the clock by 1.
next_values[COL_CLOCK] = local_values[COL_CLOCK] + F::ONE;
// We increment the 16-bit table by 1, unless we've reached the max value of 2^16 - 1, in
// which case we repeat that value.
let prev_range_16 = local_values[COL_RANGE_16].to_canonical_u64();
let next_range_16 = (prev_range_16 + 1).min((1 << 16) - 1);
next_values[COL_RANGE_16] = F::from_canonical_u64(next_range_16);
next_values[COL_INSTRUCTION_PTR] = todo!();
next_values[COL_FRAME_PTR] = todo!();
next_values[COL_STACK_PTR] = todo!();
}
#[inline]
pub(crate) fn eval_core_registers<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
// The clock must start with 0, and increment by 1.
let local_clock = vars.local_values[COL_CLOCK];
let next_clock = vars.next_values[COL_CLOCK];
let delta_clock = next_clock - local_clock;
yield_constr.one_first_row(local_clock);
yield_constr.one(delta_clock - FE::ONE);
// The 16-bit table must start with 0, end with 2^16 - 1, and increment by 0 or 1.
let local_range_16 = vars.local_values[COL_RANGE_16];
let next_range_16 = vars.next_values[COL_RANGE_16];
let delta_range_16 = next_range_16 - local_range_16;
yield_constr.one_first_row(local_range_16);
yield_constr.one_last_row(local_range_16 - FE::from_canonical_u64((1 << 16) - 1));
yield_constr.one(delta_range_16 * (delta_range_16 - FE::ONE));
todo!()
}
pub(crate) fn eval_core_registers_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
todo!()
}
}
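
The `COL_RANGE_16` constraints work because a sequence that starts at 0, ends at 2^16 - 1, and changes by 0 or 1 at each step must pass through every 16-bit value; this is also why the trace needs at least 2^16 rows. An illustrative standalone predicate (not part of this commit):

/// True iff `col` starts at 0, ends at 2^16 - 1, and each delta is 0 or 1,
/// which forces every value in [0, 2^16) to appear somewhere in `col`.
fn is_valid_range_16_column(col: &[u64]) -> bool {
    col.first() == Some(&0)
        && col.last() == Some(&((1 << 16) - 1))
        && col.windows(2).all(|w| w[1] == w[0] || w[1] == w[0] + 1)
}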

system_zero/src/lib.rs (new file)

@@ -0,0 +1,12 @@
// TODO: Remove these when crate is closer to being finished.
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unreachable_code)]
#![allow(clippy::diverging_sub_expression)]
mod column_layout;
mod core_registers;
mod memory;
mod permutation_unit;
mod public_input_layout;
pub mod system_zero;

system_zero/src/memory.rs (new file)

@@ -0,0 +1,16 @@
/// The memory of an entire transaction: one `ContractMemory` per contract call.
#[derive(Default)]
pub struct TransactionMemory {
pub calls: Vec<ContractMemory>,
}
/// A virtual memory space specific to the current contract call.
pub struct ContractMemory {
pub code: MemorySegment,
pub main: MemorySegment,
pub calldata: MemorySegment,
pub returndata: MemorySegment,
}
pub struct MemorySegment {
pub content: Vec<u8>,
}

system_zero/src/permutation_unit.rs (new file)

@@ -0,0 +1,86 @@
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::hash::hashing::SPONGE_WIDTH;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use crate::column_layout::{col_permutation_input, col_permutation_output, NUM_COLUMNS};
use crate::public_input_layout::NUM_PUBLIC_INPUTS;
use crate::system_zero::SystemZero;
impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
pub(crate) fn generate_permutation_unit(&self, values: &mut [F; NUM_COLUMNS]) {
// Load inputs.
let mut state = [F::ZERO; SPONGE_WIDTH];
for i in 0..SPONGE_WIDTH {
state[i] = values[col_permutation_input(i)];
}
// TODO: First full rounds.
// TODO: Partial rounds.
// TODO: Second full rounds.
// Write outputs.
for i in 0..SPONGE_WIDTH {
values[col_permutation_output(i)] = state[i];
}
}
#[inline]
pub(crate) fn eval_permutation_unit<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let local_values = &vars.local_values;
// Load inputs.
let mut state = [P::ZEROS; SPONGE_WIDTH];
for i in 0..SPONGE_WIDTH {
state[i] = local_values[col_permutation_input(i)];
}
// TODO: First full rounds.
// TODO: Partial rounds.
// TODO: Second full rounds.
// Assert that the computed output matches the outputs in the trace.
for i in 0..SPONGE_WIDTH {
let out = local_values[col_permutation_output(i)];
yield_constr.one(state[i] - out);
}
}
pub(crate) fn eval_permutation_unit_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let zero = builder.zero_extension();
let local_values = &vars.local_values;
// Load inputs.
let mut state = [zero; SPONGE_WIDTH];
for i in 0..SPONGE_WIDTH {
state[i] = local_values[col_permutation_input(i)];
}
// TODO: First full rounds.
// TODO: Partial rounds.
// TODO: Second full rounds.
// Assert that the computed output matches the outputs in the trace.
for i in 0..SPONGE_WIDTH {
let out = local_values[col_permutation_output(i)];
let diff = builder.sub_extension(state[i], out);
yield_constr.one(builder, diff);
}
}
}
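
When the TODO rounds are filled in, each full round will apply Poseidon's degree-7 S-box x -> x^7 to every state element, followed by round-constant addition and the MDS mix. A sketch of just the S-box monomial over a packed field (illustrative; constants and the MDS matrix are omitted):

use plonky2::field::packed_field::PackedField;

/// Computes x^7 with four multiplications: x^2, then x^3 and x^4, then x^4 * x^3.
fn sbox_monomial<P: PackedField>(x: P) -> P {
    let x2 = x * x;
    let x3 = x2 * x;
    let x4 = x2 * x2;
    x4 * x3
}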

system_zero/src/public_input_layout.rs (new file)

@@ -0,0 +1,7 @@
/// The previous state root, before these transactions were executed.
const PI_OLD_STATE_ROOT: usize = 0;
/// The updated state root, after these transactions were executed.
const PI_NEW_STATE_ROOT: usize = PI_OLD_STATE_ROOT + 1;
pub(crate) const NUM_PUBLIC_INPUTS: usize = PI_NEW_STATE_ROOT + 1;

system_zero/src/system_zero.rs (new file)

@@ -0,0 +1,109 @@
use std::marker::PhantomData;
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::stark::Stark;
use starky::vars::StarkEvaluationTargets;
use starky::vars::StarkEvaluationVars;
use crate::column_layout::NUM_COLUMNS;
use crate::memory::TransactionMemory;
use crate::public_input_layout::NUM_PUBLIC_INPUTS;
/// We require at least 2^16 rows so that the `COL_RANGE_16` column can pass through every
/// 16-bit value, supporting efficient 16-bit range checks.
const MIN_TRACE_ROWS: usize = 1 << 16;
pub struct SystemZero<F: RichField + Extendable<D>, const D: usize> {
_phantom: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
fn generate_trace(&self) -> Vec<[F; NUM_COLUMNS]> {
let memory = TransactionMemory::default();
let mut row = [F::ZERO; NUM_COLUMNS];
self.generate_first_row_core_registers(&mut row);
self.generate_permutation_unit(&mut row);
let mut trace = Vec::with_capacity(MIN_TRACE_ROWS);
// TODO: This loop has no termination condition yet, so the pushes after it are
// unreachable (hence the `unreachable_code` allowance in lib.rs).
loop {
let mut next_row = [F::ZERO; NUM_COLUMNS];
self.generate_next_row_core_registers(&row, &mut next_row);
self.generate_permutation_unit(&mut next_row);
trace.push(row);
row = next_row;
}
trace.push(row);
trace
}
}
impl<F: RichField + Extendable<D>, const D: usize> Default for SystemZero<F, D> {
fn default() -> Self {
Self {
_phantom: PhantomData,
}
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SystemZero<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
self.eval_core_registers(vars, yield_constr);
self.eval_permutation_unit(vars, yield_constr);
todo!()
}
fn eval_ext_recursively(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
self.eval_core_registers_recursively(builder, vars, yield_constr);
self.eval_permutation_unit_recursively(builder, vars, yield_constr);
todo!()
}
}
#[cfg(test)]
mod tests {
use log::Level;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use starky::config::StarkConfig;
use starky::prover::prove;
use crate::system_zero::SystemZero;
#[test]
#[ignore] // TODO
fn run() {
type F = GoldilocksField;
type C = PoseidonGoldilocksConfig;
const D: usize = 2;
type S = SystemZero<F, D>;
let system = S::default();
let config = StarkConfig::standard_fast_config();
let mut timing = TimingTree::new("prove", Level::Debug);
let trace = system.generate_trace();
prove::<F, C, S, D>(system, config, trace, &mut timing);
}
}