diff --git a/Cargo.toml b/Cargo.toml
index 2bd67bb6..cc070d96 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,5 +1,5 @@
 [workspace]
-members = ["field", "insertion", "plonky2", "util", "waksman"]
+members = ["field", "insertion", "plonky2", "starky", "system_zero", "util", "waksman"]
 
 [profile.release]
 opt-level = 3
diff --git a/plonky2/src/fri/oracle.rs b/plonky2/src/fri/oracle.rs
index 02db3140..c016f0ee 100644
--- a/plonky2/src/fri/oracle.rs
+++ b/plonky2/src/fri/oracle.rs
@@ -36,7 +36,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     PolynomialBatch<F, C, D>
 {
     /// Creates a list polynomial commitment for the polynomials interpolating the values in `values`.
-    pub(crate) fn from_values(
+    pub fn from_values(
         values: Vec<PolynomialValues<F>>,
         rate_bits: usize,
         blinding: bool,
@@ -61,7 +61,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
     }
 
     /// Creates a list polynomial commitment for the polynomials `polynomials`.
-    pub(crate) fn from_coeffs(
+    pub fn from_coeffs(
         polynomials: Vec<PolynomialCoeffs<F>>,
         rate_bits: usize,
         blinding: bool,
diff --git a/plonky2/src/fri/reduction_strategies.rs b/plonky2/src/fri/reduction_strategies.rs
index c0423c2c..49eda3ba 100644
--- a/plonky2/src/fri/reduction_strategies.rs
+++ b/plonky2/src/fri/reduction_strategies.rs
@@ -22,7 +22,7 @@ pub enum FriReductionStrategy {
 
 impl FriReductionStrategy {
     /// The arity of each FRI reduction step, expressed as the log2 of the actual arity.
-    pub(crate) fn reduction_arity_bits(
+    pub fn reduction_arity_bits(
         &self,
         mut degree_bits: usize,
         rate_bits: usize,
diff --git a/plonky2/src/hash/poseidon.rs b/plonky2/src/hash/poseidon.rs
index 81fc3937..9dc5f394 100644
--- a/plonky2/src/hash/poseidon.rs
+++ b/plonky2/src/hash/poseidon.rs
@@ -21,10 +21,10 @@ use crate::plonk::config::{AlgebraicHasher, Hasher};
 //
 // NB: Changing any of these values will require regenerating all of
 // the precomputed constant arrays in this file.
-pub(crate) const HALF_N_FULL_ROUNDS: usize = 4;
+pub const HALF_N_FULL_ROUNDS: usize = 4;
 pub(crate) const N_FULL_ROUNDS_TOTAL: usize = 2 * HALF_N_FULL_ROUNDS;
-pub(crate) const N_PARTIAL_ROUNDS: usize = 22;
-pub(crate) const N_ROUNDS: usize = N_FULL_ROUNDS_TOTAL + N_PARTIAL_ROUNDS;
+pub const N_PARTIAL_ROUNDS: usize = 22;
+pub const N_ROUNDS: usize = N_FULL_ROUNDS_TOTAL + N_PARTIAL_ROUNDS;
 const MAX_WIDTH: usize = 12; // we only have width 8 and 12, and 12 is bigger. :)
 
 #[inline(always)]
diff --git a/plonky2/src/iop/mod.rs b/plonky2/src/iop/mod.rs
index cc11fb56..de315a09 100644
--- a/plonky2/src/iop/mod.rs
+++ b/plonky2/src/iop/mod.rs
@@ -1,5 +1,5 @@
 //! Logic common to multiple IOPs.
-pub(crate) mod challenger;
+pub mod challenger;
 pub mod ext_target;
 pub mod generator;
 pub mod target;
diff --git a/plonky2/src/plonk/circuit_builder.rs b/plonky2/src/plonk/circuit_builder.rs
index e4abe611..d9bcc1cf 100644
--- a/plonky2/src/plonk/circuit_builder.rs
+++ b/plonky2/src/plonk/circuit_builder.rs
@@ -389,7 +389,7 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
         let fri_config = &self.config.fri_config;
         let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
             degree_bits,
-            self.config.fri_config.rate_bits,
+            fri_config.rate_bits,
             fri_config.num_query_rounds,
         );
         FriParams {
diff --git a/plonky2/src/util/mod.rs b/plonky2/src/util/mod.rs
index 4cf7119a..9342a75e 100644
--- a/plonky2/src/util/mod.rs
+++ b/plonky2/src/util/mod.rs
@@ -6,8 +6,8 @@ pub(crate) mod marking;
 pub(crate) mod partial_products;
 pub mod reducing;
 pub mod serialization;
-pub(crate) mod strided_view;
-pub(crate) mod timing;
+pub mod strided_view;
+pub mod timing;
 
 pub(crate) fn transpose_poly_values<F: Field>(polys: Vec<PolynomialValues<F>>) -> Vec<Vec<F>> {
     let poly_values = polys.into_iter().map(|p| p.values).collect::<Vec<_>>();
diff --git a/starky/Cargo.toml b/starky/Cargo.toml
new file mode 100644
index 00000000..4e67856d
--- /dev/null
+++ b/starky/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "starky"
+description = "Implementation of STARKs"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+plonky2 = { path = "../plonky2" }
+plonky2_util = { path = "../util" }
+anyhow = "1.0.40"
+env_logger = "0.9.0"
+itertools = "0.10.0"
+log = "0.4.14"
+rayon = "1.5.1"
diff --git a/starky/src/config.rs b/starky/src/config.rs
new file mode 100644
index 00000000..24fb725a
--- /dev/null
+++ b/starky/src/config.rs
@@ -0,0 +1,45 @@
+use plonky2::fri::reduction_strategies::FriReductionStrategy;
+use plonky2::fri::{FriConfig, FriParams};
+
+pub struct StarkConfig {
+    pub security_bits: usize,
+
+    /// The number of challenge points to generate, for IOPs that have soundness errors of (roughly)
+    /// `degree / |F|`.
+    pub num_challenges: usize,
+
+    pub fri_config: FriConfig,
+}
+
+impl StarkConfig {
+    /// A typical configuration with a rate of 2, resulting in fast but large proofs.
+    /// Targets ~100 bit conjectured security.
+    pub fn standard_fast_config() -> Self {
+        Self {
+            security_bits: 100,
+            num_challenges: 2,
+            fri_config: FriConfig {
+                rate_bits: 1,
+                cap_height: 4,
+                proof_of_work_bits: 10,
+                reduction_strategy: FriReductionStrategy::ConstantArityBits(4, 5),
+                num_query_rounds: 90,
+            },
+        }
+    }
+
+    pub(crate) fn fri_params(&self, degree_bits: usize) -> FriParams {
+        let fri_config = &self.fri_config;
+        let reduction_arity_bits = fri_config.reduction_strategy.reduction_arity_bits(
+            degree_bits,
+            fri_config.rate_bits,
+            fri_config.num_query_rounds,
+        );
+        FriParams {
+            config: fri_config.clone(),
+            hiding: false,
+            degree_bits,
+            reduction_arity_bits,
+        }
+    }
+}
diff --git a/starky/src/constraint_consumer.rs b/starky/src/constraint_consumer.rs
new file mode 100644
index 00000000..09b5397f
--- /dev/null
+++ b/starky/src/constraint_consumer.rs
@@ -0,0 +1,110 @@
+use std::marker::PhantomData;
+
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::packed_field::PackedField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::iop::ext_target::ExtensionTarget;
+use plonky2::iop::target::Target;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+
+pub struct ConstraintConsumer<P: PackedField> {
+    /// A random value used to combine multiple constraints into one.
+    alpha: P::Scalar,
+
+    /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
+    constraint_acc: P,
+
+    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
+    /// with the first trace row, and zero at other points in the subgroup.
+    lagrange_basis_first: P::Scalar,
+
+    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
+    /// with the last trace row, and zero at other points in the subgroup.
+    lagrange_basis_last: P::Scalar,
+}
+
+impl<P: PackedField> ConstraintConsumer<P> {
+    /// Add one constraint.
+    pub fn one(&mut self, constraint: P) {
+        self.constraint_acc *= self.alpha;
+        self.constraint_acc += constraint;
+    }
+
+    /// Add a series of constraints.
+    pub fn many(&mut self, constraints: impl IntoIterator<Item = P>) {
+        constraints
+            .into_iter()
+            .for_each(|constraint| self.one(constraint));
+    }
+
+    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
+    /// first row of the trace.
+    pub fn one_first_row(&mut self, constraint: P) {
+        self.one(constraint * self.lagrange_basis_first);
+    }
+
+    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
+    /// last row of the trace.
+    pub fn one_last_row(&mut self, constraint: P) {
+        self.one(constraint * self.lagrange_basis_last);
+    }
+}
+
+pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> {
+    /// A random value used to combine multiple constraints into one.
+    alpha: Target,
+
+    /// A running sum of constraints that have been emitted so far, scaled by powers of alpha.
+    constraint_acc: ExtensionTarget<D>,
+
+    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
+    /// with the first trace row, and zero at other points in the subgroup.
+    lagrange_basis_first: ExtensionTarget<D>,
+
+    /// The evaluation of the Lagrange basis polynomial which is nonzero at the point associated
+    /// with the last trace row, and zero at other points in the subgroup.
+    lagrange_basis_last: ExtensionTarget<D>,
+
+    _phantom: PhantomData<F>,
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> {
+    /// Add one constraint.
+    pub fn one(&mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>) {
+        self.constraint_acc =
+            builder.scalar_mul_add_extension(self.alpha, self.constraint_acc, constraint);
+    }
+
+    /// Add a series of constraints.
+    pub fn many(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+        constraints: impl IntoIterator<Item = ExtensionTarget<D>>,
+    ) {
+        constraints
+            .into_iter()
+            .for_each(|constraint| self.one(builder, constraint));
+    }
+
+    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
+    /// first row of the trace.
+    pub fn one_first_row(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+        constraint: ExtensionTarget<D>,
+    ) {
+        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first);
+        self.one(builder, filtered_constraint);
+    }
+
+    /// Add one constraint, but first multiply it by a filter such that it will only apply to the
+    /// last row of the trace.
+    pub fn one_last_row(
+        &mut self,
+        builder: &mut CircuitBuilder<F, D>,
+        constraint: ExtensionTarget<D>,
+    ) {
+        let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last);
+        self.one(builder, filtered_constraint);
+    }
+}
diff --git a/starky/src/lib.rs b/starky/src/lib.rs
new file mode 100644
index 00000000..be28a01e
--- /dev/null
+++ b/starky/src/lib.rs
@@ -0,0 +1,14 @@
+// TODO: Remove these when crate is closer to being finished.
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![allow(unreachable_code)]
+#![allow(clippy::diverging_sub_expression)]
+#![allow(incomplete_features)]
+#![feature(generic_const_exprs)]
+
+pub mod config;
+pub mod constraint_consumer;
+pub mod proof;
+pub mod prover;
+pub mod stark;
+pub mod vars;
diff --git a/starky/src/proof.rs b/starky/src/proof.rs
new file mode 100644
index 00000000..1cdbbd3c
--- /dev/null
+++ b/starky/src/proof.rs
@@ -0,0 +1,35 @@
+use plonky2::field::extension_field::Extendable;
+use plonky2::fri::proof::{CompressedFriProof, FriProof};
+use plonky2::hash::hash_types::RichField;
+use plonky2::hash::merkle_tree::MerkleCap;
+use plonky2::plonk::config::GenericConfig;
+
+pub struct StarkProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
+    /// Merkle cap of LDEs of trace values.
+    pub trace_cap: MerkleCap<F, C::Hasher>,
+    /// Purported values of each polynomial at the challenge point.
+    pub openings: StarkOpeningSet<F, D>,
+    /// A batch FRI argument for all openings.
+    pub opening_proof: FriProof<F, C::Hasher, D>,
+}
+
+pub struct CompressedStarkProof<
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    const D: usize,
+> {
+    /// Merkle cap of LDEs of trace values.
+    pub trace_cap: MerkleCap<F, C::Hasher>,
+    /// Purported values of each polynomial at the challenge point.
+    pub openings: StarkOpeningSet<F, D>,
+    /// A batch FRI argument for all openings.
+    pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
+}
+
+/// Purported values of each polynomial at the challenge point.
+pub struct StarkOpeningSet<F: RichField + Extendable<D>, const D: usize> {
+    pub local_values: Vec<F::Extension>,
+    pub next_values: Vec<F::Extension>,
+    pub permutation_zs: Vec<F::Extension>,
+    pub quotient_polys: Vec<F::Extension>,
+}
diff --git a/starky/src/prover.rs b/starky/src/prover.rs
new file mode 100644
index 00000000..bda478e5
--- /dev/null
+++ b/starky/src/prover.rs
@@ -0,0 +1,83 @@
+use itertools::Itertools;
+use plonky2::field::extension_field::Extendable;
+use plonky2::field::polynomial::PolynomialValues;
+use plonky2::fri::oracle::PolynomialBatch;
+use plonky2::fri::prover::fri_proof;
+use plonky2::hash::hash_types::RichField;
+use plonky2::iop::challenger::Challenger;
+use plonky2::plonk::config::GenericConfig;
+use plonky2::timed;
+use plonky2::util::timing::TimingTree;
+use plonky2::util::transpose;
+use plonky2_util::log2_strict;
+use rayon::prelude::*;
+
+use crate::config::StarkConfig;
+use crate::proof::StarkProof;
+use crate::stark::Stark;
+
+pub fn prove<F, C, S, const D: usize>(
+    stark: S,
+    config: StarkConfig,
+    trace: Vec<[F; S::COLUMNS]>,
+    timing: &mut TimingTree,
+) -> StarkProof<F, C, D>
+where
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    S: Stark<F, D>,
+    [(); S::COLUMNS]:,
+{
+    let degree_bits = log2_strict(trace.len());
+
+    let trace_vecs = trace.into_iter().map(|row| row.to_vec()).collect_vec();
+    let trace_col_major: Vec<Vec<F>> = transpose(&trace_vecs);
+
+    let trace_poly_values: Vec<PolynomialValues<F>> = timed!(
+        timing,
+        "compute trace polynomials",
+        trace_col_major
+            .par_iter()
+            .map(|column| PolynomialValues::new(column.clone()))
+            .collect()
+    );
+
+    let rate_bits = config.fri_config.rate_bits;
+    let cap_height = config.fri_config.cap_height;
+    let trace_commitment = timed!(
+        timing,
+        "compute trace commitment",
+        PolynomialBatch::<F, C, D>::from_values(
+            trace_poly_values,
+            rate_bits,
+            false,
+            cap_height,
+            timing,
+            None,
+        )
+    );
+
+    let trace_cap = trace_commitment.merkle_tree.cap;
+    let openings = todo!();
+
+    let initial_merkle_trees = todo!();
+    let lde_polynomial_coeffs = todo!();
+    let lde_polynomial_values = todo!();
+    let mut challenger = Challenger::new();
+    let fri_params = config.fri_params(degree_bits);
+
+    let opening_proof = fri_proof::<F, C, D>(
+        initial_merkle_trees,
+        lde_polynomial_coeffs,
+        lde_polynomial_values,
+        &mut challenger,
+        &fri_params,
+        timing,
+    );
+
+    StarkProof {
+        trace_cap,
+        openings,
+        opening_proof,
+    }
+}
diff --git a/starky/src/stark.rs b/starky/src/stark.rs
new file mode 100644
index 00000000..8d6abb69
--- /dev/null
+++ b/starky/src/stark.rs
@@ -0,0 +1,62 @@
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+
+use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use crate::vars::StarkEvaluationTargets;
+use crate::vars::StarkEvaluationVars;
+
+/// Represents a STARK system.
+pub trait Stark<F: RichField + Extendable<D>, const D: usize> {
+    /// The total number of columns in the trace.
+    const COLUMNS: usize;
+    /// The number of public inputs.
+    const PUBLIC_INPUTS: usize;
+
+    /// Evaluate constraints at a vector of points.
+    ///
+    /// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us
+    /// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`
+    /// and `D2 = 1`, in which case we are using the trivial extension, i.e. just evaluating
+    /// constraints over `F`.
+    fn eval_packed_generic<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>;
+
+    /// Evaluate constraints at a vector of points from the base field `F`.
+    fn eval_packed_base<P: PackedField<Scalar = F>>(
+        &self,
+        vars: StarkEvaluationVars<F, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) {
+        self.eval_packed_generic(vars, yield_constr)
+    }
+
+    /// Evaluate constraints at a single point from the degree `D` extension field.
+    fn eval_ext(
+        &self,
+        vars: StarkEvaluationVars<
+            F::Extension,
+            F::Extension,
+            { Self::COLUMNS },
+            { Self::PUBLIC_INPUTS },
+        >,
+        yield_constr: &mut ConstraintConsumer<F::Extension>,
+    ) {
+        self.eval_packed_generic(vars, yield_constr)
+    }
+
+    /// Evaluate constraints at a vector of points from the degree `D` extension field. This is like
+    /// `eval_ext`, except in the context of a recursive circuit.
+    fn eval_ext_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    );
+}
diff --git a/starky/src/vars.rs b/starky/src/vars.rs
new file mode 100644
index 00000000..cb83aeb7
--- /dev/null
+++ b/starky/src/vars.rs
@@ -0,0 +1,26 @@
+use plonky2::field::field_types::Field;
+use plonky2::field::packed_field::PackedField;
+use plonky2::iop::ext_target::ExtensionTarget;
+
+#[derive(Debug, Copy, Clone)]
+pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize>
+where
+    F: Field,
+    P: PackedField<Scalar = F>,
+{
+    pub local_values: &'a [P; COLUMNS],
+    pub next_values: &'a [P; COLUMNS],
+    pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS],
+}
+
+#[derive(Debug, Copy, Clone)]
+pub struct StarkEvaluationTargets<
+    'a,
+    const D: usize,
+    const COLUMNS: usize,
+    const PUBLIC_INPUTS: usize,
+> {
+    pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
+    pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
+    pub public_inputs: &'a [ExtensionTarget<D>; PUBLIC_INPUTS],
+}
diff --git a/system_zero/Cargo.toml b/system_zero/Cargo.toml
new file mode 100644
index 00000000..b908dea0
--- /dev/null
+++ b/system_zero/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "system_zero"
+description = "A VM whose execution can be verified with STARKs; designed for proving Ethereum transactions"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+plonky2 = { path = "../plonky2" }
+starky = { path = "../starky" }
+anyhow = "1.0.40"
+env_logger = "0.9.0"
+log = "0.4.14"
diff --git a/system_zero/src/column_layout.rs b/system_zero/src/column_layout.rs
new file mode 100644
index 00000000..3d8fc2c0
--- /dev/null
+++ b/system_zero/src/column_layout.rs
@@ -0,0 +1,86 @@
+use plonky2::hash::hashing::SPONGE_WIDTH;
+use plonky2::hash::poseidon;
+
+//// CORE REGISTERS
+
+/// A cycle counter. Starts at 0; increments by 1.
+pub(crate) const COL_CLOCK: usize = 0;
+
+/// A column which contains the values `[0, ... 2^16 - 1]`, potentially with duplicates. Used for
+/// 16-bit range checks.
+///
+/// For ease of verification, we enforce that it must begin with 0 and end with `2^16 - 1`, and each
+/// delta must be either 0 or 1.
+pub(crate) const COL_RANGE_16: usize = COL_CLOCK + 1;
+
+/// Pointer to the current instruction.
+pub(crate) const COL_INSTRUCTION_PTR: usize = COL_RANGE_16 + 1;
+/// Pointer to the base of the current call's stack frame.
+pub(crate) const COL_FRAME_PTR: usize = COL_INSTRUCTION_PTR + 1;
+/// Pointer to the tip of the current call's stack frame.
+pub(crate) const COL_STACK_PTR: usize = COL_FRAME_PTR + 1;
+
+//// PERMUTATION UNIT
+
+const START_PERMUTATION_UNIT: usize = COL_STACK_PTR + 1;
+
+pub(crate) const fn col_permutation_full_first(round: usize, i: usize) -> usize {
+    debug_assert!(round < poseidon::HALF_N_FULL_ROUNDS);
+    debug_assert!(i < SPONGE_WIDTH);
+    START_PERMUTATION_UNIT + round * SPONGE_WIDTH + i
+}
+
+const START_PERMUTATION_PARTIAL: usize =
+    col_permutation_full_first(poseidon::HALF_N_FULL_ROUNDS - 1, SPONGE_WIDTH - 1) + 1;
+
+pub(crate) const fn col_permutation_partial(round: usize) -> usize {
+    debug_assert!(round < poseidon::N_PARTIAL_ROUNDS);
+    START_PERMUTATION_PARTIAL + round
+}
+
+const START_PERMUTATION_FULL_SECOND: usize = COL_STACK_PTR + 1;
+
+pub(crate) const fn col_permutation_full_second(round: usize, i: usize) -> usize {
+    debug_assert!(round <= poseidon::HALF_N_FULL_ROUNDS);
+    debug_assert!(i < SPONGE_WIDTH);
+    START_PERMUTATION_FULL_SECOND + round * SPONGE_WIDTH + i
+}
+
+pub(crate) const fn col_permutation_input(i: usize) -> usize {
+    col_permutation_full_first(0, i)
+}
+
+pub(crate) const fn col_permutation_output(i: usize) -> usize {
+    debug_assert!(i < SPONGE_WIDTH);
+    col_permutation_full_second(poseidon::HALF_N_FULL_ROUNDS, i)
+}
+
+const END_PERMUTATION_UNIT: usize = col_permutation_output(SPONGE_WIDTH - 1);
+
+//// MEMORY UNITS
+
+//// DECOMPOSITION UNITS
+
+const START_DECOMPOSITION_UNITS: usize = END_PERMUTATION_UNIT + 1;
+
+const NUM_DECOMPOSITION_UNITS: usize = 4;
+/// The number of bits associated with a single decomposition unit.
+const DECOMPOSITION_UNIT_BITS: usize = 32;
+/// One column for the value being decomposed, plus one column per bit.
+const DECOMPOSITION_UNIT_COLS: usize = 1 + DECOMPOSITION_UNIT_BITS;
+
+pub(crate) const fn col_decomposition_input(unit: usize) -> usize {
+    debug_assert!(unit < NUM_DECOMPOSITION_UNITS);
+    START_DECOMPOSITION_UNITS + unit * DECOMPOSITION_UNIT_COLS
+}
+
+pub(crate) const fn col_decomposition_bit(unit: usize, bit: usize) -> usize {
+    debug_assert!(unit < NUM_DECOMPOSITION_UNITS);
+    debug_assert!(bit < DECOMPOSITION_UNIT_BITS);
+    START_DECOMPOSITION_UNITS + unit * DECOMPOSITION_UNIT_COLS + 1 + bit
+}
+
+const END_DECOMPOSITION_UNITS: usize =
+    START_DECOMPOSITION_UNITS + DECOMPOSITION_UNIT_COLS * NUM_DECOMPOSITION_UNITS;
+
+pub(crate) const NUM_COLUMNS: usize = END_DECOMPOSITION_UNITS;
diff --git a/system_zero/src/core_registers.rs b/system_zero/src/core_registers.rs
new file mode 100644
index 00000000..249c16a3
--- /dev/null
+++ b/system_zero/src/core_registers.rs
@@ -0,0 +1,80 @@
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::vars::StarkEvaluationTargets;
+use starky::vars::StarkEvaluationVars;
+
+use crate::column_layout::{
+    COL_CLOCK, COL_FRAME_PTR, COL_INSTRUCTION_PTR, COL_RANGE_16, COL_STACK_PTR, NUM_COLUMNS,
+};
+use crate::public_input_layout::NUM_PUBLIC_INPUTS;
+use crate::system_zero::SystemZero;
+
+impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
+    pub(crate) fn generate_first_row_core_registers(&self, first_values: &mut [F; NUM_COLUMNS]) {
+        first_values[COL_CLOCK] = F::ZERO;
+        first_values[COL_RANGE_16] = F::ZERO;
+        first_values[COL_INSTRUCTION_PTR] = F::ZERO;
+        first_values[COL_FRAME_PTR] = F::ZERO;
+        first_values[COL_STACK_PTR] = F::ZERO;
+    }
+
+    pub(crate) fn generate_next_row_core_registers(
+        &self,
+        local_values: &[F; NUM_COLUMNS],
+        next_values: &mut [F; NUM_COLUMNS],
+    ) {
+        // We increment the clock by 1.
+        next_values[COL_CLOCK] = local_values[COL_CLOCK] + F::ONE;
+
+        // We increment the 16-bit table by 1, unless we've reached the max value of 2^16 - 1, in
+        // which case we repeat that value.
+        let prev_range_16 = local_values[COL_RANGE_16].to_canonical_u64();
+        let next_range_16 = (prev_range_16 + 1).min((1 << 16) - 1);
+        next_values[COL_RANGE_16] = F::from_canonical_u64(next_range_16);
+
+        next_values[COL_INSTRUCTION_PTR] = todo!();
+
+        next_values[COL_FRAME_PTR] = todo!();
+
+        next_values[COL_STACK_PTR] = todo!();
+    }
+
+    #[inline]
+    pub(crate) fn eval_core_registers<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        // The clock must start with 0, and increment by 1.
+        let local_clock = vars.local_values[COL_CLOCK];
+        let next_clock = vars.next_values[COL_CLOCK];
+        let delta_clock = next_clock - local_clock;
+        yield_constr.one_first_row(local_clock);
+        yield_constr.one(delta_clock - FE::ONE);
+
+        // The 16-bit table must start with 0, end with 2^16 - 1, and increment by 0 or 1.
+        let local_range_16 = vars.local_values[COL_RANGE_16];
+        let next_range_16 = vars.next_values[COL_RANGE_16];
+        let delta_range_16 = next_range_16 - local_range_16;
+        yield_constr.one_first_row(local_range_16);
+        yield_constr.one_last_row(local_range_16 - FE::from_canonical_u64((1 << 16) - 1));
+        yield_constr.one(delta_range_16 * (delta_range_16 - FE::ONE));
+
+        todo!()
+    }
+
+    pub(crate) fn eval_core_registers_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    ) {
+        todo!()
+    }
+}
diff --git a/system_zero/src/lib.rs b/system_zero/src/lib.rs
new file mode 100644
index 00000000..029c2abd
--- /dev/null
+++ b/system_zero/src/lib.rs
@@ -0,0 +1,12 @@
+// TODO: Remove these when crate is closer to being finished.
+#![allow(dead_code)]
+#![allow(unused_variables)]
+#![allow(unreachable_code)]
+#![allow(clippy::diverging_sub_expression)]
+
+mod column_layout;
+mod core_registers;
+mod memory;
+mod permutation_unit;
+mod public_input_layout;
+pub mod system_zero;
diff --git a/system_zero/src/memory.rs b/system_zero/src/memory.rs
new file mode 100644
index 00000000..0cc42d30
--- /dev/null
+++ b/system_zero/src/memory.rs
@@ -0,0 +1,16 @@
+#[derive(Default)]
+pub struct TransactionMemory {
+    pub calls: Vec<ContractMemory>,
+}
+
+/// A virtual memory space specific to the current contract call.
+pub struct ContractMemory {
+    pub code: MemorySegment,
+    pub main: MemorySegment,
+    pub calldata: MemorySegment,
+    pub returndata: MemorySegment,
+}
+
+pub struct MemorySegment {
+    pub content: Vec<u64>,
+}
diff --git a/system_zero/src/permutation_unit.rs b/system_zero/src/permutation_unit.rs
new file mode 100644
index 00000000..a490b49d
--- /dev/null
+++ b/system_zero/src/permutation_unit.rs
@@ -0,0 +1,86 @@
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::hash::hashing::SPONGE_WIDTH;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::vars::StarkEvaluationTargets;
+use starky::vars::StarkEvaluationVars;
+
+use crate::column_layout::{col_permutation_input, col_permutation_output, NUM_COLUMNS};
+use crate::public_input_layout::NUM_PUBLIC_INPUTS;
+use crate::system_zero::SystemZero;
+
+impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
+    pub(crate) fn generate_permutation_unit(&self, values: &mut [F; NUM_COLUMNS]) {
+        // Load inputs.
+        let mut state = [F::ZERO; SPONGE_WIDTH];
+        for i in 0..SPONGE_WIDTH {
+            state[i] = values[col_permutation_input(i)];
+        }
+
+        // TODO: First full rounds.
+        // TODO: Partial rounds.
+        // TODO: Second full rounds.
+
+        // Write outputs.
+        for i in 0..SPONGE_WIDTH {
+            values[col_permutation_output(i)] = state[i];
+        }
+    }
+
+    #[inline]
+    pub(crate) fn eval_permutation_unit<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        let local_values = &vars.local_values;
+
+        // Load inputs.
+        let mut state = [P::ZEROS; SPONGE_WIDTH];
+        for i in 0..SPONGE_WIDTH {
+            state[i] = local_values[col_permutation_input(i)];
+        }
+
+        // TODO: First full rounds.
+        // TODO: Partial rounds.
+        // TODO: Second full rounds.
+
+        // Assert that the computed output matches the outputs in the trace.
+        for i in 0..SPONGE_WIDTH {
+            let out = local_values[col_permutation_output(i)];
+            yield_constr.one(state[i] - out);
+        }
+    }
+
+    pub(crate) fn eval_permutation_unit_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    ) {
+        let zero = builder.zero_extension();
+        let local_values = &vars.local_values;
+
+        // Load inputs.
+        let mut state = [zero; SPONGE_WIDTH];
+        for i in 0..SPONGE_WIDTH {
+            state[i] = local_values[col_permutation_input(i)];
+        }
+
+        // TODO: First full rounds.
+        // TODO: Partial rounds.
+        // TODO: Second full rounds.
+
+        // Assert that the computed output matches the outputs in the trace.
+        for i in 0..SPONGE_WIDTH {
+            let out = local_values[col_permutation_output(i)];
+            let diff = builder.sub_extension(state[i], out);
+            yield_constr.one(builder, diff);
+        }
+    }
+}
diff --git a/system_zero/src/public_input_layout.rs b/system_zero/src/public_input_layout.rs
new file mode 100644
index 00000000..225b3814
--- /dev/null
+++ b/system_zero/src/public_input_layout.rs
@@ -0,0 +1,7 @@
+/// The previous state root, before these transactions were executed.
+const PI_OLD_STATE_ROOT: usize = 0;
+
+/// The updated state root, after these transactions were executed.
+const PI_NEW_STATE_ROOT: usize = PI_OLD_STATE_ROOT + 1;
+
+pub(crate) const NUM_PUBLIC_INPUTS: usize = PI_NEW_STATE_ROOT + 1;
diff --git a/system_zero/src/system_zero.rs b/system_zero/src/system_zero.rs
new file mode 100644
index 00000000..a16fb699
--- /dev/null
+++ b/system_zero/src/system_zero.rs
@@ -0,0 +1,109 @@
+use std::marker::PhantomData;
+
+use plonky2::field::extension_field::{Extendable, FieldExtension};
+use plonky2::field::packed_field::PackedField;
+use plonky2::hash::hash_types::RichField;
+use plonky2::plonk::circuit_builder::CircuitBuilder;
+use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
+use starky::stark::Stark;
+use starky::vars::StarkEvaluationTargets;
+use starky::vars::StarkEvaluationVars;
+
+use crate::column_layout::NUM_COLUMNS;
+use crate::memory::TransactionMemory;
+use crate::public_input_layout::NUM_PUBLIC_INPUTS;
+
+/// We require at least 2^16 rows as it helps support efficient 16-bit range checks.
+const MIN_TRACE_ROWS: usize = 1 << 16;
+
+pub struct SystemZero<F: RichField + Extendable<D>, const D: usize> {
+    _phantom: PhantomData<F>,
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> SystemZero<F, D> {
+    fn generate_trace(&self) -> Vec<[F; NUM_COLUMNS]> {
+        let memory = TransactionMemory::default();
+
+        let mut row = [F::ZERO; NUM_COLUMNS];
+        self.generate_first_row_core_registers(&mut row);
+        self.generate_permutation_unit(&mut row);
+
+        let mut trace = Vec::with_capacity(MIN_TRACE_ROWS);
+
+        loop {
+            let mut next_row = [F::ZERO; NUM_COLUMNS];
+            self.generate_next_row_core_registers(&row, &mut next_row);
+            self.generate_permutation_unit(&mut next_row);
+
+            trace.push(row);
+            row = next_row;
+        }
+
+        trace.push(row);
+        trace
+    }
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> Default for SystemZero<F, D> {
+    fn default() -> Self {
+        Self {
+            _phantom: PhantomData,
+        }
+    }
+}
+
+impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SystemZero<F, D> {
+    const COLUMNS: usize = NUM_COLUMNS;
+    const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
+
+    fn eval_packed_generic<FE, P, const D2: usize>(
+        &self,
+        vars: StarkEvaluationVars<FE, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut ConstraintConsumer<P>,
+    ) where
+        FE: FieldExtension<D2, BaseField = F>,
+        P: PackedField<Scalar = FE>,
+    {
+        self.eval_core_registers(vars, yield_constr);
+        self.eval_permutation_unit(vars, yield_constr);
+        todo!()
+    }
+
+    fn eval_ext_recursively(
+        &self,
+        builder: &mut CircuitBuilder<F, D>,
+        vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
+        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
+    ) {
+        self.eval_core_registers_recursively(builder, vars, yield_constr);
+        self.eval_permutation_unit_recursively(builder, vars, yield_constr);
+        todo!()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use log::Level;
+    use plonky2::field::goldilocks_field::GoldilocksField;
+    use plonky2::plonk::config::PoseidonGoldilocksConfig;
+    use plonky2::util::timing::TimingTree;
+    use starky::config::StarkConfig;
+    use starky::prover::prove;
+
+    use crate::system_zero::SystemZero;
+
+    #[test]
+    #[ignore] // TODO
+    fn run() {
+        type F = GoldilocksField;
+        type C = PoseidonGoldilocksConfig;
+        const D: usize = 2;
+
+        type S = SystemZero<F, D>;
+        let system = S::default();
+        let config = StarkConfig::standard_fast_config();
+        let mut timing = TimingTree::new("prove", Level::Debug);
+        let trace = system.generate_trace();
+        prove::<F, C, S, D>(system, config, trace, &mut timing);
+    }
+}
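Usage sketch (illustrative only, not part of the patch above): the snippet below shows one way a downstream crate might implement the new `Stark` trait for a toy two-column table whose second column must hold the square of the first, mirroring the `SystemZero` implementation in this diff. The `SquaresStark` type, the `SQ_COLUMNS`/`SQ_PUBLIC_INPUTS` constants, and the single constraint are hypothetical; a nightly toolchain with `generic_const_exprs` enabled (as in starky's `lib.rs`) is assumed.

// Hypothetical example, not part of the plonky2/starky source.
use plonky2::field::extension_field::{Extendable, FieldExtension};
use plonky2::field::packed_field::PackedField;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use starky::stark::Stark;
use starky::vars::{StarkEvaluationTargets, StarkEvaluationVars};

/// Hypothetical column counts for the toy trace.
const SQ_COLUMNS: usize = 2;
const SQ_PUBLIC_INPUTS: usize = 0;

/// A toy STARK: every row must satisfy `col 1 = col 0 * col 0`.
struct SquaresStark;

impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for SquaresStark {
    const COLUMNS: usize = SQ_COLUMNS;
    const PUBLIC_INPUTS: usize = SQ_PUBLIC_INPUTS;

    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: StarkEvaluationVars<FE, P, SQ_COLUMNS, SQ_PUBLIC_INPUTS>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        // Enforce col 1 = col 0 * col 0 on every row of the trace.
        let x = vars.local_values[0];
        let x_sq = vars.local_values[1];
        yield_constr.one(x_sq - x * x);
    }

    fn eval_ext_recursively(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: StarkEvaluationTargets<D, SQ_COLUMNS, SQ_PUBLIC_INPUTS>,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        // The same constraint, expressed over extension targets inside a recursive circuit.
        let x = vars.local_values[0];
        let x_sq = vars.local_values[1];
        let x_sq_computed = builder.mul_extension(x, x);
        let diff = builder.sub_extension(x_sq, x_sq_computed);
        yield_constr.one(builder, diff);
    }
}

A trace for such a table could then be handed to `starky::prover::prove` together with `StarkConfig::standard_fast_config()`, in the same way the ignored `system_zero` test above does.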