Fix example and documentation rendering (#1614)

* Fix starky runnable doc example and fix cargo doc

* Formatting and Clippy

* bump katex version and add config.toml

* Update README for local doc and add flag for docs.rs

* Add missing whitespace

---------

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>
Co-authored-by: Ben <bmarsh94@gmail.com>
Simon Perriard 2024-08-29 14:22:37 +02:00 committed by GitHub
parent 9fdf1b8ef4
commit a4efbe8369
9 changed files with 137 additions and 231 deletions

.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[build]
rustdocflags = ["--html-in-header", ".cargo/katex-header.html"]
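With this committed, every local `cargo doc` run picks up the KaTeX header automatically. For reference, a one-off equivalent without the config file would look something like the sketch below (assuming the header stays at `.cargo/katex-header.html`, as in this commit):

```sh
# Same effect as the [build] rustdocflags entry above, for a single run:
RUSTDOCFLAGS="--html-in-header .cargo/katex-header.html" cargo doc --no-deps --open
```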

View File

@@ -1,10 +1,10 @@
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/katex.min.css"
integrity="sha384-zB1R0rpPzHqg7Kpt0Aljp8JPLqbXI3bhnPWROx27a9N0Ll6ZP/+DiW/UqRcLbRjq" crossorigin="anonymous">
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/katex.min.js"
integrity="sha384-y23I5Q6l+B6vatafAwxRu/0oK/79VlbSz7Q9aiSZUvyWYIYsd+qj+o24G5ZU2zJz"
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.16.11/dist/katex.min.css"
integrity="sha384-nB0miv6/jRmo5UMMR1wu3Gz6NLsoTkbqJghGIsx//Rlm+ZU03BU6SQNC66uf4l5+" crossorigin="anonymous">
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.16.11/dist/katex.min.js"
integrity="sha384-7zkQWkzuo3B5mTepMUcHkMB5jZaolc2xDwL6VFqjFALcbeS9Ggm/Yr2r3Dy4lfFg"
crossorigin="anonymous"></script>
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.11.1/dist/contrib/auto-render.min.js"
integrity="sha384-kWPLUVMOks5AQFrykwIup5lo0m3iMkkHrD0uJ4H5cjeGihAutqP0yW0J6dpFiVkI"
<script defer src="https://cdn.jsdelivr.net/npm/katex@0.16.11/dist/contrib/auto-render.min.js"
integrity="sha384-43gviWU0YVjaDtb/GhzOouOXtZMP/7XUzwPTstBeZFe/+rCMvRwr4yROQP43s0Xk"
crossorigin="anonymous"></script>
<script>
document.addEventListener("DOMContentLoaded", function () {

View File

@@ -60,6 +60,14 @@ static GLOBAL: Jemalloc = Jemalloc;
Jemalloc is known to cause crashes when a binary compiled for x86 is run on an Apple silicon-based Mac under [Rosetta 2](https://support.apple.com/en-us/HT211861). If you are experiencing crashes on your Apple silicon Mac, run `rustc --print target-libdir`. The output should contain `aarch64-apple-darwin`. If the output contains `x86_64-apple-darwin`, then you are running the Rust toolchain for x86; we recommend switching to the native ARM version.
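For example, checking and switching might look like the following (a sketch assuming `rustup` manages your toolchain):

```sh
# If this prints a path containing x86_64-apple-darwin, the active
# toolchain targets x86 and the binary runs under Rosetta 2.
rustc --print target-libdir

# Install and select the native Apple silicon toolchain instead.
rustup toolchain install stable-aarch64-apple-darwin
rustup default stable-aarch64-apple-darwin
```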
## Documentation
Generate documentation locally:
```sh
cargo doc --no-deps --open
```
## Contributing guidelines
See [CONTRIBUTING.md](./CONTRIBUTING.md).

View File

@@ -0,0 +1,2 @@
[build]
rustdocflags = ["--html-in-header", ".cargo/katex-header.html"]

View File

@@ -87,6 +87,7 @@ harness = false
# Display math equations properly in documentation
[package.metadata.docs.rs]
rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"]
cargo-args = ["--no-deps"]
[lints]
workspace = true

View File

@@ -45,16 +45,16 @@ use crate::util::serialization::{Buffer, IoResult, Read, Write};
/// non-routed wires. Let $x[]$ be the domain points, $v[]$ be the values, $w[]$ be the Barycentric
/// weights and $z$ be the evaluation point. Define the sequences
///
/// $p[0] = 1,$
/// $p\[0\] = 1,$
///
/// $p[i] = p[i - 1] \cdot (z - x[i - 1]),$
/// $p\[i\] = p[i - 1] \cdot (z - x[i - 1]),$
///
/// $e[0] = 0,$
/// $e\[0\] = 0,$
///
/// $e[i] = e[i - 1] \cdot (z - x[i - 1]) + w[i - 1] \cdot v[i - 1] \cdot p[i - 1]$
/// $e\[i\] = e[i - 1] \cdot (z - x[i - 1]) + w[i - 1] \cdot v[i - 1] \cdot p[i - 1]$
///
/// Then $e[N]$ is the final interpolated value. The non-routed wires hold every $(d - 1)$'th
/// intermediate value of $p$ and $e$, starting at $p[d]$ and $e[d]$, where $d$ is the gate degree.
/// Then $e\[N\]$ is the final interpolated value. The non-routed wires hold every $(d - 1)$'th
/// intermediate value of $p$ and $e$, starting at $p\[d\]$ and $e\[d\]$, where $d$ is the gate degree.
#[derive(Clone, Debug, Default)]
pub struct CosetInterpolationGate<F: RichField + Extendable<D>, const D: usize> {
pub subgroup_bits: usize,
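As a sanity check of the recurrence in the doc comment above, the following standalone sketch reproduces the barycentric evaluation with plain `f64` arithmetic (an illustrative stand-in for the gate's field elements):

```rust
fn main() {
    // Interpolate v[i] = x[i]^2 + 1 through x = [0, 1, 2], evaluate at z = 1.5.
    let xs = [0.0_f64, 1.0, 2.0];
    let vs: Vec<f64> = xs.iter().map(|x| x * x + 1.0).collect();
    let z = 1.5_f64;

    // Barycentric weights: w[i] = 1 / prod_{j != i} (x[i] - x[j]).
    let ws: Vec<f64> = (0..xs.len())
        .map(|i| {
            (0..xs.len())
                .filter(|&j| j != i)
                .map(|j| xs[i] - xs[j])
                .product::<f64>()
                .recip()
        })
        .collect();

    // The documented sequences: p[0] = 1, e[0] = 0,
    // p[i] = p[i-1] * (z - x[i-1]),
    // e[i] = e[i-1] * (z - x[i-1]) + w[i-1] * v[i-1] * p[i-1].
    let (mut p, mut e) = (1.0_f64, 0.0_f64);
    for i in 0..xs.len() {
        let (prev_p, prev_e) = (p, e);
        p = prev_p * (z - xs[i]);
        e = prev_e * (z - xs[i]) + ws[i] * vs[i] * prev_p;
    }

    // e[N] is the interpolated value: 1.5^2 + 1 = 3.25.
    assert!((e - 3.25).abs() < 1e-12);
}
```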

View File

@@ -0,0 +1,2 @@
[build]
rustdocflags = ["--html-in-header", ".cargo/katex-header.html"]

View File

@@ -36,6 +36,7 @@ env_logger = { version = "0.9.0", default-features = false }
# Display math equations properly in documentation
[package.metadata.docs.rs]
rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"]
cargo-args = ["--no-deps"]
[lints]
workspace = true
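Since the headline change is making the starky doc example actually compile and run, the natural check after a change like this is the doc-test run (package name `starky` assumed from context):

```sh
# Compile and execute every Rust example in the crate's documentation.
cargo test --doc -p starky
```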

View File

@@ -24,13 +24,12 @@
//! user-provided values `x0` and `x1`, one can do the following:
//!
//! ```rust
//! # use core::marker::PhantomData;
//! // Imports all basic types.
//! use std::marker::PhantomData;
//! use plonky2::field::extension::{Extendable, FieldExtension};
//! use plonky2::field::packed::PackedField;
//! use plonky2::field::polynomial::PolynomialValues;
//! use plonky2::hash::hash_types::RichField;
//! # use starky::util::trace_rows_to_poly_values;
//!
//! // Imports to define the constraints of our STARK.
//! use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
@@ -40,37 +39,53 @@
//! // Imports to define the recursive constraints of our STARK.
//! use plonky2::iop::ext_target::ExtensionTarget;
//! use plonky2::plonk::circuit_builder::CircuitBuilder;
//! use starky::util::trace_rows_to_poly_values;
//!
//! // Imports to generate a STARK instance, compute the trace and prove it
//! use plonky2::field::types::Field;
//! use plonky2::plonk::config::GenericConfig;
//! use plonky2::plonk::config::PoseidonGoldilocksConfig;
//! use plonky2::util::timing::TimingTree;
//! use starky::config::StarkConfig;
//! use starky::prover::prove;
//! use starky::verifier::verify_stark_proof;
//!
//! # #[derive(Copy, Clone)]
//! pub struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
//! num_rows: usize,
//! _phantom: PhantomData<F>,
//! num_rows: usize,
//! _phantom: PhantomData<F>,
//! }
//!
//! // Define witness generation.
//! impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
//! // The first public input is `x0`.
//! const PI_INDEX_X0: usize = 0;
//! // The second public input is `x1`.
//! const PI_INDEX_X1: usize = 1;
//! // The third public input is the second element of the last row,
//! // which should be equal to the `num_rows`-th Fibonacci number.
//! const PI_INDEX_RES: usize = 2;
//! // The first public input is `x0`.
//! const PI_INDEX_X0: usize = 0;
//! // The second public input is `x1`.
//! const PI_INDEX_X1: usize = 1;
//! // The third public input is the second element of the last row,
//! // which should be equal to the `num_rows`-th Fibonacci number.
//! const PI_INDEX_RES: usize = 2;
//!
//! /// Generate the trace using `x0, x1, 0` as initial state values.
//! fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
//! let mut trace_rows = (0..self.num_rows)
//! .scan([x0, x1, F::ZERO], |acc, _| {
//! let tmp = *acc;
//! acc[0] = tmp[1];
//! acc[1] = tmp[0] + tmp[1];
//! acc[2] = tmp[2] + F::ONE;
//! Some(tmp)
//! })
//! .collect::<Vec<_>>();
//! pub(crate) fn new(num_rows: usize) -> Self {
//! Self {
//! num_rows,
//! _phantom: PhantomData
//! }
//! }
//!
//! // Transpose the row-wise trace for the prover.
//! trace_rows_to_poly_values(trace_rows)
//! }
//! /// Generate the trace using `x0, x1, 0` as initial state values.
//! fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
//! let mut trace_rows = (0..self.num_rows)
//! .scan([x0, x1, F::ZERO], |acc, _| {
//! let tmp = *acc;
//! acc[0] = tmp[1];
//! acc[1] = tmp[0] + tmp[1];
//! acc[2] = tmp[2] + F::ONE;
//! Some(tmp)
//! })
//! .collect::<Vec<_>>();
//! // Transpose the row-wise trace for the prover.
//! trace_rows_to_poly_values(trace_rows)
//! }
//! }
//!
//! // Define constraints.
@@ -78,217 +93,92 @@
//! const PUBLIC_INPUTS: usize = 3;
//!
//! impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
//! type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
//! where
//! FE: FieldExtension<D2, BaseField = F>,
//! P: PackedField<Scalar = FE>;
//! type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
//! where
//! FE: FieldExtension<D2, BaseField = F>,
//! P: PackedField<Scalar = FE>;
//!
//! type EvaluationFrameTarget =
//! StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
//! type EvaluationFrameTarget =
//! StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
//!
//! // Define this STARK's constraints.
//! fn eval_packed_generic<FE, P, const D2: usize>(
//! &self,
//! vars: &Self::EvaluationFrame<FE, P, D2>,
//! yield_constr: &mut ConstraintConsumer<P>,
//! ) where
//! FE: FieldExtension<D2, BaseField = F>,
//! P: PackedField<Scalar = FE>,
//! {
//! let local_values = vars.get_local_values();
//! let next_values = vars.get_next_values();
//! let public_inputs = vars.get_public_inputs();
//! // Define this STARK's constraints.
//! fn eval_packed_generic<FE, P, const D2: usize>(
//! &self,
//! vars: &Self::EvaluationFrame<FE, P, D2>,
//! yield_constr: &mut ConstraintConsumer<P>,
//! ) where
//! FE: FieldExtension<D2, BaseField = F>,
//! P: PackedField<Scalar = FE>,
//! {
//! let local_values = vars.get_local_values();
//! let next_values = vars.get_next_values();
//! let public_inputs = vars.get_public_inputs();
//!
//! // Check public inputs.
//! yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
//! yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
//! yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
//! // Check public inputs.
//! yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
//! yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
//! yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
//!
//! // Enforce the Fibonacci transition constraints.
//! // x0' <- x1
//! yield_constr.constraint_transition(next_values[0] - local_values[1]);
//! // x1' <- x0 + x1
//! yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
//! }
//! // Enforce the Fibonacci transition constraints.
//! // x0' <- x1
//! yield_constr.constraint_transition(next_values[0] - local_values[1]);
//! // x1' <- x0 + x1
//! yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
//! }
//!
//! // Define the constraints to recursively verify this STARK.
//! fn eval_ext_circuit(
//! &self,
//! builder: &mut CircuitBuilder<F, D>,
//! vars: &Self::EvaluationFrameTarget,
//! yield_constr: &mut RecursiveConstraintConsumer<F, D>,
//! ) {
//! let local_values = vars.get_local_values();
//! let next_values = vars.get_next_values();
//! let public_inputs = vars.get_public_inputs();
//! // Define the constraints to recursively verify this STARK.
//! fn eval_ext_circuit(
//! &self,
//! builder: &mut CircuitBuilder<F, D>,
//! vars: &Self::EvaluationFrameTarget,
//! yield_constr: &mut RecursiveConstraintConsumer<F, D>,
//! ) {
//! let local_values = vars.get_local_values();
//! let next_values = vars.get_next_values();
//! let public_inputs = vars.get_public_inputs();
//!
//! // Check public inputs.
//! let pis_constraints = [
//! builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
//! builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
//! builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
//! ];
//! // Check public inputs.
//! let pis_constraints = [
//! builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
//! builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
//! builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
//! ];
//!
//! yield_constr.constraint_first_row(builder, pis_constraints[0]);
//! yield_constr.constraint_first_row(builder, pis_constraints[1]);
//! yield_constr.constraint_last_row(builder, pis_constraints[2]);
//! yield_constr.constraint_first_row(builder, pis_constraints[0]);
//! yield_constr.constraint_first_row(builder, pis_constraints[1]);
//! yield_constr.constraint_last_row(builder, pis_constraints[2]);
//!
//! // Enforce the Fibonacci transition constraints.
//! // x0' <- x1
//! let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
//! yield_constr.constraint_transition(builder, first_col_constraint);
//! // x1' <- x0 + x1
//! let second_col_constraint = {
//! let tmp = builder.sub_extension(next_values[1], local_values[0]);
//! builder.sub_extension(tmp, local_values[1])
//! };
//! yield_constr.constraint_transition(builder, second_col_constraint);
//! }
//! // Enforce the Fibonacci transition constraints.
//! // x0' <- x1
//! let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
//! yield_constr.constraint_transition(builder, first_col_constraint);
//! // x1' <- x0 + x1
//! let second_col_constraint = {
//! let tmp = builder.sub_extension(next_values[1], local_values[0]);
//! builder.sub_extension(tmp, local_values[1])
//! };
//! yield_constr.constraint_transition(builder, second_col_constraint);
//! }
//!
//! fn constraint_degree(&self) -> usize {
//! 2
//! }
//! fn constraint_degree(&self) -> usize {
//! 2
//! }
//! }
//! ```
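To see concretely what `generate_trace` emits, here is the same `scan` recurrence over plain `u64` values (an illustrative sketch outside the STARK machinery; real traces hold field elements):

```rust
fn main() {
    let num_rows = 8;
    let (x0, x1) = (2u64, 7u64);
    // Each emitted row is [a, b, step]; the next state is [b, a + b, step + 1],
    // mirroring the doc example's update of `acc`.
    let trace_rows: Vec<[u64; 3]> = (0..num_rows)
        .scan([x0, x1, 0u64], |acc, _| {
            let tmp = *acc;
            acc[0] = tmp[1];
            acc[1] = tmp[0] + tmp[1];
            acc[2] = tmp[2] + 1;
            Some(tmp)
        })
        .collect();
    // The second column of the last row carries the claimed Fibonacci-like
    // result, i.e. the value exposed through PI_INDEX_RES.
    assert_eq!(trace_rows[num_rows - 1], [107, 173, 7]);
}
```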
//!
//! One can then instantiate a new `FibonacciStark` instance, generate an associated
//! STARK trace, and generate a proof for it.
//! // One can then instantiate a new `FibonacciStark` instance, generate an associated
//! // STARK trace, and generate a proof for it.
//!
//! ```rust
//! # use anyhow::Result;
//! # use core::marker::PhantomData;
//! # // Imports all basic types.
//! # use plonky2::field::extension::{Extendable, FieldExtension};
//! # use plonky2::field::types::Field;
//! # use plonky2::field::packed::PackedField;
//! # use plonky2::field::polynomial::PolynomialValues;
//! # use plonky2::hash::hash_types::RichField;
//! # use starky::util::trace_rows_to_poly_values;
//! # // Imports to define the constraints of our STARK.
//! # use starky::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
//! # use starky::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
//! # use starky::stark::Stark;
//! # // Imports to define the recursive constraints of our STARK.
//! # use plonky2::iop::ext_target::ExtensionTarget;
//! # use plonky2::plonk::circuit_builder::CircuitBuilder;
//! # use plonky2::util::timing::TimingTree;
//! # use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
//! # use starky::prover::prove;
//! # use starky::verifier::verify_stark_proof;
//! # use starky::config::StarkConfig;
//! #
//! # #[derive(Copy, Clone)]
//! # pub struct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {
//! # num_rows: usize,
//! # _phantom: PhantomData<F>,
//! # }
//! # // Define witness generation.
//! # impl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {
//! # // The first public input is `x0`.
//! # const PI_INDEX_X0: usize = 0;
//! # // The second public input is `x1`.
//! # const PI_INDEX_X1: usize = 1;
//! # // The third public input is the second element of the last row,
//! # // which should be equal to the `num_rows`-th Fibonacci number.
//! # const PI_INDEX_RES: usize = 2;
//! # /// Generate the trace using `x0, x1, 0` as initial state values.
//! # fn generate_trace(&self, x0: F, x1: F) -> Vec<PolynomialValues<F>> {
//! # let mut trace_rows = (0..self.num_rows)
//! # .scan([x0, x1, F::ZERO], |acc, _| {
//! # let tmp = *acc;
//! # acc[0] = tmp[1];
//! # acc[1] = tmp[0] + tmp[1];
//! # acc[2] = tmp[2] + F::ONE;
//! # Some(tmp)
//! # })
//! # .collect::<Vec<_>>();
//! # // Transpose the row-wise trace for the prover.
//! # trace_rows_to_poly_values(trace_rows)
//! # }
//! # const fn new(num_rows: usize) -> Self {
//! # Self {
//! # num_rows,
//! # _phantom: PhantomData,
//! # }
//! # }
//! # }
//! # // Define constraints.
//! # const COLUMNS: usize = 3;
//! # const PUBLIC_INPUTS: usize = 3;
//! # impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for FibonacciStark<F, D> {
//! # type EvaluationFrame<FE, P, const D2: usize> = StarkFrame<P, P::Scalar, COLUMNS, PUBLIC_INPUTS>
//! # where
//! # FE: FieldExtension<D2, BaseField = F>,
//! # P: PackedField<Scalar = FE>;
//! # type EvaluationFrameTarget =
//! # StarkFrame<ExtensionTarget<D>, ExtensionTarget<D>, COLUMNS, PUBLIC_INPUTS>;
//! # // Define this STARK's constraints.
//! # fn eval_packed_generic<FE, P, const D2: usize>(
//! # &self,
//! # vars: &Self::EvaluationFrame<FE, P, D2>,
//! # yield_constr: &mut ConstraintConsumer<P>,
//! # ) where
//! # FE: FieldExtension<D2, BaseField = F>,
//! # P: PackedField<Scalar = FE>,
//! # {
//! # let local_values = vars.get_local_values();
//! # let next_values = vars.get_next_values();
//! # let public_inputs = vars.get_public_inputs();
//! # // Check public inputs.
//! # yield_constr.constraint_first_row(local_values[0] - public_inputs[Self::PI_INDEX_X0]);
//! # yield_constr.constraint_first_row(local_values[1] - public_inputs[Self::PI_INDEX_X1]);
//! # yield_constr.constraint_last_row(local_values[1] - public_inputs[Self::PI_INDEX_RES]);
//! # // Enforce the Fibonacci transition constraints.
//! # // x0' <- x1
//! # yield_constr.constraint_transition(next_values[0] - local_values[1]);
//! # // x1' <- x0 + x1
//! # yield_constr.constraint_transition(next_values[1] - local_values[0] - local_values[1]);
//! # }
//! # // Define the constraints to recursively verify this STARK.
//! # fn eval_ext_circuit(
//! # &self,
//! # builder: &mut CircuitBuilder<F, D>,
//! # vars: &Self::EvaluationFrameTarget,
//! # yield_constr: &mut RecursiveConstraintConsumer<F, D>,
//! # ) {
//! # let local_values = vars.get_local_values();
//! # let next_values = vars.get_next_values();
//! # let public_inputs = vars.get_public_inputs();
//! # // Check public inputs.
//! # let pis_constraints = [
//! # builder.sub_extension(local_values[0], public_inputs[Self::PI_INDEX_X0]),
//! # builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_X1]),
//! # builder.sub_extension(local_values[1], public_inputs[Self::PI_INDEX_RES]),
//! # ];
//! # yield_constr.constraint_first_row(builder, pis_constraints[0]);
//! # yield_constr.constraint_first_row(builder, pis_constraints[1]);
//! # yield_constr.constraint_last_row(builder, pis_constraints[2]);
//! # // Enforce the Fibonacci transition constraints.
//! # // x0' <- x1
//! # let first_col_constraint = builder.sub_extension(next_values[0], local_values[1]);
//! # yield_constr.constraint_transition(builder, first_col_constraint);
//! # // x1' <- x0 + x1
//! # let second_col_constraint = {
//! # let tmp = builder.sub_extension(next_values[1], local_values[0]);
//! # builder.sub_extension(tmp, local_values[1])
//! # };
//! # yield_constr.constraint_transition(builder, second_col_constraint);
//! # }
//! # fn constraint_degree(&self) -> usize {
//! # 2
//! # }
//! # }
//! # fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
//! # (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1
//! # }
//! #
//! const D: usize = 2;
//! const CONFIG: StarkConfig = StarkConfig::standard_fast_config();
//! type C = PoseidonGoldilocksConfig;
//! type F = <C as GenericConfig<D>>::F;
//! type S = FibonacciStark<F, D>;
//!
//! fn main() {
//! fn fibonacci<F: Field>(n: usize, x0: F, x1: F) -> F {
//! (0..n).fold((x0, x1), |acc, _| (acc.1, acc.0 + acc.1)).1
//! }
//!
//! fn fibonacci_stark() {
//! let num_rows = 1 << 10;
//! let x0 = F::from_canonical_u32(2);
//! let x1 = F::from_canonical_u32(7);