Refactor CTL Handling (#1629)
* refactor
* fmt
* fmt
* sync target version
* fix
* fix clippy
* fix clippy
This commit is contained in:
parent
27a972f1cb
commit
7959bd22ce
@@ -388,13 +388,13 @@ impl<'a, F: Field> PartitionWitness<'a, F> {
     }
 }
 
-impl<'a, F: Field> WitnessWrite<F> for PartitionWitness<'a, F> {
+impl<F: Field> WitnessWrite<F> for PartitionWitness<'_, F> {
     fn set_target(&mut self, target: Target, value: F) -> Result<()> {
         self.set_target_returning_rep(target, value).map(|_| ())
     }
 }
 
-impl<'a, F: Field> Witness<F> for PartitionWitness<'a, F> {
+impl<F: Field> Witness<F> for PartitionWitness<'_, F> {
     fn try_get_target(&self, target: Target) -> Option<F> {
         let rep_index = self.representative_map[self.target_index(target)];
         self.values[rep_index]
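Most hunks in this commit apply the same clippy fix: a lifetime parameter that is only used in the type an impl is written for is dropped from the impl generics and elided as `'_`. A minimal standalone sketch of the pattern on toy types (not the plonky2 types above), in the spirit of clippy's `needless_lifetimes` suggestion:

```rust
// Toy illustration of the lifetime elision applied throughout this diff.
struct Wrapper<'a, T>(&'a [T]);

trait Count {
    fn count(&self) -> usize;
}

// Before: `impl<'a, T> Count for Wrapper<'a, T> { ... }`
// After: the named lifetime is removed from the impl generics and elided as `'_`.
impl<T> Count for Wrapper<'_, T> {
    fn count(&self) -> usize {
        self.0.len()
    }
}

fn main() {
    let data = [1, 2, 3];
    assert_eq!(Wrapper(&data).count(), 3);
}
```

The behavior is unchanged; the elided form just avoids naming a lifetime that nothing else in the impl refers to.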
@@ -46,7 +46,7 @@ pub struct EvaluationVarsBasePacked<'a, P: PackedField> {
     pub public_inputs_hash: &'a HashOut<P::Scalar>,
 }
 
-impl<'a, F: RichField + Extendable<D>, const D: usize> EvaluationVars<'a, F, D> {
+impl<F: RichField + Extendable<D>, const D: usize> EvaluationVars<'_, F, D> {
     pub fn get_local_ext_algebra(
         &self,
         wire_range: Range<usize>,
@@ -120,7 +120,7 @@ impl<'a, F: Field> EvaluationVarsBaseBatch<'a, F> {
     }
 }
 
-impl<'a, F: Field> EvaluationVarsBase<'a, F> {
+impl<F: Field> EvaluationVarsBase<'_, F> {
     pub fn get_local_ext<const D: usize>(&self, wire_range: Range<usize>) -> F::Extension
     where
         F: RichField + Extendable<D>,
@@ -209,13 +209,13 @@ impl<'a, P: PackedField> Iterator for EvaluationVarsBaseBatchIterPacked<'a, P> {
     }
 }
 
-impl<'a, P: PackedField> ExactSizeIterator for EvaluationVarsBaseBatchIterPacked<'a, P> {
+impl<P: PackedField> ExactSizeIterator for EvaluationVarsBaseBatchIterPacked<'_, P> {
     fn len(&self) -> usize {
         (self.vars_batch.len() - self.i) / P::WIDTH
     }
 }
 
-impl<'a, const D: usize> EvaluationTargets<'a, D> {
+impl<const D: usize> EvaluationTargets<'_, D> {
     pub fn remove_prefix(&mut self, num_selectors: usize) {
         self.local_constants = &self.local_constants[num_selectors..];
     }
@@ -228,7 +228,7 @@ pub struct EvaluationTargets<'a, const D: usize> {
     pub public_inputs_hash: &'a HashOutTarget,
 }
 
-impl<'a, const D: usize> EvaluationTargets<'a, D> {
+impl<const D: usize> EvaluationTargets<'_, D> {
     pub fn get_local_ext_algebra(&self, wire_range: Range<usize>) -> ExtensionAlgebraTarget<D> {
         debug_assert_eq!(wire_range.len(), D);
         let arr = self.local_wires[wire_range].try_into().unwrap();
@@ -2196,13 +2196,13 @@ impl<'a> Buffer<'a> {
     }
 }
 
-impl<'a> Remaining for Buffer<'a> {
+impl Remaining for Buffer<'_> {
     fn remaining(&self) -> usize {
         self.bytes.len() - self.pos()
     }
 }
 
-impl<'a> Read for Buffer<'a> {
+impl Read for Buffer<'_> {
     #[inline]
     fn read_exact(&mut self, bytes: &mut [u8]) -> IoResult<()> {
         let n = bytes.len();
@@ -130,7 +130,7 @@ impl<'a, P: PackedField> PackedStridedView<'a, P> {
     }
 }
 
-impl<'a, P: PackedField> Index<usize> for PackedStridedView<'a, P> {
+impl<P: PackedField> Index<usize> for PackedStridedView<'_, P> {
     type Output = P;
     #[inline]
     fn index(&self, index: usize) -> &Self::Output {
@@ -182,7 +182,7 @@ pub struct PackedStridedViewIter<'a, P: PackedField> {
     _phantom: PhantomData<&'a [P::Scalar]>,
 }
 
-impl<'a, P: PackedField> PackedStridedViewIter<'a, P> {
+impl<P: PackedField> PackedStridedViewIter<'_, P> {
     pub(self) const fn new(start: *const P::Scalar, end: *const P::Scalar, stride: usize) -> Self {
         Self {
             start,
@@ -215,7 +215,7 @@ impl<'a, P: PackedField> Iterator for PackedStridedViewIter<'a, P> {
     }
 }
 
-impl<'a, P: PackedField> DoubleEndedIterator for PackedStridedViewIter<'a, P> {
+impl<P: PackedField> DoubleEndedIterator for PackedStridedViewIter<'_, P> {
     fn next_back(&mut self) -> Option<Self::Item> {
         debug_assert_eq!(
             (self.end as usize).wrapping_sub(self.start as usize)
@@ -241,7 +241,7 @@ pub trait Viewable<F> {
     fn view(&self, index: F) -> Self::View;
 }
 
-impl<'a, P: PackedField> Viewable<Range<usize>> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<Range<usize>> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, range: Range<usize>) -> Self::View {
         assert!(range.start <= self.len(), "Invalid access");
@@ -257,7 +257,7 @@ impl<'a, P: PackedField> Viewable<Range<usize>> for PackedStridedView<'a, P> {
     }
 }
 
-impl<'a, P: PackedField> Viewable<RangeFrom<usize>> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<RangeFrom<usize>> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, range: RangeFrom<usize>) -> Self::View {
         assert!(range.start <= self.len(), "Invalid access");
@@ -272,14 +272,14 @@ impl<'a, P: PackedField> Viewable<RangeFrom<usize>> for PackedStridedView<'a, P>
     }
 }
 
-impl<'a, P: PackedField> Viewable<RangeFull> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<RangeFull> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, _range: RangeFull) -> Self::View {
         *self
     }
 }
 
-impl<'a, P: PackedField> Viewable<RangeInclusive<usize>> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<RangeInclusive<usize>> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, range: RangeInclusive<usize>) -> Self::View {
         assert!(*range.start() <= self.len(), "Invalid access");
@@ -295,7 +295,7 @@ impl<'a, P: PackedField> Viewable<RangeInclusive<usize>> for PackedStridedView<'
     }
 }
 
-impl<'a, P: PackedField> Viewable<RangeTo<usize>> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<RangeTo<usize>> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, range: RangeTo<usize>) -> Self::View {
         assert!(range.end <= self.len(), "Invalid access");
@@ -308,7 +308,7 @@ impl<'a, P: PackedField> Viewable<RangeTo<usize>> for PackedStridedView<'a, P> {
     }
 }
 
-impl<'a, P: PackedField> Viewable<RangeToInclusive<usize>> for PackedStridedView<'a, P> {
+impl<P: PackedField> Viewable<RangeToInclusive<usize>> for PackedStridedView<'_, P> {
     type View = Self;
     fn view(&self, range: RangeToInclusive<usize>) -> Self::View {
         assert!(range.end < self.len(), "Invalid access");
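The `Viewable` impls above all follow one design: `Index` has to hand out a reference, so sub-views of the strided data are instead produced by a separate trait that returns a view by value, with one impl per range type. A standalone sketch of that shape on a plain slice-backed view (toy type, not `PackedStridedView`):

```rust
use core::ops::{Range, RangeFull};

// Toy view type standing in for a strided view; `Copy` lets `view(..)` return `*self`.
#[derive(Clone, Copy)]
struct View<'a> {
    data: &'a [u32],
}

trait Viewable<R> {
    type View;
    fn view(&self, range: R) -> Self::View;
}

impl Viewable<Range<usize>> for View<'_> {
    type View = Self;
    fn view(&self, range: Range<usize>) -> Self::View {
        assert!(range.end <= self.data.len(), "Invalid access");
        View {
            data: &self.data[range],
        }
    }
}

impl Viewable<RangeFull> for View<'_> {
    type View = Self;
    fn view(&self, _range: RangeFull) -> Self::View {
        *self
    }
}

fn main() {
    let data = [1, 2, 3, 4];
    let v = View { data: &data };
    let r: Range<usize> = 1..3;
    assert_eq!(v.view(r).data, &[2, 3]);
    assert_eq!(v.view(..).data, &[1, 2, 3, 4]);
}
```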
@@ -30,7 +30,6 @@
 
 #[cfg(not(feature = "std"))]
 use alloc::{vec, vec::Vec};
-use core::cmp::min;
 use core::fmt::Debug;
 use core::iter::once;
 
@@ -55,7 +54,7 @@ use crate::lookup::{
     eval_helper_columns, eval_helper_columns_circuit, get_grand_product_challenge_set,
     get_helper_cols, Column, ColumnFilter, Filter, GrandProductChallenge, GrandProductChallengeSet,
 };
-use crate::proof::{MultiProof, StarkProofTarget, StarkProofWithMetadata};
+use crate::proof::{StarkProof, StarkProofTarget};
 use crate::stark::Stark;
 
 /// An alias for `usize`, to represent the index of a STARK table in a multi-STARK setting.
@@ -186,7 +185,7 @@ impl<'a, F: Field> CtlZData<'a, F> {
     }
 }
 
-impl<'a, F: Field> CtlData<'a, F> {
+impl<F: Field> CtlData<'_, F> {
     /// Returns all the cross-table lookup helper polynomials.
     pub(crate) fn ctl_helper_polys(&self) -> Vec<PolynomialValues<F>> {
         let num_polys = self
@@ -250,58 +249,6 @@ where
     (ctl_challenges, ctl_data)
 }
 
-/// Outputs all the CTL data necessary to prove a multi-STARK system.
-pub fn get_ctl_vars_from_proofs<'a, F, C, const D: usize, const N: usize>(
-    multi_proof: &MultiProof<F, C, D, N>,
-    all_cross_table_lookups: &'a [CrossTableLookup<F>],
-    ctl_challenges: &'a GrandProductChallengeSet<F>,
-    num_lookup_columns: &'a [usize; N],
-    max_constraint_degree: usize,
-) -> [Vec<CtlCheckVars<'a, F, <F as Extendable<D>>::Extension, <F as Extendable<D>>::Extension, D>>;
-    N]
-where
-    F: RichField + Extendable<D>,
-    C: GenericConfig<D, F = F>,
-{
-    let num_ctl_helper_cols =
-        num_ctl_helper_columns_by_table(all_cross_table_lookups, max_constraint_degree);
-
-    CtlCheckVars::from_proofs(
-        &multi_proof.stark_proofs,
-        all_cross_table_lookups,
-        ctl_challenges,
-        num_lookup_columns,
-        &num_ctl_helper_cols,
-    )
-}
-/// Returns the number of helper columns for each `Table`.
-pub(crate) fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
-    ctls: &[CrossTableLookup<F>],
-    constraint_degree: usize,
-) -> Vec<[usize; N]> {
-    let mut res = vec![[0; N]; ctls.len()];
-    for (i, ctl) in ctls.iter().enumerate() {
-        let CrossTableLookup {
-            looking_tables,
-            looked_table: _,
-        } = ctl;
-        let mut num_by_table = [0; N];
-
-        let grouped_lookups = looking_tables.iter().group_by(|&a| a.table);
-
-        for (table, group) in grouped_lookups.into_iter() {
-            let sum = group.count();
-            if sum > 1 {
-                // We only need helper columns if there are at least 2 columns.
-                num_by_table[table] = sum.div_ceil(constraint_degree - 1);
-            }
-        }
-
-        res[i] = num_by_table;
-    }
-    res
-}
-
 /// Gets the auxiliary polynomials associated to these CTL data.
 pub(crate) fn get_ctl_auxiliary_polys<F: Field>(
     ctl_data: Option<&CtlData<F>>,
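For reference, the deleted `num_ctl_helper_columns_by_table` encoded a small piece of arithmetic that is still useful when reading the new `from_proof` path: looking columns coming from the same table are batched into helper columns, a group of `sum > 1` looking tables needs `ceil(sum / (constraint_degree - 1))` of them, and a lone looking table needs none. A standalone sketch of that count (plain `HashMap` instead of itertools' `group_by`; toy input, not the starky types):

```rust
// Minimal sketch of the helper-column count from the deleted
// `num_ctl_helper_columns_by_table`, on a flat list of table indices.
use std::collections::HashMap;

fn helper_columns_per_table(
    looking_tables: &[usize], // table index of each looking table in one CTL
    constraint_degree: usize,
) -> HashMap<usize, usize> {
    // Group the looking tables by table index and count each group.
    let mut counts: HashMap<usize, usize> = HashMap::new();
    for &table in looking_tables {
        *counts.entry(table).or_insert(0) += 1;
    }
    counts
        .into_iter()
        .map(|(table, sum)| {
            // Helper columns are only needed when a table contributes at least 2 columns.
            let helpers = if sum > 1 {
                sum.div_ceil(constraint_degree - 1)
            } else {
                0
            };
            (table, helpers)
        })
        .collect()
}

fn main() {
    // Table 0 appears 5 times and table 1 once, with constraint degree 3:
    // table 0 needs ceil(5 / 2) = 3 helper columns, table 1 needs none.
    let helpers = helper_columns_per_table(&[0, 0, 0, 0, 0, 1], 3);
    assert_eq!(helpers[&0], 3);
    assert_eq!(helpers[&1], 0);
}
```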
@@ -492,104 +439,82 @@ where
 impl<'a, F: RichField + Extendable<D>, const D: usize>
     CtlCheckVars<'a, F, F::Extension, F::Extension, D>
 {
-    /// Extracts the `CtlCheckVars` for each STARK.
-    pub fn from_proofs<C: GenericConfig<D, F = F>, const N: usize>(
-        proofs: &[StarkProofWithMetadata<F, C, D>; N],
+    /// Extracts the `CtlCheckVars` from a single proof.
+    pub fn from_proof<C: GenericConfig<D, F = F>>(
+        table_idx: TableIdx,
+        proof: &StarkProof<F, C, D>,
         cross_table_lookups: &'a [CrossTableLookup<F>],
         ctl_challenges: &'a GrandProductChallengeSet<F>,
-        num_lookup_columns: &[usize; N],
-        num_helper_ctl_columns: &Vec<[usize; N]>,
-    ) -> [Vec<Self>; N] {
-        let mut ctl_vars_per_table = [0; N].map(|_| vec![]);
-        // If there are no auxiliary polys in the proofs `openings`,
-        // return early. The verifier will reject the proofs when
-        // calling `validate_proof_shape`.
-        if proofs
-            .iter()
-            .any(|p| p.proof.openings.auxiliary_polys.is_none())
-        {
-            return ctl_vars_per_table;
-        }
+        num_lookup_columns: usize,
+        total_num_helper_columns: usize,
+        num_helper_ctl_columns: &[usize],
+    ) -> Vec<Self> {
+        // Get all cross-table lookup polynomial openings for the provided STARK proof.
+        let ctl_zs = {
+            let auxiliary_polys = proof
+                .openings
+                .auxiliary_polys
+                .as_ref()
+                .expect("We cannot have CTLs without auxiliary polynomials.");
+            let auxiliary_polys_next = proof
+                .openings
+                .auxiliary_polys_next
+                .as_ref()
+                .expect("We cannot have CTLs without auxiliary polynomials.");
 
-        let mut total_num_helper_cols_by_table = [0; N];
-        for p_ctls in num_helper_ctl_columns {
-            for j in 0..N {
-                total_num_helper_cols_by_table[j] += p_ctls[j] * ctl_challenges.challenges.len();
-            }
-        }
+            auxiliary_polys
+                .iter()
+                .skip(num_lookup_columns)
+                .zip(auxiliary_polys_next.iter().skip(num_lookup_columns))
+                .collect::<Vec<_>>()
+        };
 
-        // Get all cross-table lookup polynomial openings for each STARK proof.
-        let ctl_zs = proofs
-            .iter()
-            .zip(num_lookup_columns)
-            .map(|(p, &num_lookup)| {
-                let openings = &p.proof.openings;
+        let mut z_index = 0;
+        let mut start_index = 0;
+        let mut ctl_vars = vec![];
 
-                let ctl_zs = &openings
-                    .auxiliary_polys
-                    .as_ref()
-                    .expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..];
-                let ctl_zs_next = &openings
-                    .auxiliary_polys_next
-                    .as_ref()
-                    .expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..];
-                ctl_zs.iter().zip(ctl_zs_next).collect::<Vec<_>>()
-            })
-            .collect::<Vec<_>>();
-
         // Put each cross-table lookup polynomial into the correct table data: if a CTL polynomial is extracted from looking/looked table t, then we add it to the `CtlCheckVars` of table t.
-        let mut start_indices = [0; N];
-        let mut z_indices = [0; N];
         for (
+            i,
             CrossTableLookup {
                 looking_tables,
                 looked_table,
             },
-            num_ctls,
-        ) in cross_table_lookups.iter().zip(num_helper_ctl_columns)
+        ) in cross_table_lookups.iter().enumerate()
         {
             for &challenges in &ctl_challenges.challenges {
-                // Group looking tables by `Table`, since we bundle the looking tables taken from the same `Table` together thanks to helper columns.
-                // We want to only iterate on each `Table` once.
-                let mut filtered_looking_tables = Vec::with_capacity(min(looking_tables.len(), N));
-                for table in looking_tables {
-                    if !filtered_looking_tables.contains(&(table.table)) {
-                        filtered_looking_tables.push(table.table);
+                // Group the looking tables by `Table` to process them together.
+                let count = looking_tables
+                    .iter()
+                    .filter(|looking_table| looking_table.table == table_idx)
+                    .count();
+
+                let cols_filts = looking_tables.iter().filter_map(|looking_table| {
+                    if looking_table.table == table_idx {
+                        Some((&looking_table.columns, &looking_table.filter))
+                    } else {
+                        None
                     }
-                }
+                });
 
-                for &table in filtered_looking_tables.iter() {
-                    // We have first all the helper polynomials, then all the z polynomials.
-                    let (looking_z, looking_z_next) =
-                        ctl_zs[table][total_num_helper_cols_by_table[table] + z_indices[table]];
-
-                    let count = looking_tables
-                        .iter()
-                        .filter(|looking_table| looking_table.table == table)
-                        .count();
-                    let cols_filts = looking_tables.iter().filter_map(|looking_table| {
-                        if looking_table.table == table {
-                            Some((&looking_table.columns, &looking_table.filter))
-                        } else {
-                            None
-                        }
-                    });
+                if count > 0 {
                     let mut columns = Vec::with_capacity(count);
                     let mut filter = Vec::with_capacity(count);
                     for (col, filt) in cols_filts {
                         columns.push(&col[..]);
                         filter.push(filt.clone());
                     }
-                    let helper_columns = ctl_zs[table]
-                        [start_indices[table]..start_indices[table] + num_ctls[table]]
+
+                    let (looking_z, looking_z_next) = ctl_zs[total_num_helper_columns + z_index];
+                    let helper_columns = ctl_zs
+                        [start_index..start_index + num_helper_ctl_columns[i]]
                         .iter()
                         .map(|&(h, _)| *h)
                         .collect::<Vec<_>>();
 
-                    start_indices[table] += num_ctls[table];
+                    start_index += num_helper_ctl_columns[i];
+                    z_index += 1;
 
-                    z_indices[table] += 1;
-                    ctl_vars_per_table[table].push(Self {
+                    ctl_vars.push(Self {
                         helper_columns,
                         local_z: *looking_z,
                         next_z: *looking_z_next,
@@ -599,25 +524,26 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
                     });
                 }
 
-                let (looked_z, looked_z_next) = ctl_zs[looked_table.table]
-                    [total_num_helper_cols_by_table[looked_table.table]
-                        + z_indices[looked_table.table]];
+                if looked_table.table == table_idx {
+                    let (looked_z, looked_z_next) = ctl_zs[total_num_helper_columns + z_index];
+                    z_index += 1;
 
-                z_indices[looked_table.table] += 1;
+                    let columns = vec![&looked_table.columns[..]];
+                    let filter = vec![looked_table.filter.clone()];
 
-                let columns = vec![&looked_table.columns[..]];
-                let filter = vec![looked_table.filter.clone()];
-                ctl_vars_per_table[looked_table.table].push(Self {
-                    helper_columns: vec![],
-                    local_z: *looked_z,
-                    next_z: *looked_z_next,
-                    challenges,
-                    columns,
-                    filter,
-                });
+                    ctl_vars.push(Self {
+                        helper_columns: vec![],
+                        local_z: *looked_z,
+                        next_z: *looked_z_next,
+                        challenges,
+                        columns,
+                        filter,
+                    });
+                }
             }
         }
-        ctl_vars_per_table
+
+        ctl_vars
     }
 }
 
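The heart of the refactor is a change of granularity: the old `from_proofs` consumed all `N` proofs of a `MultiProof` at once and returned `[Vec<CtlCheckVars>; N]`, while the new `from_proof` extracts the check variables for one `table_idx` from a single `StarkProof`. A standalone sketch of that shape on toy types (not the starky API), showing how a per-table array can still be rebuilt from the per-proof function:

```rust
// Toy stand-in for a CTL check variable extracted from one table's proof openings.
#[derive(Debug, PartialEq)]
struct CheckVar {
    table: usize,
    z: u64,
}

// Per-table extraction, in the spirit of the new `CtlCheckVars::from_proof`.
fn from_proof(table_idx: usize, proof_openings: &[u64]) -> Vec<CheckVar> {
    proof_openings
        .iter()
        .map(|&z| CheckVar { table: table_idx, z })
        .collect()
}

// Batch form, in the spirit of the removed `from_proofs` / `get_ctl_vars_from_proofs`:
// a caller that still wants the per-table array rebuilds it with `core::array::from_fn`.
fn from_proofs<const N: usize>(proofs: &[Vec<u64>; N]) -> [Vec<CheckVar>; N] {
    core::array::from_fn(|i| from_proof(i, &proofs[i]))
}

fn main() {
    let proofs = [vec![1, 2], vec![3]];
    let per_table = from_proofs(&proofs);
    assert_eq!(per_table[1], vec![CheckVar { table: 1, z: 3 }]);
}
```

Keeping the per-table function as the primitive lets callers that only handle one STARK at a time avoid materializing the whole `N`-sized array.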
@@ -159,31 +159,6 @@ where
     pub proof: StarkProof<F, C, D>,
 }
 
-/// A combination of STARK proofs for independent statements operating on possibly shared variables,
-/// along with Cross-Table Lookup (CTL) challenges to assert consistency of common variables across tables.
-#[derive(Debug, Clone)]
-pub struct MultiProof<
-    F: RichField + Extendable<D>,
-    C: GenericConfig<D, F = F>,
-    const D: usize,
-    const N: usize,
-> {
-    /// Proofs for all the different STARK modules.
-    pub stark_proofs: [StarkProofWithMetadata<F, C, D>; N],
-    /// Cross-table lookup challenges.
-    pub ctl_challenges: GrandProductChallengeSet<F>,
-}
-
-impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize, const N: usize>
-    MultiProof<F, C, D, N>
-{
-    /// Returns the degree (i.e. the trace length) of each STARK proof,
-    /// from their common [`StarkConfig`].
-    pub fn recover_degree_bits(&self, config: &StarkConfig) -> [usize; N] {
-        core::array::from_fn(|i| self.stark_proofs[i].proof.recover_degree_bits(config))
-    }
-}
-
 /// Randomness used for a STARK proof.
 #[derive(Debug)]
 pub struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
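With `MultiProof` gone, its only method, `recover_degree_bits`, disappears as well; the same per-proof mapping is a one-liner over whatever container the caller keeps the proofs in. A toy sketch (hypothetical `ToyProof`, not the starky types) of the `core::array::from_fn` pattern the deleted method used:

```rust
// Toy proof carrying only the value the deleted method recovered.
struct ToyProof {
    degree_bits: usize,
}

// Map an array of proofs to their per-proof degree bits, as `recover_degree_bits` did.
fn recover_degree_bits<const N: usize>(proofs: &[ToyProof; N]) -> [usize; N] {
    core::array::from_fn(|i| proofs[i].degree_bits)
}

fn main() {
    let proofs = [ToyProof { degree_bits: 10 }, ToyProof { degree_bits: 12 }];
    assert_eq!(recover_degree_bits(&proofs), [10, 12]);
}
```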