Fix the memory CTL and implement the verifier memory bus

Co-authored-by: Hamy Ratoanina <hamy.ratoanina@toposware.com>
Linda Guiga 2023-04-14 21:55:44 +08:00 committed by Robin Salen
parent bfd6b98884
commit 06037f814f
14 changed files with 595 additions and 48 deletions

View File

@ -10,7 +10,7 @@ use crate::config::StarkConfig;
use crate::cpu::cpu_stark;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::membus::NUM_GP_CHANNELS;
use crate::cross_table_lookup::{Column, CrossTableLookup, TableWithColumns};
use crate::cross_table_lookup::{CrossTableLookup, TableWithColumns};
use crate::keccak::keccak_stark;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::columns::KECCAK_RATE_BYTES;
@ -97,23 +97,13 @@ impl Table {
}
pub(crate) fn all_cross_table_lookups<F: Field>() -> Vec<CrossTableLookup<F>> {
let mut ctls = vec![
vec![
ctl_arithmetic(),
ctl_keccak_sponge(),
ctl_keccak(),
ctl_logic(),
ctl_memory(),
];
// TODO: Some CTLs temporarily disabled while we get them working.
disable_ctl(&mut ctls[4]);
ctls
}
fn disable_ctl<F: Field>(ctl: &mut CrossTableLookup<F>) {
for table in &mut ctl.looking_tables {
table.filter_column = Some(Column::zero());
}
ctl.looked_table.filter_column = Some(Column::zero());
]
}
fn ctl_arithmetic<F: Field>() -> CrossTableLookup<F> {

View File

@ -48,6 +48,7 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
final_cpu_row.mem_channels[2].value[0] = F::ZERO; // virt
final_cpu_row.mem_channels[3].value[0] = F::from_canonical_usize(KERNEL.code.len()); // len
final_cpu_row.mem_channels[4].value = KERNEL.code_hash.map(F::from_canonical_u32);
final_cpu_row.mem_channels[4].value.reverse();
keccak_sponge_log(
state,
MemoryAddress::new(0, Segment::Code, 0),
@ -93,6 +94,7 @@ pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
for (&expected, actual) in KERNEL
.code_hash
.iter()
.rev()
.zip(local_values.mem_channels.last().unwrap().value)
{
let expected = P::from(F::from_canonical_u32(expected));
@ -153,6 +155,7 @@ pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const
for (&expected, actual) in KERNEL
.code_hash
.iter()
.rev()
.zip(local_values.mem_channels.last().unwrap().value)
{
let expected = builder.constant_extension(F::Extension::from_canonical_u32(expected));

View File

@ -44,9 +44,10 @@ impl Kernel {
prover_inputs: HashMap<usize, ProverInputFn>,
) -> Self {
let code_hash_bytes = keccak(&code).0;
let code_hash = core::array::from_fn(|i| {
let code_hash_be = core::array::from_fn(|i| {
u32::from_le_bytes(core::array::from_fn(|j| code_hash_bytes[i * 4 + j]))
});
let code_hash = code_hash_be.map(u32::from_be);
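A standalone sketch (not part of the diff; the helper name `code_hash_u32s_be` is hypothetical) of what the two-step conversion above computes, assuming a little-endian host: each 4-byte chunk of the Keccak digest is read as a big-endian u32.

// Sketch: on a little-endian target, `from_le_bytes` followed by `u32::from_be`
// as above amounts to reading each 4-byte chunk as a big-endian u32.
fn code_hash_u32s_be(code_hash_bytes: [u8; 32]) -> [u32; 8] {
    core::array::from_fn(|i| {
        u32::from_be_bytes(core::array::from_fn(|j| code_hash_bytes[i * 4 + j]))
    })
}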
let ordered_labels = global_labels
.keys()
.cloned()

View File

@ -542,21 +542,29 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
pub(crate) fn verify_cross_table_lookups<F: RichField + Extendable<D>, const D: usize>(
cross_table_lookups: &[CrossTableLookup<F>],
ctl_zs_lasts: [Vec<F>; NUM_TABLES],
ctl_extra_looking_products: Vec<Vec<F>>,
config: &StarkConfig,
) -> Result<()> {
let mut ctl_zs_openings = ctl_zs_lasts.iter().map(|v| v.iter()).collect::<Vec<_>>();
for CrossTableLookup {
looking_tables,
looked_table,
} in cross_table_lookups.iter()
for (
CrossTableLookup {
looking_tables,
looked_table,
},
extra_product_vec,
) in cross_table_lookups
.iter()
.zip(ctl_extra_looking_products.iter())
{
for _ in 0..config.num_challenges {
let looking_zs_prod = looking_tables
for c in 0..config.num_challenges {
let mut looking_zs_prod = looking_tables
.iter()
.map(|table| *ctl_zs_openings[table.table as usize].next().unwrap())
.product::<F>();
let looked_z = *ctl_zs_openings[looked_table.table as usize].next().unwrap();
looking_zs_prod *= extra_product_vec[c];
let looked_z = *ctl_zs_openings[looked_table.table as usize].next().unwrap();
ensure!(
looking_zs_prod == looked_z,
"Cross-table lookup verification failed."
@ -572,22 +580,31 @@ pub(crate) fn verify_cross_table_lookups_circuit<F: RichField + Extendable<D>, c
builder: &mut CircuitBuilder<F, D>,
cross_table_lookups: Vec<CrossTableLookup<F>>,
ctl_zs_lasts: [Vec<Target>; NUM_TABLES],
ctl_extra_looking_products: Vec<Vec<Target>>,
inner_config: &StarkConfig,
) {
let mut ctl_zs_openings = ctl_zs_lasts.iter().map(|v| v.iter()).collect::<Vec<_>>();
for CrossTableLookup {
looking_tables,
looked_table,
} in cross_table_lookups.into_iter()
for (
CrossTableLookup {
looking_tables,
looked_table,
},
extra_product_vec,
) in cross_table_lookups
.into_iter()
.zip(ctl_extra_looking_products.iter())
{
for _ in 0..inner_config.num_challenges {
let looking_zs_prod = builder.mul_many(
for c in 0..inner_config.num_challenges {
let mut looking_zs_prod = builder.mul_many(
looking_tables
.iter()
.map(|table| *ctl_zs_openings[table.table as usize].next().unwrap()),
);
looking_zs_prod = builder.mul(looking_zs_prod, extra_product_vec[c]);
let looked_z = *ctl_zs_openings[looked_table.table as usize].next().unwrap();
builder.connect(looking_zs_prod, looked_z);
builder.connect(looked_z, looking_zs_prod);
}
}
debug_assert!(ctl_zs_openings.iter_mut().all(|iter| iter.next().is_none()));
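For reference, a minimal sketch (not part of the diff) of the per-challenge check these functions now enforce: the product of the looking tables' final Z values, scaled by the extra looking product, must equal the looked table's final Z value. The inputs `looking_zs_last`, `extra_product`, and `looked_z_last` are hypothetical stand-ins for the openings used above.

use plonky2::field::types::Field;

// Sketch of the invariant checked for each challenge.
fn ctl_check_holds<F: Field>(looking_zs_last: &[F], extra_product: F, looked_z_last: F) -> bool {
    let looking_prod = looking_zs_last.iter().copied().product::<F>();
    looking_prod * extra_product == looked_z_last
}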

View File

@ -2,6 +2,7 @@ use core::mem::{self, MaybeUninit};
use std::collections::BTreeMap;
use std::ops::Range;
use ethereum_types::BigEndianHash;
use hashbrown::HashMap;
use itertools::{zip_eq, Itertools};
use plonky2::field::extension::Extendable;
@ -30,20 +31,26 @@ use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::config::StarkConfig;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup};
use crate::generation::GenerationInputs;
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::{get_grand_product_challenge_set_target, GrandProductChallengeSet};
use crate::proof::StarkProofWithMetadata;
use crate::memory::segments::Segment;
use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
use crate::permutation::{
get_grand_product_challenge_set_target, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::{BlockMetadataTarget, PublicValues, StarkProofWithMetadata};
use crate::prover::prove;
use crate::recursive_verifier::{
add_common_recursion_gates, recursive_stark_circuit, PlonkWrapperCircuit, PublicInputs,
StarkWrapperCircuit,
add_common_recursion_gates, add_virtual_trie_roots, recursive_stark_circuit,
PlonkWrapperCircuit, PublicInputs, StarkWrapperCircuit,
};
use crate::stark::Stark;
use crate::util::h160_limbs;
/// The recursion threshold. We end a chain of recursive proofs once we reach this size.
const THRESHOLD_DEGREE_BITS: usize = 13;
@ -339,6 +346,8 @@ where
pub fn new(
all_stark: &AllStark<F, D>,
degree_bits_ranges: &[Range<usize>; NUM_TABLES],
public_values: &PublicValues,
cpu_trace_len: usize,
stark_config: &StarkConfig,
) -> Self {
let arithmetic = RecursiveCircuitsForTable::new(
@ -385,7 +394,7 @@ where
);
let by_table = [arithmetic, cpu, keccak, keccak_sponge, logic, memory];
let root = Self::create_root_circuit(&by_table, stark_config);
let root = Self::create_root_circuit(public_values, cpu_trace_len, &by_table, stark_config);
let aggregation = Self::create_aggregation_circuit(&root);
let block = Self::create_block_circuit(&aggregation);
Self {
@ -397,6 +406,8 @@ where
}
fn create_root_circuit(
public_values: &PublicValues,
cpu_trace_len: usize,
by_table: &[RecursiveCircuitsForTable<F, C, D>; NUM_TABLES],
stark_config: &StarkConfig,
) -> RootCircuitData<F, C, D> {
@ -404,6 +415,7 @@ where
core::array::from_fn(|i| &by_table[i].final_circuits()[0].common);
let mut builder = CircuitBuilder::new(CircuitConfig::standard_recursion_config());
let recursive_proofs =
core::array::from_fn(|i| builder.add_virtual_proof_with_pis(inner_common_data[i]));
let pis: [_; NUM_TABLES] = core::array::from_fn(|i| {
@ -453,11 +465,50 @@ where
}
}
// Extra products to add to the looked last value
let mut extra_looking_products = Vec::new();
// Arithmetic
extra_looking_products.push(Vec::new());
for _ in 0..stark_config.num_challenges {
extra_looking_products[0].push(builder.constant(F::ONE));
}
// KeccakSponge
extra_looking_products.push(Vec::new());
for _ in 0..stark_config.num_challenges {
extra_looking_products[1].push(builder.constant(F::ONE));
}
// Keccak
extra_looking_products.push(Vec::new());
for _ in 0..stark_config.num_challenges {
extra_looking_products[2].push(builder.constant(F::ONE));
}
// Logic
extra_looking_products.push(Vec::new());
for _ in 0..stark_config.num_challenges {
extra_looking_products[3].push(builder.constant(F::ONE));
}
// Memory
extra_looking_products.push(Vec::new());
for c in 0..stark_config.num_challenges {
extra_looking_products[4].push(Self::get_memory_extra_looking_products_circuit(
&mut builder,
public_values,
cpu_trace_len,
ctl_challenges.challenges[c],
));
}
// Verify the CTL checks.
verify_cross_table_lookups_circuit::<F, D>(
&mut builder,
all_cross_table_lookups(),
pis.map(|p| p.ctl_zs_last),
extra_looking_products,
stark_config,
);
@ -505,6 +556,208 @@ where
}
}
/// Recursive version of `get_memory_extra_looking_products`.
pub(crate) fn get_memory_extra_looking_products_circuit(
builder: &mut CircuitBuilder<F, D>,
public_values: &PublicValues,
cpu_trace_len: usize,
challenge: GrandProductChallenge<Target>,
) -> Target {
let mut prod = builder.constant(F::ONE);
// Add metadata writes.
let metadata_target = BlockMetadataTarget {
block_beneficiary: builder.add_virtual_target_arr::<5>(),
block_timestamp: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_timestamp.as_u64(),
)),
block_difficulty: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_difficulty.as_u64(),
)),
block_number: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_number.as_u64(),
)),
block_gaslimit: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_gaslimit.as_u64(),
)),
block_chain_id: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_chain_id.as_u64(),
)),
block_base_fee: builder.constant(F::from_canonical_u64(
public_values.block_metadata.block_base_fee.as_u64(),
)),
};
let beneficiary_limbs = h160_limbs(public_values.block_metadata.block_beneficiary);
for i in 0..5 {
let limb_target = builder.constant(beneficiary_limbs[i]);
builder.connect(metadata_target.block_beneficiary[i], limb_target);
}
let block_fields_without_beneficiary = [
(
GlobalMetadata::BlockTimestamp,
metadata_target.block_timestamp,
),
(GlobalMetadata::BlockNumber, metadata_target.block_number),
(
GlobalMetadata::BlockDifficulty,
metadata_target.block_difficulty,
),
(
GlobalMetadata::BlockGasLimit,
metadata_target.block_gaslimit,
),
(GlobalMetadata::BlockChainId, metadata_target.block_chain_id),
(GlobalMetadata::BlockBaseFee, metadata_target.block_base_fee),
];
let zero = builder.constant(F::ZERO);
let one = builder.constant(F::ONE);
let segment = builder.constant(F::from_canonical_u32(Segment::GlobalMetadata as u32));
let row = builder.add_virtual_targets(13);
// is_read
builder.connect(row[0], zero);
// context
builder.connect(row[1], zero);
// segment
builder.connect(row[2], segment);
// virtual
let field_target = builder.constant(F::from_canonical_usize(
GlobalMetadata::BlockBeneficiary as usize,
));
builder.connect(row[3], field_target);
// values
for j in 0..5 {
builder.connect(row[4 + j], metadata_target.block_beneficiary[j]);
}
for j in 5..VALUE_LIMBS {
builder.connect(row[4 + j], zero);
}
// timestamp
builder.connect(row[12], one);
let combined = challenge.combine_base_circuit(builder, &row);
prod = builder.mul(prod, combined);
block_fields_without_beneficiary.map(|(field, target)| {
let row = builder.add_virtual_targets(13);
// is_read
builder.connect(row[0], zero);
// context
builder.connect(row[1], zero);
// segment
builder.connect(row[2], segment);
// virtual
let field_target = builder.constant(F::from_canonical_usize(field as usize));
builder.connect(row[3], field_target);
// These values only have one cell.
builder.connect(row[4], target);
for j in 1..VALUE_LIMBS {
builder.connect(row[4 + j], zero);
}
// timestamp
builder.connect(row[12], one);
let combined = challenge.combine_base_circuit(builder, &row);
prod = builder.mul(prod, combined);
});
// Add public values reads.
let tries_before_target = add_virtual_trie_roots(builder);
for (targets, hash) in [
tries_before_target.state_root,
tries_before_target.transactions_root,
tries_before_target.receipts_root,
]
.iter()
.zip([
public_values.trie_roots_before.state_root,
public_values.trie_roots_before.transactions_root,
public_values.trie_roots_before.receipts_root,
]) {
for (i, limb) in hash.into_uint().0.into_iter().enumerate() {
let low_limb_target = builder.constant(F::from_canonical_u32(limb as u32));
let high_limb_target = builder.constant(F::from_canonical_u32((limb >> 32) as u32));
builder.connect(targets[2 * i], low_limb_target);
builder.connect(targets[2 * i + 1], high_limb_target);
}
}
let tries_after_target = add_virtual_trie_roots(builder);
for (targets, hash) in [
tries_after_target.state_root,
tries_after_target.transactions_root,
tries_after_target.receipts_root,
]
.iter()
.zip([
public_values.trie_roots_after.state_root,
public_values.trie_roots_after.transactions_root,
public_values.trie_roots_after.receipts_root,
]) {
for (i, limb) in hash.into_uint().0.into_iter().enumerate() {
let low_limb_target = builder.constant(F::from_canonical_u32(limb as u32));
let high_limb_target = builder.constant(F::from_canonical_u32((limb >> 32) as u32));
builder.connect(targets[2 * i], low_limb_target);
builder.connect(targets[2 * i + 1], high_limb_target);
}
}
let trie_fields = [
(
GlobalMetadata::StateTrieRootDigestBefore,
tries_before_target.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestBefore,
tries_before_target.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestBefore,
tries_before_target.receipts_root,
),
(
GlobalMetadata::StateTrieRootDigestAfter,
tries_after_target.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestAfter,
tries_after_target.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestAfter,
tries_after_target.receipts_root,
),
];
let timestamp_target =
builder.constant(F::from_canonical_usize(cpu_trace_len * NUM_CHANNELS + 1));
trie_fields.map(|(field, targets)| {
let row = builder.add_virtual_targets(13);
// is_read
builder.connect(row[0], one);
// context
builder.connect(row[1], zero);
// segment
builder.connect(row[2], segment);
// virtual
let field_target = builder.constant(F::from_canonical_usize(field as usize));
builder.connect(row[3], field_target);
// values
for j in 0..VALUE_LIMBS {
builder.connect(row[j + 4], targets[j]);
}
// timestamp
builder.connect(row[12], timestamp_target);
let combined = challenge.combine_base_circuit(builder, &row);
prod = builder.mul(prod, combined);
});
prod
}
fn create_aggregation_circuit(
root: &RootCircuitData<F, C, D>,
) -> AggregationCircuitData<F, C, D> {

View File

@ -22,7 +22,7 @@ use crate::generation::outputs::{get_outputs, GenerationOutputs};
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::proof::{BlockMetadata, PublicValues, TrieRoots};
use crate::witness::memory::{MemoryAddress, MemoryChannel};
use crate::witness::memory::{MemoryAddress, MemoryChannel, MemoryOp, MemoryOpKind};
use crate::witness::transition::transition;
pub mod mpt;
@ -105,6 +105,54 @@ fn apply_metadata_memops<F: RichField + Extendable<D>, const D: usize>(
state.traces.memory_ops.extend(ops);
}
fn apply_trie_memops<F: RichField + Extendable<D>, const D: usize>(
state: &mut GenerationState<F>,
trie_roots_before: &TrieRoots,
trie_roots_after: &TrieRoots,
) {
let fields = [
(
GlobalMetadata::StateTrieRootDigestBefore,
trie_roots_before.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestBefore,
trie_roots_before.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestBefore,
trie_roots_before.receipts_root,
),
(
GlobalMetadata::StateTrieRootDigestAfter,
trie_roots_after.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestAfter,
trie_roots_after.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestAfter,
trie_roots_after.receipts_root,
),
];
let channel = MemoryChannel::GeneralPurpose(0);
let ops = fields.map(|(field, hash)| {
let val = hash.into_uint();
MemoryOp::new(
channel,
state.traces.cpu.len(),
MemoryAddress::new(0, Segment::GlobalMetadata, field as usize),
MemoryOpKind::Read,
val,
)
});
state.traces.memory_ops.extend(ops);
}
pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
all_stark: &AllStark<F, D>,
inputs: GenerationInputs,
@ -116,7 +164,6 @@ pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
GenerationOutputs,
)> {
let mut state = GenerationState::<F>::new(inputs.clone(), &KERNEL.code);
apply_metadata_memops(&mut state, &inputs.block_metadata);
generate_bootstrap_kernel::<F>(&mut state);
@ -147,6 +194,8 @@ pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
receipts_root: H256::from_uint(&read_metadata(ReceiptTrieRootDigestAfter)),
};
apply_trie_memops(&mut state, &trie_roots_before, &trie_roots_after);
let public_values = PublicValues {
trie_roots_before,
trie_roots_after,

View File

@ -9,6 +9,7 @@ pub(crate) const KECCAK_RATE_BYTES: usize = 136;
pub(crate) const KECCAK_RATE_U32S: usize = KECCAK_RATE_BYTES / 4;
pub(crate) const KECCAK_CAPACITY_BYTES: usize = 64;
pub(crate) const KECCAK_CAPACITY_U32S: usize = KECCAK_CAPACITY_BYTES / 4;
pub(crate) const KECCAK_DIGEST_BYTES: usize = 32;
#[repr(C)]
#[derive(Eq, PartialEq, Debug)]
@ -53,6 +54,8 @@ pub(crate) struct KeccakSpongeColumnsView<T: Copy> {
/// The entire state (rate + capacity) of the sponge, encoded as 32-bit chunks, after the
/// permutation is applied.
pub updated_state_u32s: [T; KECCAK_WIDTH_U32S],
pub updated_state_bytes: [T; KECCAK_DIGEST_BYTES],
}
// `u8` is guaranteed to have a `size_of` of 1.

View File

@ -25,7 +25,17 @@ use crate::witness::memory::MemoryAddress;
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
let cols = KECCAK_SPONGE_COL_MAP;
let outputs = Column::singles(&cols.updated_state_u32s[..8]);
let mut outputs = vec![];
for i in (0..8).rev() {
let cur_col = Column::linear_combination(
cols.updated_state_bytes[i * 4..(i + 1) * 4]
.iter()
.enumerate()
.map(|(j, &c)| (c, F::from_canonical_u64(1 << (24 - 8 * j)))),
);
outputs.push(cur_col);
}
Column::singles([
cols.context,
cols.segment,
@ -137,7 +147,11 @@ pub(crate) fn ctl_looking_memory_filter<F: Field>(i: usize) -> Column<F> {
// - this is a full input block, or
// - this is a final block of length greater than `i`
let cols = KECCAK_SPONGE_COL_MAP;
Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i..]))
if i == KECCAK_RATE_BYTES - 1 {
Column::single(cols.is_full_input_block)
} else {
Column::sum(once(&cols.is_full_input_block).chain(&cols.is_final_input_len[i + 1..]))
}
}
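A plain-boolean sketch (not part of the diff) of the filter above: byte `i` of a sponge block is read from memory iff the row is a full-rate input block, or a final block whose length is strictly greater than `i` (hence the `i + 1..` slice; for the last byte index no final length qualifies, so only the full-block flag remains).

// Sketch; `final_input_len` is `Some(len)` on a final block and `None` otherwise.
fn reads_byte_i(i: usize, is_full_input_block: bool, final_input_len: Option<usize>) -> bool {
    is_full_input_block || final_input_len.map_or(false, |len| len > i)
}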
/// CTL filter for looking at XORs in the logic table.
@ -342,6 +356,25 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakSpongeStark<F, D> {
keccakf_u32s(&mut sponge_state);
row.updated_state_u32s = sponge_state.map(F::from_canonical_u32);
let is_final_block = row.is_final_input_len.iter().copied().sum::<F>() == F::ONE;
if is_final_block {
for (l, &elt) in row.updated_state_u32s[..8].iter().enumerate() {
let mut cur_bytes = vec![F::ZERO; 4];
let mut cur_elt = elt;
for i in 0..4 {
cur_bytes[i] =
F::from_canonical_u32((cur_elt.to_canonical_u64() & 0xFF) as u32);
cur_elt = F::from_canonical_u64(cur_elt.to_canonical_u64() >> 8);
row.updated_state_bytes[l * 4 + i] = cur_bytes[i];
}
let mut s = row.updated_state_bytes[l * 4].to_canonical_u64();
for i in 1..4 {
s += row.updated_state_bytes[l * 4 + i].to_canonical_u64() << (8 * i);
}
assert_eq!(elt, F::from_canonical_u64(s), "not equal");
}
}
}
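A plain-integer sketch (not part of the diff) of the round trip performed above and constrained below: each digest limb is split into four little-endian bytes, and the constraints recompose them as s = sum_j byte_j * 2^(8*j).

// Sketch of the little-endian byte split used for `updated_state_bytes`
// and the linear recomposition the constraints check.
fn decompose_u32_le(limb: u32) -> [u32; 4] {
    core::array::from_fn(|j| (limb >> (8 * j)) & 0xFF)
}

fn recompose_u32_le(bytes: [u32; 4]) -> u32 {
    bytes
        .iter()
        .enumerate()
        .fold(0u32, |acc, (j, &b)| acc + (b << (8 * j)))
}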
fn generate_padding_row(&self) -> [F; NUM_KECCAK_SPONGE_COLUMNS] {
@ -448,6 +481,16 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
let entry_match = offset - P::from(FE::from_canonical_usize(i));
yield_constr.constraint(is_final_len * entry_match);
}
// Adding constraints for byte columns.
for (l, &elt) in local_values.updated_state_u32s[..8].iter().enumerate() {
let mut s = local_values.updated_state_bytes[l * 4];
for i in 1..4 {
s += local_values.updated_state_bytes[l * 4 + i]
* P::from(FE::from_canonical_usize(1 << (8 * i)));
}
yield_constr.constraint(is_final_block * (s - elt));
}
}
fn eval_ext_circuit(
@ -572,6 +615,21 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
let constraint = builder.mul_extension(is_final_len, entry_match);
yield_constr.constraint(builder, constraint);
}
// Adding constraints for byte columns.
for (l, &elt) in local_values.updated_state_u32s[..8].iter().enumerate() {
let mut s = local_values.updated_state_bytes[l * 4];
for i in 1..4 {
s = builder.mul_const_add_extension(
F::from_canonical_usize(1 << (8 * i)),
local_values.updated_state_bytes[l * 4 + i],
s,
);
}
let constraint = builder.sub_extension(s, elt);
let constraint = builder.mul_extension(is_final_block, constraint);
yield_constr.constraint(builder, constraint);
}
}
fn constraint_degree(&self) -> usize {

View File

@ -14,7 +14,9 @@ use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, Hasher};
use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit};
use plonky2::plonk::plonk_common::{
reduce_with_powers, reduce_with_powers_circuit, reduce_with_powers_ext_circuit,
};
use plonky2::util::reducing::{ReducingFactor, ReducingFactorTarget};
use plonky2::util::serialization::{Buffer, IoResult, Read, Write};
use plonky2_maybe_rayon::*;
@ -83,6 +85,17 @@ impl GrandProductChallenge<Target> {
}
}
impl GrandProductChallenge<Target> {
pub(crate) fn combine_base_circuit<F: RichField + Extendable<D>, const D: usize>(
&self,
builder: &mut CircuitBuilder<F, D>,
terms: &[Target],
) -> Target {
let reduced = reduce_with_powers_circuit(builder, terms, self.beta);
builder.add(reduced, self.gamma)
}
}
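For reference, a sketch (not part of the diff) of the value `combine_base_circuit` builds, written over a plain field instead of circuit targets: combine(t_0, ..., t_{n-1}) = gamma + sum_i t_i * beta^i, i.e. `reduce_with_powers` followed by the addition of gamma.

use plonky2::field::types::Field;

// Sketch; `beta` and `gamma` stand for `self.beta` and `self.gamma`.
fn combine_base<F: Field>(terms: &[F], beta: F, gamma: F) -> F {
    // Horner evaluation of sum_i terms[i] * beta^i.
    let reduced = terms.iter().rev().fold(F::ZERO, |acc, &t| acc * beta + t);
    gamma + reduced
}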
/// Like `PermutationChallenge`, but with `num_challenges` copies to boost soundness.
#[derive(Clone, Eq, PartialEq, Debug)]
pub(crate) struct GrandProductChallengeSet<T: Copy + Eq + PartialEq + Debug> {

View File

@ -119,7 +119,8 @@ where
C: GenericConfig<D, F = F>,
{
pub(crate) init_challenger_state: <C::Hasher as Hasher<F>>::Permutation,
pub(crate) proof: StarkProof<F, C, D>,
// TODO: set it back to pub(crate) when cpu trace len is a public input
pub proof: StarkProof<F, C, D>,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> StarkProof<F, C, D> {

View File

@ -129,10 +129,21 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
ensure!(pis[i].challenger_state_before == pis[i - 1].challenger_state_after);
}
// Dummy values which will make the check fail.
// TODO: Fix this if the code isn't deprecated.
let mut extra_looking_products = Vec::new();
for i in 0..NUM_TABLES {
extra_looking_products.push(Vec::new());
for _ in 0..inner_config.num_challenges {
extra_looking_products[i].push(F::ONE);
}
}
// Verify the CTL checks.
verify_cross_table_lookups::<F, D>(
&cross_table_lookups,
pis.map(|p| p.ctl_zs_last),
extra_looking_products,
inner_config,
)?;

View File

@ -1,6 +1,7 @@
use std::any::type_name;
use anyhow::{ensure, Result};
use ethereum_types::{BigEndianHash, U256};
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::types::Field;
use plonky2::fri::verifier::verify_fri_proof;
@ -13,14 +14,17 @@ use crate::arithmetic::arithmetic_stark::ArithmeticStark;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
use crate::cross_table_lookup::{verify_cross_table_lookups, CtlCheckVars};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_sponge::keccak_sponge_stark::KeccakSpongeStark;
use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckVars;
use crate::memory::segments::Segment;
use crate::memory::{NUM_CHANNELS, VALUE_LIMBS};
use crate::permutation::{GrandProductChallenge, PermutationCheckVars};
use crate::proof::{
AllProof, AllProofChallenges, StarkOpeningSet, StarkProof, StarkProofChallenges,
AllProof, AllProofChallenges, PublicValues, StarkOpeningSet, StarkProof, StarkProofChallenges,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
@ -106,13 +110,159 @@ where
config,
)?;
let public_values = all_proof.public_values;
// Extra products to add to the looked last value.
let mut extra_looking_products = Vec::new();
// Arithmetic
extra_looking_products.push(Vec::new());
for _ in 0..config.num_challenges {
extra_looking_products[0].push(F::ONE);
}
// KeccakSponge
extra_looking_products.push(Vec::new());
for _ in 0..config.num_challenges {
extra_looking_products[1].push(F::ONE);
}
// Keccak
extra_looking_products.push(Vec::new());
for _ in 0..config.num_challenges {
extra_looking_products[2].push(F::ONE);
}
// Logic
extra_looking_products.push(Vec::new());
for _ in 0..config.num_challenges {
extra_looking_products[3].push(F::ONE);
}
// Memory
extra_looking_products.push(Vec::new());
let cpu_trace_len = 1 << all_proof.stark_proofs[0].proof.recover_degree_bits(config);
for c in 0..config.num_challenges {
extra_looking_products[4].push(get_memory_extra_looking_products(
&public_values,
cpu_trace_len,
ctl_challenges.challenges[c],
));
}
verify_cross_table_lookups::<F, D>(
cross_table_lookups,
all_proof.stark_proofs.map(|p| p.proof.openings.ctl_zs_last),
extra_looking_products,
config,
)
}
/// Computes the extra product to multiply into the looked value. It accounts for the memory operations not present in the CPU trace:
/// - block metadata writes before kernel bootstrapping,
/// - public values reads at the end of the execution.
pub(crate) fn get_memory_extra_looking_products<F, const D: usize>(
public_values: &PublicValues,
cpu_trace_len: usize,
challenge: GrandProductChallenge<F>,
) -> F
where
F: RichField + Extendable<D>,
{
let mut prod = F::ONE;
// Add metadata writes.
let block_fields = [
(
GlobalMetadata::BlockBeneficiary,
U256::from_big_endian(&public_values.block_metadata.block_beneficiary.0),
),
(
GlobalMetadata::BlockTimestamp,
public_values.block_metadata.block_timestamp,
),
(
GlobalMetadata::BlockNumber,
public_values.block_metadata.block_number,
),
(
GlobalMetadata::BlockDifficulty,
public_values.block_metadata.block_difficulty,
),
(
GlobalMetadata::BlockGasLimit,
public_values.block_metadata.block_gaslimit,
),
(
GlobalMetadata::BlockChainId,
public_values.block_metadata.block_chain_id,
),
(
GlobalMetadata::BlockBaseFee,
public_values.block_metadata.block_base_fee,
),
];
let is_read = F::ZERO;
let context = F::ZERO;
let segment = F::from_canonical_u32(Segment::GlobalMetadata as u32);
let timestamp = F::ONE;
block_fields.map(|(field, val)| {
let mut row = vec![F::ZERO; 13];
row[0] = is_read;
row[1] = context;
row[2] = segment;
row[3] = F::from_canonical_usize(field as usize);
for j in 0..VALUE_LIMBS {
row[j + 4] = F::from_canonical_u32((val >> (j * 32)).low_u32());
}
row[12] = timestamp;
prod *= challenge.combine(row.iter());
});
// Add public values reads.
let trie_fields = [
(
GlobalMetadata::StateTrieRootDigestBefore,
public_values.trie_roots_before.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestBefore,
public_values.trie_roots_before.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestBefore,
public_values.trie_roots_before.receipts_root,
),
(
GlobalMetadata::StateTrieRootDigestAfter,
public_values.trie_roots_after.state_root,
),
(
GlobalMetadata::TransactionTrieRootDigestAfter,
public_values.trie_roots_after.transactions_root,
),
(
GlobalMetadata::ReceiptTrieRootDigestAfter,
public_values.trie_roots_after.receipts_root,
),
];
let is_read = F::ONE;
let timestamp = F::from_canonical_usize(cpu_trace_len * NUM_CHANNELS + 1);
trie_fields.map(|(field, hash)| {
let mut row = vec![F::ZERO; 13];
row[0] = is_read;
row[1] = context;
row[2] = segment;
row[3] = F::from_canonical_usize(field as usize);
let val = hash.into_uint();
for j in 0..VALUE_LIMBS {
row[j + 4] = F::from_canonical_u32((val >> (j * 32)).low_u32());
}
row[12] = timestamp;
prod *= challenge.combine(row.iter());
});
prod
}
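A standalone sketch (not part of the diff; the helper name `metadata_memory_row` is hypothetical) of the 13-column row combined above: is_read, context, segment, virtual address, the eight 32-bit little-endian value limbs, and the timestamp.

use ethereum_types::U256;
use plonky2::field::types::Field;

// Sketch of the row layout fed to `challenge.combine` above.
// Columns: [is_read, context, segment, virtual, value_limbs[0..8], timestamp].
fn metadata_memory_row<F: Field>(
    is_read: bool,
    segment: u32,
    virt: usize,
    val: U256,
    timestamp: usize,
) -> Vec<F> {
    let mut row = vec![F::ZERO; 13];
    row[0] = F::from_bool(is_read);
    // row[1] is the context, always zero for global metadata accesses.
    row[2] = F::from_canonical_u32(segment);
    row[3] = F::from_canonical_usize(virt);
    for j in 0..8 {
        row[4 + j] = F::from_canonical_u32((val >> (j * 32)).low_u32());
    }
    row[12] = F::from_canonical_usize(timestamp);
    row
}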
pub(crate) fn verify_stark_proof_with_challenges<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,

View File

@ -141,12 +141,7 @@ pub(crate) fn generate_keccak_general<F: Field>(
log::debug!("Hashing {:?}", input);
let hash = keccak(&input);
let val_u64s: [u64; 4] =
core::array::from_fn(|i| u64::from_le_bytes(core::array::from_fn(|j| hash.0[i * 8 + j])));
let hash_int = U256(val_u64s);
let mut log_push = stack_push_log_and_fill(state, &mut row, hash_int)?;
log_push.value = hash.into_uint();
let log_push = stack_push_log_and_fill(state, &mut row, hash.into_uint())?;
keccak_sponge_log(state, base_address, input);

View File

@ -93,11 +93,14 @@ fn test_empty_txn_list() -> anyhow::Result<()> {
receipts_trie_root
);
verify_proof(&all_stark, proof, &config)?;
verify_proof(&all_stark, proof.clone(), &config)?;
let cpu_trace_len = 1 << proof.stark_proofs[0].proof.recover_degree_bits(&config);
let all_circuits = AllRecursiveCircuits::<F, C, D>::new(
&all_stark,
&[16..17, 14..15, 14..15, 9..10, 12..13, 18..19], // Minimal ranges to prove an empty list
&proof.public_values,
cpu_trace_len,
&config,
);