Public memory

This commit is contained in:
Daniel Lubarov 2022-08-25 12:24:22 -07:00
parent 9671c1e535
commit aa87f2c3ba
30 changed files with 509 additions and 369 deletions

View File

@ -53,11 +53,10 @@ impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
const COLUMNS: usize = columns::NUM_ARITH_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -72,7 +71,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticSta
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = vars.local_values;

View File

@ -15,7 +15,6 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer
use crate::cpu::columns::{CpuColumnsView, NUM_CPU_COLUMNS};
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::keccak_util::keccakf_u32s;
use crate::cpu::public_inputs::NUM_PUBLIC_INPUTS;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
@ -50,7 +49,7 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
let mut packed_bytes: u32 = 0;
for (addr, byte) in chunk {
let channel = addr % NUM_CHANNELS;
state.set_mem_current(channel, Segment::Code, addr, byte.into());
state.set_mem_cpu_current(channel, Segment::Code, addr, byte.into());
packed_bytes = (packed_bytes << 8) | byte as u32;
}
@ -73,7 +72,7 @@ pub(crate) fn generate_bootstrap_kernel<F: Field>(state: &mut GenerationState<F>
}
pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationVars<F, P, NUM_CPU_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();
@ -109,7 +108,7 @@ pub(crate) fn eval_bootstrap_kernel<F: Field, P: PackedField<Scalar = F>>(
pub(crate) fn eval_bootstrap_kernel_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationTargets<D, NUM_CPU_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values: &CpuColumnsView<_> = vars.local_values.borrow();

View File

@ -99,11 +99,10 @@ impl<F: RichField, const D: usize> CpuStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D> {
const COLUMNS: usize = NUM_CPU_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -122,7 +121,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for CpuStark<F, D
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let local_values = vars.local_values.borrow();

View File

@ -21,10 +21,20 @@ pub(crate) enum GlobalMetadata {
/// The number of storage tries involved in this transaction. I.e. the number of values in
/// `StorageTrieAddresses`, `StorageTriePointers` and `StorageTrieCheckpointPointers`.
NumStorageTries = 7,
// The root digests of each Merkle trie before these transactions.
StateTrieRootDigestBefore = 8,
TransactionsTrieRootDigestBefore = 9,
ReceiptsTrieRootDigestBefore = 10,
// The root digests of each Merkle trie after these transactions.
StateTrieRootDigestAfter = 11,
TransactionsTrieRootDigestAfter = 12,
ReceiptsTrieRootDigestAfter = 13,
}
impl GlobalMetadata {
pub(crate) const COUNT: usize = 8;
pub(crate) const COUNT: usize = 14;
pub(crate) fn all() -> [Self; Self::COUNT] {
[
@ -36,6 +46,12 @@ impl GlobalMetadata {
Self::TransactionTrieRoot,
Self::ReceiptTrieRoot,
Self::NumStorageTries,
Self::StateTrieRootDigestBefore,
Self::TransactionsTrieRootDigestBefore,
Self::ReceiptsTrieRootDigestBefore,
Self::StateTrieRootDigestAfter,
Self::TransactionsTrieRootDigestAfter,
Self::ReceiptsTrieRootDigestAfter,
]
}
@ -50,6 +66,20 @@ impl GlobalMetadata {
GlobalMetadata::TransactionTrieRoot => "GLOBAL_METADATA_TXN_TRIE_ROOT",
GlobalMetadata::ReceiptTrieRoot => "GLOBAL_METADATA_RECEIPT_TRIE_ROOT",
GlobalMetadata::NumStorageTries => "GLOBAL_METADATA_NUM_STORAGE_TRIES",
GlobalMetadata::StateTrieRootDigestBefore => "GLOBAL_METADATA_STATE_TRIE_DIGEST_BEFORE",
GlobalMetadata::TransactionsTrieRootDigestBefore => {
"GLOBAL_METADATA_TXNS_TRIE_DIGEST_BEFORE"
}
GlobalMetadata::ReceiptsTrieRootDigestBefore => {
"GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_BEFORE"
}
GlobalMetadata::StateTrieRootDigestAfter => "GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER",
GlobalMetadata::TransactionsTrieRootDigestAfter => {
"GLOBAL_METADATA_TXNS_TRIE_DIGEST_AFTER"
}
GlobalMetadata::ReceiptsTrieRootDigestAfter => {
"GLOBAL_METADATA_RECEIPTS_TRIE_DIGEST_AFTER"
}
}
}
}

View File

@ -2,9 +2,9 @@ pub mod aggregator;
pub mod assembler;
mod ast;
mod constants;
mod context_metadata;
pub(crate) mod context_metadata;
mod cost_estimator;
mod global_metadata;
pub(crate) mod global_metadata;
pub(crate) mod keccak_util;
mod opcodes;
mod optimizer;

View File

@ -5,6 +5,5 @@ pub mod cpu_stark;
pub(crate) mod decode;
mod jumps;
pub mod kernel;
pub mod public_inputs;
mod simple_logic;
mod syscalls;

View File

@ -1 +0,0 @@
pub const NUM_PUBLIC_INPUTS: usize = 0; // PIs will be added later.

View File

@ -19,7 +19,7 @@ use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer
use crate::permutation::{
get_grand_product_challenge_set, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::{StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget};
use crate::proof::{StarkProof, StarkProofTarget};
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
@ -337,7 +337,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
CtlCheckVars<'a, F, F::Extension, F::Extension, D>
{
pub(crate) fn from_proofs<C: GenericConfig<D, F = F>>(
proofs: &[StarkProofWithPublicInputs<F, C, D>],
proofs: &[StarkProof<F, C, D>],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_permutation_zs: &[usize],
@ -347,7 +347,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
.iter()
.zip(num_permutation_zs)
.map(|(p, &num_perms)| {
let openings = &p.proof.openings;
let openings = &p.openings;
let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms);
ctl_zs.zip(ctl_zs_next)
@ -388,7 +388,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}
pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
) where
@ -441,7 +441,7 @@ pub struct CtlCheckVarsTarget<'a, F: Field, const D: usize> {
impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
pub(crate) fn from_proofs(
proofs: &[StarkProofWithPublicInputsTarget<D>],
proofs: &[StarkProofTarget<D>],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<Target>,
num_permutation_zs: &[usize],
@ -451,7 +451,7 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<'a, F, D> {
.iter()
.zip(num_permutation_zs)
.map(|(p, &num_perms)| {
let openings = &p.proof.openings;
let openings = &p.openings;
let ctl_zs = openings.permutation_ctl_zs.iter().skip(num_perms);
let ctl_zs_next = openings.permutation_ctl_zs_next.iter().skip(num_perms);
ctl_zs.zip(ctl_zs_next)
@ -497,7 +497,7 @@ pub(crate) fn eval_cross_table_lookup_checks_circuit<
const D: usize,
>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
) {
@ -559,17 +559,17 @@ pub(crate) fn verify_cross_table_lookups<
const D: usize,
>(
cross_table_lookups: Vec<CrossTableLookup<F>>,
proofs: &[StarkProofWithPublicInputs<F, C, D>],
proofs: &[StarkProof<F, C, D>],
challenges: GrandProductChallengeSet<F>,
config: &StarkConfig,
) -> Result<()> {
let degrees_bits = proofs
.iter()
.map(|p| p.proof.recover_degree_bits(config))
.map(|p| p.recover_degree_bits(config))
.collect::<Vec<_>>();
let mut ctl_zs_openings = proofs
.iter()
.map(|p| p.proof.openings.ctl_zs_last.iter())
.map(|p| p.openings.ctl_zs_last.iter())
.collect::<Vec<_>>();
for (
i,
@ -617,17 +617,17 @@ pub(crate) fn verify_cross_table_lookups_circuit<
>(
builder: &mut CircuitBuilder<F, D>,
cross_table_lookups: Vec<CrossTableLookup<F>>,
proofs: &[StarkProofWithPublicInputsTarget<D>],
proofs: &[StarkProofTarget<D>],
challenges: GrandProductChallengeSet<Target>,
inner_config: &StarkConfig,
) {
let degrees_bits = proofs
.iter()
.map(|p| p.proof.recover_degree_bits(inner_config))
.map(|p| p.recover_degree_bits(inner_config))
.collect::<Vec<_>>();
let mut ctl_zs_openings = proofs
.iter()
.map(|p| p.proof.openings.ctl_zs_last.iter())
.map(|p| p.openings.ctl_zs_last.iter())
.collect::<Vec<_>>();
for (
i,

View File

@ -7,17 +7,20 @@ use plonky2::hash::hash_types::RichField;
use crate::all_stark::AllStark;
use crate::cpu::bootstrap_kernel::generate_bootstrap_kernel;
use crate::cpu::columns::NUM_CPU_COLUMNS;
use crate::cpu::kernel::global_metadata::GlobalMetadata;
use crate::generation::partial_trie::PartialTrie;
use crate::generation::state::GenerationState;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
use crate::proof::{BlockMetadata, PublicValues, TrieRoots};
use crate::util::trace_rows_to_poly_values;
pub(crate) mod memory;
pub mod partial_trie;
pub(crate) mod state;
#[allow(unused)] // TODO: Should be used soon.
pub struct TransactionData {
pub signed_txn: Vec<u8>,
pub struct EvmInputs {
pub signed_txns: Vec<Vec<u8>>,
/// A partial version of the state trie prior to these transactions. It should include all nodes
/// that will be accessed by these transactions.
@ -25,30 +28,55 @@ pub struct TransactionData {
/// A partial version of the transactions trie prior to these transactions. It should include all
/// nodes that will be accessed by these transactions.
pub transaction_trie: PartialTrie,
pub transactions_trie: PartialTrie,
/// A partial version of the receipts trie prior to these transactions. It should include all nodes
/// that will be accessed by these transactions.
pub receipt_trie: PartialTrie,
pub receipts_trie: PartialTrie,
/// A partial version of each storage trie prior to these transactions. It should include all
/// storage tries, and nodes therein, that will be accessed by these transactions.
pub storage_tries: Vec<(Address, PartialTrie)>,
pub block_metadata: BlockMetadata,
}
#[allow(unused)] // TODO: Should be used soon.
pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
pub(crate) fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
all_stark: &AllStark<F, D>,
txns: &[TransactionData],
) -> Vec<Vec<PolynomialValues<F>>> {
inputs: EvmInputs,
) -> (Vec<Vec<PolynomialValues<F>>>, PublicValues) {
let mut state = GenerationState::<F>::default();
generate_bootstrap_kernel::<F>(&mut state);
for txn in txns {
for txn in &inputs.signed_txns {
generate_txn(&mut state, txn);
}
// TODO: Pad to a power of two, ending in the `halt` kernel function.
let cpu_rows = state.cpu_rows.len();
let mem_end_timestamp = cpu_rows * NUM_CHANNELS;
let mut read_metadata = |field| {
state.get_mem(
0,
Segment::GlobalMetadata,
field as usize,
mem_end_timestamp,
)
};
let trie_roots_before = TrieRoots {
state_root: read_metadata(GlobalMetadata::StateTrieRootDigestBefore),
transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestBefore),
receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestBefore),
};
let trie_roots_after = TrieRoots {
state_root: read_metadata(GlobalMetadata::StateTrieRootDigestAfter),
transactions_root: read_metadata(GlobalMetadata::TransactionsTrieRootDigestAfter),
receipts_root: read_metadata(GlobalMetadata::ReceiptsTrieRootDigestAfter),
};
let GenerationState {
cpu_rows,
current_cpu_row,
@ -63,9 +91,17 @@ pub fn generate_traces<F: RichField + Extendable<D>, const D: usize>(
let keccak_trace = all_stark.keccak_stark.generate_trace(keccak_inputs);
let logic_trace = all_stark.logic_stark.generate_trace(logic_ops);
let memory_trace = all_stark.memory_stark.generate_trace(memory.log);
vec![cpu_trace, keccak_trace, logic_trace, memory_trace]
let traces = vec![cpu_trace, keccak_trace, logic_trace, memory_trace];
let public_values = PublicValues {
trie_roots_before,
trie_roots_after,
block_metadata: inputs.block_metadata,
};
(traces, public_values)
}
fn generate_txn<F: Field>(_state: &mut GenerationState<F>, _txn: &TransactionData) {
fn generate_txn<F: Field>(_state: &mut GenerationState<F>, _signed_txn: &[u8]) {
// TODO
}

View File

@ -10,6 +10,7 @@ use crate::keccak_memory::keccak_memory_stark::KeccakMemoryOp;
use crate::memory::memory_stark::MemoryOp;
use crate::memory::segments::Segment;
use crate::memory::NUM_CHANNELS;
use crate::util::u256_limbs;
use crate::{keccak, logic};
#[derive(Debug)]
@ -52,28 +53,49 @@ impl<F: Field> GenerationState<F> {
result
}
/// Read some memory within the current execution context, and log the operation.
/// Like `get_mem_cpu`, but reads from the current context specifically.
#[allow(unused)] // TODO: Should be used soon.
pub(crate) fn get_mem_current(
pub(crate) fn get_mem_cpu_current(
&mut self,
channel_index: usize,
segment: Segment,
virt: usize,
) -> U256 {
let context = self.current_context;
self.get_mem(channel_index, context, segment, virt)
self.get_mem_cpu(channel_index, context, segment, virt)
}
/// Read some memory, and log the operation.
pub(crate) fn get_mem(
/// Simulates the CPU reading some memory through the given channel. Besides logging the memory
/// operation, this also generates the associated registers in the current CPU row.
pub(crate) fn get_mem_cpu(
&mut self,
channel_index: usize,
context: usize,
segment: Segment,
virt: usize,
) -> U256 {
let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index;
let value = self.get_mem(context, segment, virt, timestamp);
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
let timestamp = self.cpu_rows.len();
self.current_cpu_row.mem_is_read[channel_index] = F::ONE;
self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context);
self.current_cpu_row.mem_addr_segment[channel_index] =
F::from_canonical_usize(segment as usize);
self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt);
self.current_cpu_row.mem_value[channel_index] = u256_limbs(value);
value
}
/// Read some memory, and log the operation.
pub(crate) fn get_mem(
&mut self,
context: usize,
segment: Segment,
virt: usize,
timestamp: usize,
) -> U256 {
let value = self.memory.contexts[context].segments[segment as usize].get(virt);
self.memory.log.push(MemoryOp {
filter: true,
@ -88,7 +110,7 @@ impl<F: Field> GenerationState<F> {
}
/// Write some memory within the current execution context, and log the operation.
pub(crate) fn set_mem_current(
pub(crate) fn set_mem_cpu_current(
&mut self,
channel_index: usize,
segment: Segment,
@ -96,11 +118,11 @@ impl<F: Field> GenerationState<F> {
value: U256,
) {
let context = self.current_context;
self.set_mem(channel_index, context, segment, virt, value);
self.set_mem_cpu(channel_index, context, segment, virt, value);
}
/// Write some memory, and log the operation.
pub(crate) fn set_mem(
pub(crate) fn set_mem_cpu(
&mut self,
channel_index: usize,
context: usize,
@ -108,9 +130,27 @@ impl<F: Field> GenerationState<F> {
virt: usize,
value: U256,
) {
let timestamp = self.cpu_rows.len() * NUM_CHANNELS + channel_index;
self.set_mem(context, segment, virt, value, timestamp);
self.current_cpu_row.mem_channel_used[channel_index] = F::ONE;
let timestamp = self.cpu_rows.len();
let timestamp = timestamp * NUM_CHANNELS + channel_index;
self.current_cpu_row.mem_is_read[channel_index] = F::ZERO; // For clarity; should already be 0.
self.current_cpu_row.mem_addr_context[channel_index] = F::from_canonical_usize(context);
self.current_cpu_row.mem_addr_segment[channel_index] =
F::from_canonical_usize(segment as usize);
self.current_cpu_row.mem_addr_virtual[channel_index] = F::from_canonical_usize(virt);
self.current_cpu_row.mem_value[channel_index] = u256_limbs(value);
}
/// Write some memory, and log the operation.
pub(crate) fn set_mem(
&mut self,
context: usize,
segment: Segment,
virt: usize,
value: U256,
timestamp: usize,
) {
self.memory.log.push(MemoryOp {
filter: true,
timestamp,
@ -133,11 +173,12 @@ impl<F: Field> GenerationState<F> {
virt: usize,
) -> [u64; keccak::keccak_stark::NUM_INPUTS] {
let read_timestamp = self.cpu_rows.len() * NUM_CHANNELS;
let _write_timestamp = read_timestamp + 1;
let input = (0..25)
.map(|i| {
let bytes = [0, 1, 2, 3, 4, 5, 6, 7].map(|j| {
let virt = virt + i * 8 + j;
let byte = self.get_mem(0, context, segment, virt);
let byte = self.get_mem(context, segment, virt, read_timestamp);
debug_assert!(byte.bits() <= 8);
byte.as_u32() as u8
});
@ -155,6 +196,7 @@ impl<F: Field> GenerationState<F> {
input,
output,
});
// TODO: Write output to memory.
output
}

View File

@ -24,9 +24,11 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> A
let mut challenger = Challenger::<F, C::Hasher>::new();
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
challenger.observe_cap(&proof.trace_cap);
}
// TODO: Observe public values.
let ctl_challenges =
get_grand_product_challenge_set(&mut challenger, config.num_challenges);
@ -58,7 +60,7 @@ impl<const D: usize> AllProofTarget<D> {
let mut challenger = RecursiveChallenger::<F, C::Hasher, D>::new(builder);
for proof in &self.stark_proofs {
challenger.observe_cap(&proof.proof.trace_cap);
challenger.observe_cap(&proof.trace_cap);
}
let ctl_challenges =
@ -85,7 +87,7 @@ impl<const D: usize> AllProofTarget<D> {
}
}
impl<F, C, const D: usize> StarkProofWithPublicInputs<F, C, D>
impl<F, C, const D: usize> StarkProof<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
@ -98,7 +100,7 @@ where
stark_permutation_batch_size: usize,
config: &StarkConfig,
) -> StarkProofChallenges<F, D> {
let degree_bits = self.proof.recover_degree_bits(config);
let degree_bits = self.recover_degree_bits(config);
let StarkProof {
permutation_ctl_zs_cap,
@ -112,7 +114,7 @@ where
..
},
..
} = &self.proof;
} = &self;
let num_challenges = config.num_challenges;
@ -148,7 +150,7 @@ where
}
}
impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
impl<const D: usize> StarkProofTarget<D> {
pub(crate) fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>>(
&self,
builder: &mut CircuitBuilder<F, D>,
@ -172,7 +174,7 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
..
},
..
} = &self.proof;
} = &self;
let num_challenges = config.num_challenges;

View File

@ -32,8 +32,6 @@ pub(crate) const NUM_ROUNDS: usize = 24;
/// Number of 64-bit elements in the Keccak permutation input.
pub(crate) const NUM_INPUTS: usize = 25;
pub(crate) const NUM_PUBLIC_INPUTS: usize = 0;
pub fn ctl_data<F: Field>() -> Vec<Column<F>> {
let mut res: Vec<_> = (0..2 * NUM_INPUTS).map(reg_input_limb).collect();
res.extend(Column::singles((0..2 * NUM_INPUTS).map(reg_output_limb)));
@ -228,11 +226,10 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -380,7 +377,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let two = builder.two();

View File

@ -7,12 +7,12 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::keccak::columns::reg_step;
use crate::keccak::columns::NUM_COLUMNS;
use crate::keccak::keccak_stark::{NUM_PUBLIC_INPUTS, NUM_ROUNDS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
vars: StarkEvaluationVars<F, P, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationVars<F, P, NUM_COLUMNS>,
yield_constr: &mut ConstraintConsumer<P>,
) {
// Initially, the first step flag should be 1 while the others should be 0.
@ -30,7 +30,7 @@ pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS, NUM_PUBLIC_INPUTS>,
vars: StarkEvaluationTargets<D, NUM_COLUMNS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();

View File

@ -18,8 +18,6 @@ use crate::util::trace_rows_to_poly_values;
use crate::vars::StarkEvaluationTargets;
use crate::vars::StarkEvaluationVars;
const NUM_PUBLIC_INPUTS: usize = 0;
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
Column::singles([COL_CONTEXT, COL_SEGMENT, COL_VIRTUAL, COL_READ_TIMESTAMP]).collect()
}
@ -162,11 +160,10 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakMemoryStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakMemoryStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -180,7 +177,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakMemoryS
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
// is_real must be 0 or 1.

View File

@ -140,11 +140,10 @@ impl<F: RichField, const D: usize> LogicStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F, D> {
const COLUMNS: usize = columns::NUM_COLUMNS;
const PUBLIC_INPUTS: usize = 0;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -196,7 +195,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for LogicStark<F,
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = &vars.local_values;

View File

@ -10,13 +10,8 @@ use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn eval_lookups<
F: Field,
P: PackedField<Scalar = F>,
const COLS: usize,
const PUB_INPUTS: usize,
>(
vars: StarkEvaluationVars<F, P, COLS, PUB_INPUTS>,
pub(crate) fn eval_lookups<F: Field, P: PackedField<Scalar = F>, const COLS: usize>(
vars: StarkEvaluationVars<F, P, COLS>,
yield_constr: &mut ConstraintConsumer<P>,
col_permuted_input: usize,
col_permuted_table: usize,
@ -42,10 +37,9 @@ pub(crate) fn eval_lookups_circuit<
F: RichField + Extendable<D>,
const D: usize,
const COLS: usize,
const PUB_INPUTS: usize,
>(
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, COLS, PUB_INPUTS>,
vars: StarkEvaluationTargets<D, COLS>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
col_permuted_input: usize,
col_permuted_table: usize,

View File

@ -26,8 +26,6 @@ use crate::permutation::PermutationPair;
use crate::stark::Stark;
use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) const NUM_PUBLIC_INPUTS: usize = 0;
pub fn ctl_data<F: Field>() -> Vec<Column<F>> {
let mut res =
Column::singles([IS_READ, ADDR_CONTEXT, ADDR_SEGMENT, ADDR_VIRTUAL]).collect_vec();
@ -218,11 +216,10 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F, D> {
const COLUMNS: usize = NUM_COLUMNS;
const PUBLIC_INPUTS: usize = NUM_PUBLIC_INPUTS;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -302,7 +299,7 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
fn eval_ext_circuit(
&self,
builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let one = builder.one_extension();

View File

@ -298,7 +298,7 @@ where
pub(crate) fn eval_permutation_checks<F, FE, P, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
permutation_vars: PermutationCheckVars<F, FE, P, D2>,
consumer: &mut ConstraintConsumer<P>,
) where
@ -365,14 +365,13 @@ pub(crate) fn eval_permutation_checks_circuit<F, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
permutation_data: PermutationCheckDataTarget<D>,
consumer: &mut RecursiveConstraintConsumer<F, D>,
) where
F: RichField + Extendable<D>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let PermutationCheckDataTarget {
local_zs,

View File

@ -1,3 +1,4 @@
use ethereum_types::{Address, U256};
use itertools::Itertools;
use maybe_rayon::*;
use plonky2::field::extension::{Extendable, FieldExtension};
@ -17,21 +18,22 @@ use crate::permutation::GrandProductChallengeSet;
#[derive(Debug, Clone)]
pub struct AllProof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
pub stark_proofs: Vec<StarkProofWithPublicInputs<F, C, D>>,
pub stark_proofs: Vec<StarkProof<F, C, D>>,
pub public_values: PublicValues,
}
impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> AllProof<F, C, D> {
pub fn degree_bits(&self, config: &StarkConfig) -> Vec<usize> {
self.stark_proofs
.iter()
.map(|proof| proof.proof.recover_degree_bits(config))
.map(|proof| proof.recover_degree_bits(config))
.collect()
}
pub fn nums_ctl_zs(&self) -> Vec<usize> {
self.stark_proofs
.iter()
.map(|proof| proof.proof.openings.ctl_zs_last.len())
.map(|proof| proof.openings.ctl_zs_last.len())
.collect()
}
}
@ -42,7 +44,54 @@ pub(crate) struct AllProofChallenges<F: RichField + Extendable<D>, const D: usiz
}
pub struct AllProofTarget<const D: usize> {
pub stark_proofs: Vec<StarkProofWithPublicInputsTarget<D>>,
pub stark_proofs: Vec<StarkProofTarget<D>>,
pub public_values: PublicValuesTarget,
}
#[derive(Debug, Clone)]
pub struct PublicValues {
pub trie_roots_before: TrieRoots,
pub trie_roots_after: TrieRoots,
pub block_metadata: BlockMetadata,
}
#[derive(Debug, Clone)]
pub struct TrieRoots {
pub state_root: U256,
pub transactions_root: U256,
pub receipts_root: U256,
}
#[derive(Debug, Clone)]
pub struct BlockMetadata {
pub block_coinbase: Address,
pub block_timestamp: U256,
pub block_number: U256,
pub block_difficulty: U256,
pub block_gaslimit: U256,
pub block_chain_id: U256,
}
/// Note: All the larger integers are encoded with 32-bit limbs in little-endian order.
pub struct PublicValuesTarget {
pub trie_roots_before: TrieRootsTarget,
pub trie_roots_after: TrieRootsTarget,
pub block_metadata: BlockMetadataTarget,
}
pub struct TrieRootsTarget {
pub state_root: [Target; 8],
pub transactions_root: [Target; 8],
pub receipts_root: [Target; 8],
}
pub struct BlockMetadataTarget {
pub block_coinbase: [Target; 5],
pub block_timestamp: Target,
pub block_number: Target,
pub block_difficulty: Target,
pub block_gaslimit: Target,
pub block_chain_id: Target,
}
pub(crate) struct AllProofChallengesTarget<const D: usize> {
@ -96,22 +145,6 @@ impl<const D: usize> StarkProofTarget<D> {
}
}
#[derive(Debug, Clone)]
pub struct StarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
pub proof: StarkProof<F, C, D>,
// TODO: Maybe make it generic over a `S: Stark` and replace with `[F; S::PUBLIC_INPUTS]`.
pub public_inputs: Vec<F>,
}
pub struct StarkProofWithPublicInputsTarget<const D: usize> {
pub proof: StarkProofTarget<D>,
pub public_inputs: Vec<Target>,
}
pub(crate) struct StarkProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Randomness used in any permutation arguments.
pub permutation_challenge_sets: Option<Vec<GrandProductChallengeSet<F>>>,

View File

@ -22,6 +22,7 @@ use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cpu::cpu_stark::CpuStark;
use crate::cross_table_lookup::{cross_table_lookup_data, CtlCheckVars, CtlData};
use crate::generation::{generate_traces, EvmInputs};
use crate::keccak::keccak_stark::KeccakStark;
use crate::keccak_memory::keccak_memory_stark::KeccakMemoryStark;
use crate::logic::LogicStark;
@ -30,17 +31,38 @@ use crate::permutation::PermutationCheckVars;
use crate::permutation::{
compute_permutation_z_polys, get_n_grand_product_challenge_sets, GrandProductChallengeSet,
};
use crate::proof::{AllProof, StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::proof::{AllProof, PublicValues, StarkOpeningSet, StarkProof};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vars::StarkEvaluationVars;
/// Generate execution traces for the given inputs, then create all STARK table proofs.
///
/// `inputs` describes the signed transactions, initial tries and block metadata to run;
/// the returned `AllProof` bundles one `StarkProof` per table together with the resulting
/// public values.
pub fn prove<F, C, const D: usize>(
    all_stark: &AllStark<F, D>,
    config: &StarkConfig,
    inputs: EvmInputs,
    timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    [(); C::Hasher::HASH_SIZE]:,
    [(); CpuStark::<F, D>::COLUMNS]:,
    [(); KeccakStark::<F, D>::COLUMNS]:,
    [(); KeccakMemoryStark::<F, D>::COLUMNS]:,
    [(); LogicStark::<F, D>::COLUMNS]:,
    [(); MemoryStark::<F, D>::COLUMNS]:,
{
    // Run the EVM generation step to build witness traces and public values,
    // then prove each STARK table from those traces.
    let (traces, public_values) = generate_traces(all_stark, inputs);
    prove_with_traces(all_stark, config, traces, public_values, timing)
}
/// Compute all STARK proofs.
pub(crate) fn prove_with_traces<F, C, const D: usize>(
all_stark: &AllStark<F, D>,
config: &StarkConfig,
trace_poly_values: Vec<Vec<PolynomialValues<F>>>,
public_values: PublicValues,
timing: &mut TimingTree,
) -> Result<AllProof<F, C, D>>
where
@ -48,19 +70,13 @@ where
C: GenericConfig<D, F = F>,
[(); C::Hasher::HASH_SIZE]:,
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); KeccakMemoryStark::<F, D>::PUBLIC_INPUTS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
{
let num_starks = Table::num_tables();
debug_assert_eq!(num_starks, trace_poly_values.len());
debug_assert_eq!(num_starks, public_inputs.len());
let rate_bits = config.fri_config.rate_bits;
let cap_height = config.fri_config.cap_height;
@ -107,10 +123,6 @@ where
&trace_poly_values[Table::Cpu as usize],
&trace_commitments[Table::Cpu as usize],
&ctl_data_per_table[Table::Cpu as usize],
public_inputs[Table::Cpu as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -120,10 +132,6 @@ where
&trace_poly_values[Table::Keccak as usize],
&trace_commitments[Table::Keccak as usize],
&ctl_data_per_table[Table::Keccak as usize],
public_inputs[Table::Keccak as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -133,10 +141,6 @@ where
&trace_poly_values[Table::KeccakMemory as usize],
&trace_commitments[Table::KeccakMemory as usize],
&ctl_data_per_table[Table::KeccakMemory as usize],
public_inputs[Table::KeccakMemory as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -146,10 +150,6 @@ where
&trace_poly_values[Table::Logic as usize],
&trace_commitments[Table::Logic as usize],
&ctl_data_per_table[Table::Logic as usize],
public_inputs[Table::Logic as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -159,10 +159,6 @@ where
&trace_poly_values[Table::Memory as usize],
&trace_commitments[Table::Memory as usize],
&ctl_data_per_table[Table::Memory as usize],
public_inputs[Table::Memory as usize]
.clone()
.try_into()
.unwrap(),
&mut challenger,
timing,
)?;
@ -176,7 +172,10 @@ where
];
debug_assert_eq!(stark_proofs.len(), num_starks);
Ok(AllProof { stark_proofs })
Ok(AllProof {
stark_proofs,
public_values,
})
}
/// Compute proof for a single STARK table.
@ -186,17 +185,15 @@ fn prove_single_table<F, C, S, const D: usize>(
trace_poly_values: &[PolynomialValues<F>],
trace_commitment: &PolynomialBatch<F, C, D>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
challenger: &mut Challenger<F, C::Hasher>,
timing: &mut TimingTree,
) -> Result<StarkProofWithPublicInputs<F, C, D>>
) -> Result<StarkProof<F, C, D>>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); C::Hasher::HASH_SIZE]:,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = trace_poly_values[0].len();
let degree_bits = log2_strict(degree);
@ -250,7 +247,6 @@ where
&permutation_ctl_zs_commitment,
permutation_challenges.as_ref(),
ctl_data,
public_inputs,
alphas.clone(),
degree_bits,
num_permutation_zs,
@ -263,7 +259,6 @@ where
&permutation_ctl_zs_commitment,
permutation_challenges.as_ref(),
ctl_data,
public_inputs,
alphas,
degree_bits,
num_permutation_zs,
@ -332,17 +327,13 @@ where
timing,
)
);
let proof = StarkProof {
Ok(StarkProof {
trace_cap: trace_commitment.merkle_tree.cap.clone(),
permutation_ctl_zs_cap,
quotient_polys_cap,
openings,
opening_proof,
};
Ok(StarkProofWithPublicInputs {
proof,
public_inputs: public_inputs.to_vec(),
})
}
@ -354,7 +345,6 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>(
permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
num_permutation_zs: usize,
@ -366,7 +356,6 @@ where
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = config.fri_config.rate_bits;
@ -428,7 +417,6 @@ where
let vars = StarkEvaluationVars {
local_values: &get_trace_values_packed(i_start),
next_values: &get_trace_values_packed(i_next_start),
public_inputs: &public_inputs,
};
let permutation_check_vars =
permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {
@ -494,7 +482,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
permutation_ctl_zs_commitment: &'a PolynomialBatch<F, C, D>,
permutation_challenges: Option<&'a Vec<GrandProductChallengeSet<F>>>,
ctl_data: &CtlData<F>,
public_inputs: [F; S::PUBLIC_INPUTS],
alphas: Vec<F>,
degree_bits: usize,
num_permutation_zs: usize,
@ -504,7 +491,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let degree = 1 << degree_bits;
let rate_bits = 0; // Set this to higher value to check constraint degree.
@ -553,7 +539,6 @@ fn check_constraints<'a, F, C, S, const D: usize>(
let vars = StarkEvaluationVars {
local_values: trace_subgroup_evals[i].as_slice().try_into().unwrap(),
next_values: trace_subgroup_evals[i_next].as_slice().try_into().unwrap(),
public_inputs: &public_inputs,
};
let permutation_check_vars =
permutation_challenges.map(|permutation_challenge_sets| PermutationCheckVars {

View File

@ -22,11 +22,12 @@ use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckDataTarget;
use crate::proof::{
AllProof, AllProofChallengesTarget, AllProofTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, StarkProofWithPublicInputs,
StarkProofWithPublicInputsTarget,
AllProof, AllProofChallengesTarget, AllProofTarget, BlockMetadata, BlockMetadataTarget,
PublicValues, PublicValuesTarget, StarkOpeningSetTarget, StarkProof,
StarkProofChallengesTarget, StarkProofTarget, TrieRoots, TrieRootsTarget,
};
use crate::stark::Stark;
use crate::util::{h160_limbs, u256_limbs};
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vars::StarkEvaluationTargets;
@ -41,15 +42,10 @@ pub fn verify_proof_circuit<
inner_config: &StarkConfig,
) where
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); KeccakMemoryStark::<F, D>::PUBLIC_INPUTS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
C::Hasher: AlgebraicHasher<F>,
{
let AllProofChallengesTarget {
@ -158,23 +154,17 @@ fn verify_stark_proof_with_challenges_circuit<
>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
proof_with_pis: &StarkProofWithPublicInputsTarget<D>,
proof: &StarkProofTarget<D>,
challenges: &StarkProofChallengesTarget<D>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
inner_config: &StarkConfig,
) where
C::Hasher: AlgebraicHasher<F>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let zero = builder.zero();
let one = builder.one_extension();
let StarkProofWithPublicInputsTarget {
proof,
public_inputs,
} = proof_with_pis;
assert_eq!(public_inputs.len(), S::PUBLIC_INPUTS);
let StarkOpeningSetTarget {
local_values,
next_values,
@ -186,12 +176,6 @@ fn verify_stark_proof_with_challenges_circuit<
let vars = StarkEvaluationTargets {
local_values: &local_values.to_vec().try_into().unwrap(),
next_values: &next_values.to_vec().try_into().unwrap(),
public_inputs: &public_inputs
.iter()
.map(|&t| builder.convert_to_ext(t))
.collect::<Vec<_>>()
.try_into()
.unwrap(),
};
let degree_bits = proof.recover_degree_bits(inner_config);
@ -297,99 +281,93 @@ pub fn add_virtual_all_proof<F: RichField + Extendable<D>, const D: usize>(
nums_ctl_zs: &[usize],
) -> AllProofTarget<D> {
let stark_proofs = vec![
{
let proof = add_virtual_stark_proof(
builder,
all_stark.cpu_stark,
config,
degree_bits[Table::Cpu as usize],
nums_ctl_zs[Table::Cpu as usize],
);
let public_inputs = builder.add_virtual_targets(CpuStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.keccak_stark,
config,
degree_bits[Table::Keccak as usize],
nums_ctl_zs[Table::Keccak as usize],
);
let public_inputs = builder.add_virtual_targets(KeccakStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.keccak_memory_stark,
config,
degree_bits[Table::KeccakMemory as usize],
nums_ctl_zs[Table::KeccakMemory as usize],
);
let public_inputs =
builder.add_virtual_targets(KeccakMemoryStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.logic_stark,
config,
degree_bits[Table::Logic as usize],
nums_ctl_zs[Table::Logic as usize],
);
let public_inputs = builder.add_virtual_targets(LogicStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
{
let proof = add_virtual_stark_proof(
builder,
all_stark.memory_stark,
config,
degree_bits[Table::Memory as usize],
nums_ctl_zs[Table::Memory as usize],
);
let public_inputs = builder.add_virtual_targets(MemoryStark::<F, D>::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
}
},
add_virtual_stark_proof(
builder,
all_stark.cpu_stark,
config,
degree_bits[Table::Cpu as usize],
nums_ctl_zs[Table::Cpu as usize],
),
add_virtual_stark_proof(
builder,
all_stark.keccak_stark,
config,
degree_bits[Table::Keccak as usize],
nums_ctl_zs[Table::Keccak as usize],
),
add_virtual_stark_proof(
builder,
all_stark.keccak_memory_stark,
config,
degree_bits[Table::KeccakMemory as usize],
nums_ctl_zs[Table::KeccakMemory as usize],
),
add_virtual_stark_proof(
builder,
all_stark.logic_stark,
config,
degree_bits[Table::Logic as usize],
nums_ctl_zs[Table::Logic as usize],
),
add_virtual_stark_proof(
builder,
all_stark.memory_stark,
config,
degree_bits[Table::Memory as usize],
nums_ctl_zs[Table::Memory as usize],
),
];
assert_eq!(stark_proofs.len(), Table::num_tables());
AllProofTarget { stark_proofs }
let public_values = add_virtual_public_values(builder);
AllProofTarget {
stark_proofs,
public_values,
}
}
pub fn add_virtual_stark_proof_with_pis<
F: RichField + Extendable<D>,
S: Stark<F, D>,
const D: usize,
>(
pub fn add_virtual_public_values<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: S,
config: &StarkConfig,
degree_bits: usize,
num_ctl_zs: usize,
) -> StarkProofWithPublicInputsTarget<D> {
let proof = add_virtual_stark_proof::<F, S, D>(builder, stark, config, degree_bits, num_ctl_zs);
let public_inputs = builder.add_virtual_targets(S::PUBLIC_INPUTS);
StarkProofWithPublicInputsTarget {
proof,
public_inputs,
) -> PublicValuesTarget {
let trie_roots_before = add_virtual_trie_roots(builder);
let trie_roots_after = add_virtual_trie_roots(builder);
let block_metadata = add_virtual_block_metadata(builder);
PublicValuesTarget {
trie_roots_before,
trie_roots_after,
block_metadata,
}
}
/// Allocates virtual targets for the three trie roots, eight 32-bit limb targets each.
pub fn add_virtual_trie_roots<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
) -> TrieRootsTarget {
    // Struct fields are evaluated in declaration order, so targets are allocated in the
    // same order as before: state, transactions, receipts.
    TrieRootsTarget {
        state_root: builder.add_virtual_target_arr(),
        transactions_root: builder.add_virtual_target_arr(),
        receipts_root: builder.add_virtual_target_arr(),
    }
}
/// Allocates virtual targets for the block metadata: five 32-bit limb targets for the
/// coinbase address, and one scalar target for each remaining field.
pub fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
) -> BlockMetadataTarget {
    // Struct fields are evaluated in declaration order, preserving the original
    // target-allocation order.
    BlockMetadataTarget {
        block_coinbase: builder.add_virtual_target_arr(),
        block_timestamp: builder.add_virtual_target(),
        block_number: builder.add_virtual_target(),
        block_difficulty: builder.add_virtual_target(),
        block_gaslimit: builder.add_virtual_target(),
        block_chain_id: builder.add_virtual_target(),
    }
}
@ -455,35 +433,13 @@ pub fn set_all_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
.iter()
.zip_eq(&all_proof.stark_proofs)
{
set_stark_proof_with_pis_target(witness, pt, p, zero);
set_stark_proof_target(witness, pt, p, zero);
}
}
pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
witness: &mut W,
stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,
stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
zero: Target,
) where
F: RichField + Extendable<D>,
C::Hasher: AlgebraicHasher<F>,
W: Witness<F>,
{
let StarkProofWithPublicInputs {
proof,
public_inputs,
} = stark_proof_with_pis;
let StarkProofWithPublicInputsTarget {
proof: pt,
public_inputs: pi_targets,
} = stark_proof_with_pis_target;
// Set public inputs.
for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) {
witness.set_target(pi_t, pi);
}
set_stark_proof_target(witness, pt, proof, zero);
set_public_value_targets(
witness,
&all_proof_target.public_values,
&all_proof.public_values,
)
}
pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
@ -511,3 +467,84 @@ pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(
set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);
}
pub fn set_public_value_targets<F, W, const D: usize>(
witness: &mut W,
public_values_target: &PublicValuesTarget,
public_values: &PublicValues,
) where
F: RichField + Extendable<D>,
W: Witness<F>,
{
set_trie_roots_target(
witness,
&public_values_target.trie_roots_before,
&public_values.trie_roots_before,
);
set_trie_roots_target(
witness,
&public_values_target.trie_roots_after,
&public_values.trie_roots_after,
);
set_block_metadata_target(
witness,
&public_values_target.block_metadata,
&public_values.block_metadata,
);
}
/// Assigns the three trie roots to their targets, splitting each 256-bit root into
/// eight 32-bit limbs via `u256_limbs`.
pub fn set_trie_roots_target<F, W, const D: usize>(
    witness: &mut W,
    trie_roots_target: &TrieRootsTarget,
    trie_roots: &TrieRoots,
) where
    F: RichField + Extendable<D>,
    W: Witness<F>,
{
    // Shared assignment logic for a single root.
    let mut assign_root = |targets, root| witness.set_target_arr(targets, u256_limbs(root));
    assign_root(trie_roots_target.state_root, trie_roots.state_root);
    assign_root(trie_roots_target.transactions_root, trie_roots.transactions_root);
    assign_root(trie_roots_target.receipts_root, trie_roots.receipts_root);
}
/// Assigns the block metadata to its targets: the coinbase address as five 32-bit limbs,
/// and each remaining field as a single field element built from its low 64 bits.
pub fn set_block_metadata_target<F, W, const D: usize>(
    witness: &mut W,
    block_metadata_target: &BlockMetadataTarget,
    block_metadata: &BlockMetadata,
) where
    F: RichField + Extendable<D>,
    W: Witness<F>,
{
    // The 160-bit coinbase address is split into 32-bit limbs.
    witness.set_target_arr(
        block_metadata_target.block_coinbase,
        h160_limbs(block_metadata.block_coinbase),
    );
    // All other fields fit in a u64 and are assigned as single scalars.
    let mut assign_u64 =
        |target, value| witness.set_target(target, F::from_canonical_u64(value.as_u64()));
    assign_u64(block_metadata_target.block_timestamp, block_metadata.block_timestamp);
    assign_u64(block_metadata_target.block_number, block_metadata.block_number);
    assign_u64(block_metadata_target.block_difficulty, block_metadata.block_difficulty);
    assign_u64(block_metadata_target.block_gaslimit, block_metadata.block_gaslimit);
    assign_u64(block_metadata_target.block_chain_id, block_metadata.block_chain_id);
}

View File

@ -20,8 +20,6 @@ use crate::vars::StarkEvaluationVars;
pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// The total number of columns in the trace.
const COLUMNS: usize;
/// The number of public inputs.
const PUBLIC_INPUTS: usize;
/// Evaluate constraints at a vector of points.
///
@ -31,7 +29,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// constraints over `F`.
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
@ -40,7 +38,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// Evaluate constraints at a vector of points from the base field `F`.
fn eval_packed_base<P: PackedField<Scalar = F>>(
&self,
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<F, P, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<P>,
) {
self.eval_packed_generic(vars, yield_constr)
@ -49,12 +47,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
/// Evaluate constraints at a single point from the degree `D` extension field.
fn eval_ext(
&self,
vars: StarkEvaluationVars<
F::Extension,
F::Extension,
{ Self::COLUMNS },
{ Self::PUBLIC_INPUTS },
>,
vars: StarkEvaluationVars<F::Extension, F::Extension, { Self::COLUMNS }>,
yield_constr: &mut ConstraintConsumer<F::Extension>,
) {
self.eval_packed_generic(vars, yield_constr)
@ -67,7 +60,7 @@ pub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { Self::COLUMNS }>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
);

View File

@ -26,13 +26,11 @@ pub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
let rate_bits = log2_ceil(stark.constraint_degree() + 1);
let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);
let size = trace_ldes.len();
let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>();
let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits);
let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits);
@ -49,7 +47,6 @@ where
.clone()
.try_into()
.unwrap(),
public_inputs: &public_inputs,
};
let mut consumer = ConstraintConsumer::<F>::new(
@ -89,14 +86,12 @@ pub fn test_stark_circuit_constraints<
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
// Compute native constraint evaluation on random values.
let vars = StarkEvaluationVars {
local_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(),
next_values: &F::Extension::rand_arr::<{ S::COLUMNS }>(),
public_inputs: &F::Extension::rand_arr::<{ S::PUBLIC_INPUTS }>(),
};
let alphas = F::rand_vec(1);
let z_last = F::Extension::rand();
@ -124,8 +119,6 @@ where
pw.set_extension_targets(&locals_t, vars.local_values);
let nexts_t = builder.add_virtual_extension_targets(S::COLUMNS);
pw.set_extension_targets(&nexts_t, vars.next_values);
let pis_t = builder.add_virtual_extension_targets(S::PUBLIC_INPUTS);
pw.set_extension_targets(&pis_t, vars.public_inputs);
let alphas_t = builder.add_virtual_targets(1);
pw.set_target(alphas_t[0], alphas[0]);
let z_last_t = builder.add_virtual_extension_target();
@ -135,10 +128,9 @@ where
let lagrange_last_t = builder.add_virtual_extension_target();
pw.set_extension_target(lagrange_last_t, lagrange_last);
let vars = StarkEvaluationTargets::<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }> {
let vars = StarkEvaluationTargets::<D, { S::COLUMNS }> {
local_values: &locals_t.try_into().unwrap(),
next_values: &nexts_t.try_into().unwrap(),
public_inputs: &pis_t.try_into().unwrap(),
};
let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
builder.zero_extension(),

View File

@ -1,3 +1,4 @@
use ethereum_types::{H160, U256};
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
@ -40,3 +41,29 @@ pub fn trace_rows_to_poly_values<F: Field, const COLUMNS: usize>(
.map(|column| PolynomialValues::new(column))
.collect()
}
/// Returns the 32-bit little-endian limbs of a `U256`.
pub(crate) fn u256_limbs<F: Field>(u256: U256) -> [F; 8] {
    let mut limbs = [F::ZERO; 8];
    // `U256.0` stores four little-endian u64 limbs; split each into its low and high
    // u32 halves, low half first.
    for (i, limb_64) in u256.0.into_iter().enumerate() {
        limbs[2 * i] = F::from_canonical_u32(limb_64 as u32);
        limbs[2 * i + 1] = F::from_canonical_u32((limb_64 >> 32) as u32);
    }
    limbs
}
/// Returns the 32-bit limbs of an `H160`.
pub(crate) fn h160_limbs<F: Field>(h160: H160) -> [F; 5] {
    let mut limbs = [F::ZERO; 5];
    // `H160.0` is 20 bytes; each consecutive 4-byte chunk is read as a little-endian u32.
    for (i, chunk) in h160.0.chunks_exact(4).enumerate() {
        limbs[i] = F::from_canonical_u32(u32::from_le_bytes(chunk.try_into().unwrap()));
    }
    limbs
}

View File

@ -20,7 +20,7 @@ use crate::vars::{StarkEvaluationTargets, StarkEvaluationVars};
pub(crate) fn eval_vanishing_poly<F, FE, P, C, S, const D: usize, const D2: usize>(
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationVars<FE, P, { S::COLUMNS }>,
permutation_vars: Option<PermutationCheckVars<F, FE, P, D2>>,
ctl_vars: &[CtlCheckVars<F, FE, P, D2>],
consumer: &mut ConstraintConsumer<P>,
@ -48,7 +48,7 @@ pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
stark: &S,
config: &StarkConfig,
vars: StarkEvaluationTargets<D, { S::COLUMNS }, { S::PUBLIC_INPUTS }>,
vars: StarkEvaluationTargets<D, { S::COLUMNS }>,
permutation_data: Option<PermutationCheckDataTarget<D>>,
ctl_vars: &[CtlCheckVarsTarget<F, D>],
consumer: &mut RecursiveConstraintConsumer<F, D>,
@ -57,7 +57,6 @@ pub(crate) fn eval_vanishing_poly_circuit<F, C, S, const D: usize>(
C: GenericConfig<D, F = F>,
S: Stark<F, D>,
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
{
stark.eval_ext_circuit(builder, vars, consumer);
if let Some(permutation_data) = permutation_data {

View File

@ -3,24 +3,17 @@ use plonky2::field::types::Field;
use plonky2::iop::ext_target::ExtensionTarget;
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize, const PUBLIC_INPUTS: usize>
pub struct StarkEvaluationVars<'a, F, P, const COLUMNS: usize>
where
F: Field,
P: PackedField<Scalar = F>,
{
pub local_values: &'a [P; COLUMNS],
pub next_values: &'a [P; COLUMNS],
pub public_inputs: &'a [P::Scalar; PUBLIC_INPUTS],
}
#[derive(Debug, Copy, Clone)]
pub struct StarkEvaluationTargets<
'a,
const D: usize,
const COLUMNS: usize,
const PUBLIC_INPUTS: usize,
> {
pub struct StarkEvaluationTargets<'a, const D: usize, const COLUMNS: usize> {
pub local_values: &'a [ExtensionTarget<D>; COLUMNS],
pub next_values: &'a [ExtensionTarget<D>; COLUMNS],
pub public_inputs: &'a [ExtensionTarget<D>; PUBLIC_INPUTS],
}

View File

@ -17,7 +17,7 @@ use crate::logic::LogicStark;
use crate::memory::memory_stark::MemoryStark;
use crate::permutation::PermutationCheckVars;
use crate::proof::{
AllProof, AllProofChallenges, StarkOpeningSet, StarkProofChallenges, StarkProofWithPublicInputs,
AllProof, AllProofChallenges, StarkOpeningSet, StarkProof, StarkProofChallenges,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
@ -30,15 +30,10 @@ pub fn verify_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, co
) -> Result<()>
where
[(); CpuStark::<F, D>::COLUMNS]:,
[(); CpuStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakStark::<F, D>::COLUMNS]:,
[(); KeccakStark::<F, D>::PUBLIC_INPUTS]:,
[(); KeccakMemoryStark::<F, D>::COLUMNS]:,
[(); KeccakMemoryStark::<F, D>::PUBLIC_INPUTS]:,
[(); LogicStark::<F, D>::COLUMNS]:,
[(); LogicStark::<F, D>::PUBLIC_INPUTS]:,
[(); MemoryStark::<F, D>::COLUMNS]:,
[(); MemoryStark::<F, D>::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
let AllProofChallenges {
@ -115,21 +110,15 @@ pub(crate) fn verify_stark_proof_with_challenges<
const D: usize,
>(
stark: S,
proof_with_pis: &StarkProofWithPublicInputs<F, C, D>,
proof: &StarkProof<F, C, D>,
challenges: &StarkProofChallenges<F, D>,
ctl_vars: &[CtlCheckVars<F, F::Extension, F::Extension, D>],
config: &StarkConfig,
) -> Result<()>
where
[(); S::COLUMNS]:,
[(); S::PUBLIC_INPUTS]:,
[(); C::Hasher::HASH_SIZE]:,
{
let StarkProofWithPublicInputs {
proof,
public_inputs,
} = proof_with_pis;
ensure!(public_inputs.len() == S::PUBLIC_INPUTS);
let StarkOpeningSet {
local_values,
next_values,
@ -141,13 +130,6 @@ where
let vars = StarkEvaluationVars {
local_values: &local_values.to_vec().try_into().unwrap(),
next_values: &next_values.to_vec().try_into().unwrap(),
public_inputs: &public_inputs
.iter()
.copied()
.map(F::Extension::from_basefield)
.collect::<Vec<_>>()
.try_into()
.unwrap(),
};
let degree_bits = proof.recover_degree_bits(config);

View File

@ -5,7 +5,8 @@ use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::partial_trie::PartialTrie;
use plonky2_evm::generation::{generate_traces, TransactionData};
use plonky2_evm::generation::EvmInputs;
use plonky2_evm::proof::BlockMetadata;
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
@ -18,26 +19,29 @@ type C = PoseidonGoldilocksConfig;
#[ignore] // TODO: Won't work until txn parsing, storage, etc. are implemented.
fn test_simple_transfer() -> anyhow::Result<()> {
let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();
let txn = TransactionData {
signed_txn: hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38").to_vec(),
// TODO: Add trie with sender account.
state_trie: PartialTrie::Empty,
transaction_trie: PartialTrie::Empty,
receipt_trie: PartialTrie::Empty,
storage_tries: vec![],
let block_metadata = BlockMetadata {
block_coinbase: Default::default(),
block_timestamp: Default::default(),
block_number: Default::default(),
block_difficulty: Default::default(),
block_gaslimit: Default::default(),
block_chain_id: Default::default(),
};
let traces = generate_traces(&all_stark, &[txn]);
let txn = hex!("f85f050a82520894000000000000000000000000000000000000000064801ca0fa56df5d988638fad8798e5ef75a1e1125dc7fb55d2ac4bce25776a63f0c2967a02cb47a5579eb5f83a1cabe4662501c0059f1b58e60ef839a1b0da67af6b9fb38");
let config = StarkConfig::standard_fast_config();
let proof = prove::<F, C, D>(
&all_stark,
&config,
traces,
vec![vec![]; 4],
&mut TimingTree::default(),
)?;
let inputs = EvmInputs {
signed_txns: vec![txn.to_vec()],
state_trie: PartialTrie::Empty,
transactions_trie: PartialTrie::Empty,
receipts_trie: PartialTrie::Empty,
storage_tries: vec![],
block_metadata,
};
let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut TimingTree::default())?;
verify_proof(all_stark, proof, &config)
}

View File

@ -104,9 +104,12 @@ pub trait Witness<F: Field> {
where
F: RichField + Extendable<D>,
{
let limbs = value.to_basefield_array();
(0..D).for_each(|i| {
self.set_target(et.0[i], limbs[i]);
self.set_target_arr(et.0, value.to_basefield_array());
}
/// Assigns `values[i]` to `targets[i]` for every `i` in `0..N`.
fn set_target_arr<const N: usize>(&mut self, targets: [Target; N], values: [F; N]) {
    for i in 0..N {
        self.set_target(targets[i], values[i]);
    }
}

View File

@ -157,6 +157,10 @@ impl<F: RichField + Extendable<D>, const D: usize> CircuitBuilder<F, D> {
(0..n).map(|_i| self.add_virtual_target()).collect()
}
/// Allocates `N` fresh virtual targets and returns them as a fixed-size array.
pub fn add_virtual_target_arr<const N: usize>(&mut self) -> [Target; N] {
    // `add_virtual_targets(N)` returns exactly N elements, so the Vec-to-array
    // conversion cannot fail.
    self.add_virtual_targets(N).try_into().unwrap()
}
pub fn add_virtual_hash(&mut self) -> HashOutTarget {
HashOutTarget::from_vec(self.add_virtual_targets(4))
}