Merge branch 'pairing-test' of github.com:mir-protocol/plonky2 into pairing-test

This commit is contained in:
Dmitry Vagner 2023-04-27 16:25:48 -07:00
commit 2aa83d9adb
19 changed files with 379 additions and 90 deletions

View File

@ -54,49 +54,55 @@ insert_accessed_addresses_found:
%macro insert_accessed_storage_keys
%stack (addr, key) -> (addr, key, %%after)
%stack (addr, key, value) -> (addr, key, value, %%after)
%jump(insert_accessed_storage_keys)
%%after:
// stack: cold_access
%endmacro
/// Inserts the storage key into the access list if it is not already present.
/// Return 1 if the storage key was inserted, 0 if it was already present.
/// Inserts the storage key and value into the access list if it is not already present.
/// `value` should be the current storage value at the slot `(addr, key)`.
/// Return `1, original_value` if the storage key was inserted, `0, original_value` if it was already present.
global insert_accessed_storage_keys:
// stack: addr, key, retdest
// stack: addr, key, value, retdest
%mload_global_metadata(@GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN)
// stack: len, addr, key, retdest
// stack: len, addr, key, value, retdest
PUSH 0
insert_accessed_storage_keys_loop:
%stack (i, len, addr, key, retdest) -> (i, len, i, len, addr, key, retdest)
%stack (i, len, addr, key, value, retdest) -> (i, len, i, len, addr, key, value, retdest)
EQ %jumpi(insert_storage_key)
// stack: i, len, addr, key, retdest
// stack: i, len, addr, key, value, retdest
DUP1 %increment %mload_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS)
// stack: loaded_key, i, len, addr, key, retdest
// stack: loaded_key, i, len, addr, key, value, retdest
DUP2 %mload_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS)
// stack: loaded_addr, loaded_key, i, len, addr, key, retdest
// stack: loaded_addr, loaded_key, i, len, addr, key, value, retdest
DUP5 EQ
// stack: loaded_addr==addr, loaded_key, i, len, addr, key, retdest
// stack: loaded_addr==addr, loaded_key, i, len, addr, key, value, retdest
SWAP1 DUP6 EQ
// stack: loaded_key==key, loaded_addr==addr, i, len, addr, key, retdest
// stack: loaded_key==key, loaded_addr==addr, i, len, addr, key, value, retdest
MUL // AND
%jumpi(insert_accessed_storage_keys_found)
// stack: i, len, addr, key, retdest
%add_const(2)
// stack: i, len, addr, key, value, retdest
%add_const(3)
%jump(insert_accessed_storage_keys_loop)
insert_storage_key:
// stack: i, len, addr, key, retdest
// stack: i, len, addr, key, value, retdest
DUP1 %increment
%stack (i_plus_1, i, len, addr, key, retdest) -> (i, addr, i_plus_1, key, i_plus_1, retdest)
DUP1 %increment
%stack (i_plus_2, i_plus_1, i, len, addr, key, value) -> (i, addr, i_plus_1, key, i_plus_2, value, i_plus_2, value)
%mstore_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS) // Store new address at the end of the array.
%mstore_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS) // Store new key after that
// stack: i_plus_1, retdest
%mstore_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS) // Store new value after that
// stack: i_plus_2, value, retdest
%increment
%mstore_global_metadata(@GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN) // Store new length in front of the array.
PUSH 1 // Return 1 to indicate that the storage key was inserted.
SWAP1 JUMP
%mstore_global_metadata(@GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN) // Store new length.
%stack (value, retdest) -> (retdest, 1, value) // Return 1 to indicate that the storage key was inserted.
JUMP
insert_accessed_storage_keys_found:
%stack (i, len, addr, key, retdest) -> (retdest, 0) // Return 0 to indicate that the storage key was already present.
// stack: i, len, addr, key, value, retdest
%add_const(2)
%mload_kernel(@SEGMENT_ACCESSED_STORAGE_KEYS)
%stack (original_value, len, addr, key, value, retdest) -> (retdest, 0, original_value) // Return 0 to indicate that the storage key was already present.
JUMP

View File

@ -1,3 +1,118 @@
global precompile_snarkv:
// TODO
PANIC
// stack: address, retdest, new_ctx, (old stack)
%pop2
// stack: new_ctx, (old stack)
DUP1
SET_CONTEXT
// stack: (empty)
PUSH 0x100000000 // = 2^32 (is_kernel = true)
// stack: kexit_info
PUSH 192 %calldatasize DUP2 DUP2
// stack: calldata_size, 192, calldata_size, 192, kexit_info
MOD %jumpi(fault_exception) // calldata_size should be a multiple of 192
DIV
// stack: k, kexit_info
DUP1 %mul_const(@SNARKV_DYNAMIC_GAS) %add_const(@SNARKV_STATIC_GAS)
%stack (gas, k, kexit_info) -> (gas, kexit_info, k)
%charge_gas
SWAP1
// stack: k, kexit_info
PUSH 0
loading_loop:
// stack: i, k, kexit_info
DUP2 DUP2 EQ %jumpi(loading_done)
// stack: i, k, kexit_info
DUP1 %mul_const(192)
// stack: px, i, k, kexit_info
GET_CONTEXT
%stack (ctx, px) -> (ctx, @SEGMENT_CALLDATA, px, 32, loading_loop_contd, px)
%jump(mload_packing)
loading_loop_contd:
// stack: x, px, i, k, kexit_info
SWAP1 %add_const(32)
GET_CONTEXT
%stack (ctx, py) -> (ctx, @SEGMENT_CALLDATA, py, 32, loading_loop_contd2, py)
%jump(mload_packing)
loading_loop_contd2:
// stack: y, py, x, i, k, kexit_info
SWAP1 %add_const(32)
GET_CONTEXT
%stack (ctx, px_im) -> (ctx, @SEGMENT_CALLDATA, px_im, 32, loading_loop_contd3, px_im)
%jump(mload_packing)
loading_loop_contd3:
// stack: x_im, px_im, y, x, i, k, kexit_info
SWAP1 %add_const(32)
// stack: px_re, x_im, y, x, i, k, kexit_info
GET_CONTEXT
%stack (ctx, px_re) -> (ctx, @SEGMENT_CALLDATA, px_re, 32, loading_loop_contd4, px_re)
%jump(mload_packing)
loading_loop_contd4:
// stack: x_re, px_re, x_im, y, x, i, k, kexit_info
SWAP1 %add_const(32)
// stack: py_im, x_re, x_im, y, x, i, k, kexit_info
GET_CONTEXT
%stack (ctx, py_im) -> (ctx, @SEGMENT_CALLDATA, py_im, 32, loading_loop_contd5, py_im)
%jump(mload_packing)
loading_loop_contd5:
// stack: y_im, py_im, x_re, x_im, y, x, i, k, kexit_info
SWAP1 %add_const(32)
// stack: py_re, y_im, x_re, x_im, y, x, i, k, kexit_info
GET_CONTEXT
%stack (ctx, py_re) -> (ctx, @SEGMENT_CALLDATA, py_re, 32, loading_loop_contd6)
%jump(mload_packing)
loading_loop_contd6:
// stack: y_re, y_im, x_re, x_im, y, x, i, k, kexit_info
SWAP1
// stack: y_im, y_re, x_re, x_im, y, x, i, k, kexit_info
DUP7
// stack: i, y_im, y_re, x_re, x_im, y, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%add_const(5)
%mstore_kernel_bn254_pairing
// stack: y_re, x_re, x_im, y, x, i, k, kexit_info
DUP6
// stack: i, y_re, x_re, x_im, y, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%add_const(4)
%mstore_kernel_bn254_pairing
SWAP1
// stack: x_im, x_re, y, x, i, k, kexit_info
DUP5
// stack: i, x_im, x_re, y, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%add_const(3)
%mstore_kernel_bn254_pairing
// stack: x_re, y, x, i, k, kexit_info
DUP4
// stack: i, x_re, y, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%add_const(2)
%mstore_kernel_bn254_pairing
// stack: y, x, i, k, kexit_info
DUP3
// stack: i, y, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%add_const(1)
%mstore_kernel_bn254_pairing
// stack: x, i, k, kexit_info
DUP2
// stack: i, x, i, k, kexit_info
%mul_const(6) %add_const(@SNARKV_INP)
%mstore_kernel_bn254_pairing
// stack: i, k, kexit_info
%increment
%jump(loading_loop)
loading_done:
%stack (i, k) -> (k, @SNARKV_INP, @SNARKV_OUT, got_result)
%jump(bn254_pairing)
got_result:
// stack: result, kexit_info
DUP1 %eq_const(@U256_MAX) %jumpi(fault_exception)
// stack: result, kexit_info
// Store the result bool (repr. by a U256) to the parent's return data using `mstore_unpacking`.
%mstore_parent_context_metadata(@CTX_METADATA_RETURNDATA_SIZE, 32)
%mload_context_metadata(@CTX_METADATA_PARENT_CONTEXT)
%stack (parent_ctx, address) -> (parent_ctx, @SEGMENT_RETURNDATA, 0, address, 32, pop_and_return_success)
%jump(mstore_unpacking)

View File

@ -1,3 +1,34 @@
%macro sload_current
%stack (slot) -> (slot, %%after)
%jump(sload_current)
%%after:
%endmacro
global sload_current:
%stack (slot) -> (slot, after_storage_read)
%slot_to_storage_key
// stack: storage_key, after_storage_read
PUSH 64 // storage_key has 64 nibbles
%current_storage_trie
// stack: storage_root_ptr, 64, storage_key, after_storage_read
%jump(mpt_read)
global after_storage_read:
// stack: value_ptr, retdest
DUP1 %jumpi(storage_key_exists)
// Storage key not found. Return default value_ptr = 0,
// which derefs to 0 since @SEGMENT_TRIE_DATA[0] = 0.
%stack (value_ptr, retdest) -> (retdest, 0)
JUMP
global storage_key_exists:
// stack: value_ptr, retdest
%mload_trie_data
// stack: value, retdest
SWAP1
JUMP
// Read a word from the current account's storage trie.
//
// Pre stack: kexit_info, slot
@ -6,38 +37,20 @@
global sys_sload:
// stack: kexit_info, slot
SWAP1
// stack: slot, kexit_info
DUP1 %address
// stack: addr, slot, slot, kexit_info
%insert_accessed_storage_keys PUSH @GAS_COLDSLOAD_MINUS_WARMACCESS
MUL
PUSH @GAS_WARMACCESS
ADD
%stack (gas, slot, kexit_info) -> (gas, kexit_info, slot)
DUP1
// stack: slot, slot, kexit_info
%sload_current
%stack (value, slot, kexit_info) -> (slot, value, kexit_info, value)
%address
// stack: addr, slot, value, kexit_info, value
%insert_accessed_storage_keys
// stack: cold_access, old_value, kexit_info, value
SWAP1 POP
// stack: cold_access, kexit_info, value
%mul_const(@GAS_COLDSLOAD_MINUS_WARMACCESS)
%add_const(@GAS_WARMACCESS)
%charge_gas
// stack: kexit_info, slot
SWAP1
%stack (slot) -> (slot, after_storage_read)
%slot_to_storage_key
// stack: storage_key, after_storage_read, kexit_info
PUSH 64 // storage_key has 64 nibbles
%current_storage_trie
// stack: storage_root_ptr, 64, storage_key, after_storage_read, kexit_info
%jump(mpt_read)
after_storage_read:
// stack: value_ptr, kexit_info
DUP1 %jumpi(storage_key_exists)
// Storage key not found. Return default value_ptr = 0,
// which derefs to 0 since @SEGMENT_TRIE_DATA[0] = 0.
%stack (value_ptr, kexit_info) -> (kexit_info, 0)
// stack: kexit_info, value
EXIT_KERNEL
storage_key_exists:
// stack: value_ptr, kexit_info
%mload_trie_data
// stack: value, kexit_info
SWAP1
EXIT_KERNEL

View File

@ -6,14 +6,42 @@
global sys_sstore:
%check_static
%stack (kexit_info, slot, value) -> (slot, kexit_info, slot, value)
%address %insert_accessed_storage_keys POP // TODO: Use return value in gas calculation.
// TODO: Assuming a cold zero -> nonzero write for now.
PUSH @GAS_COLDSLOAD
PUSH @GAS_SSET
ADD
%sload_current
%address
%stack (addr, current_value, kexit_info, slot, value) -> (addr, slot, current_value, current_value, kexit_info, slot, value)
%insert_accessed_storage_keys
// stack: cold_access, original_value, current_value, kexit_info, slot, value
%mul_const(@GAS_COLDSLOAD)
// Check for warm access.
%stack (gas, original_value, current_value, kexit_info, slot, value) ->
(value, current_value, current_value, original_value, gas, original_value, current_value, kexit_info, slot, value)
EQ SWAP2 EQ ISZERO
// stack: current_value==original_value, value==current_value, gas, original_value, current_value, kexit_info, slot, value
ADD // OR
%jumpi(sstore_warm)
// Check for sset (set a zero storage slot to a non-zero value).
// stack: gas, original_value, current_value, kexit_info, slot, value
DUP2 ISZERO %mul_const(@GAS_SSET) ADD
// Check for sreset (set a non-zero storage slot to a non-zero value).
// stack: gas, original_value, current_value, kexit_info, slot, value
DUP2 ISZERO ISZERO %mul_const(@GAS_SRESET) ADD
%jump(sstore_charge_gas)
sstore_warm:
// stack: gas, original_value, current_value, kexit_info, slot, value
%add_const(@GAS_WARMACCESS)
sstore_charge_gas:
%stack (gas, original_value, current_value, kexit_info, slot, value) -> (gas, kexit_info, current_value, slot, value)
%charge_gas
%stack (kexit_info, slot, value) -> (slot, value, kexit_info)
// Check if `value` is equal to `current_value`, and if so exit the kernel early.
%stack (kexit_info, current_value, slot, value) -> (value, current_value, slot, value, kexit_info)
EQ %jumpi(sstore_noop)
// TODO: If value = 0, delete the key instead of inserting 0.
// stack: slot, value, kexit_info
@ -57,3 +85,8 @@ after_storage_insert:
after_state_insert:
// stack: kexit_info
EXIT_KERNEL
sstore_noop:
// stack: slot, value, kexit_info
%pop2
EXIT_KERNEL

View File

@ -44,6 +44,10 @@ pub fn evm_constants() -> HashMap<String, U256> {
c.insert(name.into(), U256::from(value));
}
for (name, value) in SNARKV_POINTERS {
c.insert(name.into(), U256::from(value));
}
for segment in Segment::all() {
c.insert(segment.var_name().into(), (segment as u32).into());
}
@ -240,6 +244,8 @@ const PRECOMPILES_GAS: [(&str, u16); 13] = [
("BLAKE2_F_DYNAMIC_GAS", 1),
];
const SNARKV_POINTERS: [(&str, u64); 2] = [("SNARKV_INP", 112), ("SNARKV_OUT", 100)];
const CODE_SIZE_LIMIT: [(&str, u64); 3] = [
("MAX_CODE_SIZE", 0x6000),
("MAX_INITCODE_SIZE", 0xc000),

View File

@ -99,12 +99,12 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
let mut rng = thread_rng();
let n = rng.gen_range(1..10);
let storage_keys = (0..n)
.map(|_| (rng.gen::<Address>(), U256(rng.gen())))
.map(|_| (rng.gen::<Address>(), U256(rng.gen()), U256(rng.gen())))
.collect::<HashSet<_>>()
.into_iter()
.collect::<Vec<(Address, U256)>>();
.collect::<Vec<(Address, U256, U256)>>();
let storage_key_in_list = storage_keys[rng.gen_range(0..n)];
let storage_key_not_in_list = (rng.gen::<Address>(), U256(rng.gen()));
let storage_key_not_in_list = (rng.gen::<Address>(), U256(rng.gen()), U256(rng.gen()));
assert!(
!storage_keys.contains(&storage_key_not_in_list),
"Cosmic luck or bad RNG?"
@ -113,6 +113,7 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
// Test for storage key already in list.
let initial_stack = vec![
retaddr,
storage_key_in_list.2,
storage_key_in_list.1,
U256::from(storage_key_in_list.0 .0.as_slice()),
];
@ -122,30 +123,35 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
interpreter
.generation_state
.memory
.set(MemoryAddress::new(0, AccessedStorageKeys, 2 * i), addr);
.set(MemoryAddress::new(0, AccessedStorageKeys, 3 * i), addr);
interpreter.generation_state.memory.set(
MemoryAddress::new(0, AccessedStorageKeys, 2 * i + 1),
MemoryAddress::new(0, AccessedStorageKeys, 3 * i + 1),
storage_keys[i].1,
);
interpreter.generation_state.memory.set(
MemoryAddress::new(0, AccessedStorageKeys, 3 * i + 2),
storage_keys[i].2,
);
}
interpreter.generation_state.memory.set(
MemoryAddress::new(0, GlobalMetadata, AccessedStorageKeysLen as usize),
U256::from(2 * n),
U256::from(3 * n),
);
interpreter.run()?;
assert_eq!(interpreter.stack(), &[U256::zero()]);
assert_eq!(interpreter.stack(), &[storage_key_in_list.2, U256::zero()]);
assert_eq!(
interpreter.generation_state.memory.get(MemoryAddress::new(
0,
GlobalMetadata,
AccessedStorageKeysLen as usize
)),
U256::from(2 * n)
U256::from(3 * n)
);
// Test for storage key not in list.
let initial_stack = vec![
retaddr,
storage_key_not_in_list.2,
storage_key_not_in_list.1,
U256::from(storage_key_not_in_list.0 .0.as_slice()),
];
@ -155,41 +161,56 @@ fn test_insert_accessed_storage_keys() -> Result<()> {
interpreter
.generation_state
.memory
.set(MemoryAddress::new(0, AccessedStorageKeys, 2 * i), addr);
.set(MemoryAddress::new(0, AccessedStorageKeys, 3 * i), addr);
interpreter.generation_state.memory.set(
MemoryAddress::new(0, AccessedStorageKeys, 2 * i + 1),
MemoryAddress::new(0, AccessedStorageKeys, 3 * i + 1),
storage_keys[i].1,
);
interpreter.generation_state.memory.set(
MemoryAddress::new(0, AccessedStorageKeys, 3 * i + 2),
storage_keys[i].2,
);
}
interpreter.generation_state.memory.set(
MemoryAddress::new(0, GlobalMetadata, AccessedStorageKeysLen as usize),
U256::from(2 * n),
U256::from(3 * n),
);
interpreter.run()?;
assert_eq!(interpreter.stack(), &[U256::one()]);
assert_eq!(
interpreter.stack(),
&[storage_key_not_in_list.2, U256::one()]
);
assert_eq!(
interpreter.generation_state.memory.get(MemoryAddress::new(
0,
GlobalMetadata,
AccessedStorageKeysLen as usize
)),
U256::from(2 * (n + 1))
U256::from(3 * (n + 1))
);
assert_eq!(
interpreter
.generation_state
.memory
.get(MemoryAddress::new(0, AccessedStorageKeys, 2 * n,)),
.get(MemoryAddress::new(0, AccessedStorageKeys, 3 * n,)),
U256::from(storage_key_not_in_list.0 .0.as_slice())
);
assert_eq!(
interpreter.generation_state.memory.get(MemoryAddress::new(
0,
AccessedStorageKeys,
2 * n + 1,
3 * n + 1,
)),
storage_key_not_in_list.1
);
assert_eq!(
interpreter.generation_state.memory.get(MemoryAddress::new(
0,
AccessedStorageKeys,
3 * n + 2,
)),
storage_key_not_in_list.2
);
Ok(())
}

View File

@ -45,9 +45,7 @@ pub(crate) fn kernel_peek<F: Field>(
segment: Segment,
virt: usize,
) -> U256 {
state
.memory
.get(MemoryAddress::new(state.registers.context, segment, virt))
state.memory.get(MemoryAddress::new(0, segment, virt))
}
pub(crate) fn mem_read_with_log<F: Field>(

View File

@ -31,6 +31,7 @@ plonky2_util = { version = "0.1.0", default-features = false }
rand = { version = "0.8.4", default-features = false }
rand_chacha = { version = "0.3.1", optional = true, default-features = false }
serde = { version = "1.0", default-features = false, features = ["derive"] }
serde_json = "1.0"
static_assertions = { version = "1.1.0", default-features = false }
unroll = { version = "0.1.5", default-features = false }

View File

@ -0,0 +1,79 @@
#![allow(clippy::upper_case_acronyms)]
use std::fs;
use anyhow::Result;
use plonky2::field::types::Field;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
/// An example of using Plonky2 to prove a statement of the form
/// "I know the 100th element of the Fibonacci sequence, starting with constants a and b."
/// When a == 0 and b == 1, this is proving knowledge of the 100th (standard) Fibonacci number.
/// This example also serializes the circuit data and proof to JSON files.
fn main() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
let config = CircuitConfig::standard_recursion_config();
let mut builder = CircuitBuilder::<F, D>::new(config);
// The arithmetic circuit.
let initial_a = builder.add_virtual_target();
let initial_b = builder.add_virtual_target();
let mut prev_target = initial_a;
let mut cur_target = initial_b;
for _ in 0..99 {
let temp = builder.add(prev_target, cur_target);
prev_target = cur_target;
cur_target = temp;
}
// Public inputs are the two initial values (provided below) and the result (which is generated).
builder.register_public_input(initial_a);
builder.register_public_input(initial_b);
builder.register_public_input(cur_target);
// Provide initial values.
let mut pw = PartialWitness::new();
pw.set_target(initial_a, F::ZERO);
pw.set_target(initial_b, F::ONE);
let data = builder.build::<C>();
let common_circuit_data_serialized = serde_json::to_string(&data.common).unwrap();
fs::write("common_circuit_data.json", common_circuit_data_serialized)
.expect("Unable to write file");
let verifier_only_circuit_data_serialized = serde_json::to_string(&data.verifier_only).unwrap();
fs::write(
"verifier_only_circuit_data.json",
verifier_only_circuit_data_serialized,
)
.expect("Unable to write file");
let proof = data.prove(pw)?;
let proof_serialized = serde_json::to_string(&proof).unwrap();
fs::write("proof_with_public_inputs.json", proof_serialized).expect("Unable to write file");
let proof_challenges = proof
.get_challenges(
proof.get_public_inputs_hash(),
&data.verifier_only.circuit_digest,
&data.common,
)
.unwrap();
let proof_challenges_serialized = serde_json::to_string(&proof_challenges).unwrap();
fs::write("proof_challenges.json", proof_challenges_serialized).expect("Unable to write file");
println!(
"100th Fibonacci number mod |F| (starting with {}, {}) is: {}",
proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2]
);
data.verify(proof)
}

View File

@ -1,5 +1,7 @@
use alloc::vec::Vec;
use serde::Serialize;
use crate::fri::reduction_strategies::FriReductionStrategy;
mod challenges;
@ -13,7 +15,7 @@ mod validate_shape;
pub mod verifier;
pub mod witness_util;
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct FriConfig {
/// `rate = 2^{-rate_bits}`.
pub rate_bits: usize,
@ -56,7 +58,7 @@ impl FriConfig {
/// FRI parameters, including generated parameters which are specific to an instance size, in
/// contrast to `FriConfig` which is user-specified and independent of instance size.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct FriParams {
/// User-specified FRI configuration.
pub config: FriConfig,

View File

@ -393,6 +393,7 @@ impl<F: RichField + Extendable<D>, HCO: HashConfig, H: Hasher<F, HCO>, const D:
}
}
#[derive(Serialize)]
pub struct FriChallenges<F: RichField + Extendable<D>, const D: usize> {
// Scaling factor to combine polynomials.
pub fri_alpha: F::Extension,

View File

@ -4,9 +4,10 @@ use alloc::vec::Vec;
use std::time::Instant;
use log::debug;
use serde::Serialize;
/// A method for deciding what arity to use at each reduction layer.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub enum FriReductionStrategy {
/// Specifies the exact sequence of arities (expressed in bits) to use.
Fixed(Vec<usize>),

View File

@ -2,6 +2,8 @@ use alloc::string::String;
use alloc::vec::Vec;
use alloc::{format, vec};
use serde::{Deserialize, Serialize};
use crate::field::extension::Extendable;
use crate::field::packed::PackedField;
use crate::gates::gate::Gate;
@ -18,7 +20,7 @@ use crate::plonk::vars::{
use crate::util::serialization::{Buffer, IoResult, Read, Write};
/// A gate which takes a single constant parameter and outputs that value.
#[derive(Copy, Clone, Debug)]
#[derive(Copy, Clone, Debug, Serialize, Deserialize)]
pub struct ConstantGate {
pub(crate) num_consts: usize,
}

View File

@ -8,6 +8,7 @@ use core::hash::{Hash, Hasher};
use core::ops::Range;
use hashbrown::HashMap;
use serde::{Serialize, Serializer};
use crate::field::batch_util::batch_multiply_inplace;
use crate::field::extension::{Extendable, FieldExtension};
@ -239,6 +240,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Debug for GateRef<F, D> {
}
}
impl<F: RichField + Extendable<D>, const D: usize> Serialize for GateRef<F, D> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(&self.0.id())
}
}
/// Map between gate parameters and available slots.
/// An available slot is of the form `(row, op)`, meaning the current available slot
/// is at gate index `row` in the `op`-th operation.

View File

@ -2,6 +2,8 @@ use alloc::vec;
use alloc::vec::Vec;
use core::ops::Range;
use serde::Serialize;
use crate::field::extension::Extendable;
use crate::field::polynomial::PolynomialValues;
use crate::gates::gate::{GateInstance, GateRef};
@ -10,7 +12,7 @@ use crate::hash::hash_types::RichField;
/// Placeholder value to indicate that a gate doesn't use a selector polynomial.
pub(crate) const UNUSED_SELECTOR: usize = u32::MAX as usize;
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct SelectorsInfo {
pub(crate) selector_indices: Vec<usize>,
pub(crate) groups: Vec<Range<usize>>,

View File

@ -4,6 +4,7 @@ use alloc::vec::Vec;
use core::ops::{Range, RangeFrom};
use anyhow::Result;
use serde::Serialize;
use crate::field::extension::Extendable;
use crate::field::fft::FftRootTable;
@ -35,7 +36,7 @@ use crate::util::serialization::{
};
use crate::util::timing::TimingTree;
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
pub struct CircuitConfig {
pub num_wires: usize,
pub num_routed_wires: usize,
@ -347,7 +348,7 @@ pub struct ProverOnlyCircuitData<
}
/// Circuit data required by the verifier, but not the prover.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct VerifierOnlyCircuitData<C: GenericConfig<D>, const D: usize> {
/// A commitment to each constant polynomial and each permutation polynomial.
pub constants_sigmas_cap: MerkleCap<C::F, C::HCO, C::Hasher>,
@ -370,7 +371,7 @@ impl<C: GenericConfig<D>, const D: usize> VerifierOnlyCircuitData<C, D> {
}
/// Circuit data required by both the prover and the verifier.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct CommonCircuitData<F: RichField + Extendable<D>, const D: usize> {
pub config: CircuitConfig,

View File

@ -117,7 +117,7 @@ impl HashConfig for PoseidonHashConfig {
const WIDTH: usize = 12;
}
/// Configuration using Poseidon over the Goldilocks field.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize)]
pub struct PoseidonGoldilocksConfig;
impl GenericConfig<2> for PoseidonGoldilocksConfig {
type F = GoldilocksField;

View File

@ -94,7 +94,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
}
/// Computes all Fiat-Shamir challenges used in the Plonk proof.
pub(crate) fn get_challenges(
pub fn get_challenges(
&self,
public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash,
circuit_digest: &<<C as GenericConfig<D>>::Hasher as Hasher<C::F, C::HCO>>::Hash,

View File

@ -102,7 +102,7 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
})
}
pub(crate) fn get_public_inputs_hash(
pub fn get_public_inputs_hash(
&self,
) -> <<C as GenericConfig<D>>::InnerHasher as Hasher<F, C::HCI>>::Hash
where
@ -276,7 +276,8 @@ impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
}
}
pub(crate) struct ProofChallenges<F: RichField + Extendable<D>, const D: usize> {
#[derive(Serialize)]
pub struct ProofChallenges<F: RichField + Extendable<D>, const D: usize> {
/// Random values used in Plonk's permutation argument.
pub plonk_betas: Vec<F>,