mirror of
https://github.com/logos-storage/plonky2.git
synced 2026-01-05 07:13:08 +00:00
Merge pull request #1097 from topos-protocol/receipts_and_logs
Implement receipts and logs
This commit is contained in: commit 86fb6aa065
@ -11,6 +11,7 @@ edition = "2021"

[dependencies]
anyhow = "1.0.40"
bytes = "1.4.0"
env_logger = "0.10.0"
eth_trie_utils = "0.6.0"
ethereum-types = "0.14.0"
@ -29,6 +29,7 @@ pub(crate) fn combined_kernel() -> Kernel {
include_str!("asm/core/create_addresses.asm"),
include_str!("asm/core/create_contract_account.asm"),
include_str!("asm/core/exception.asm"),
include_str!("asm/core/create_receipt.asm"),
include_str!("asm/core/gas.asm"),
include_str!("asm/core/intrinsic_gas.asm"),
include_str!("asm/core/jumpdest_analysis.asm"),
@ -158,6 +159,7 @@ pub(crate) fn combined_kernel() -> Kernel {
include_str!("asm/util/math.asm"),
include_str!("asm/account_code.asm"),
include_str!("asm/balance.asm"),
include_str!("asm/bloom_filter.asm"),
];

let parsed_files = files.iter().map(|f| parse(f)).collect_vec();
176 evm/src/cpu/kernel/asm/bloom_filter.asm Normal file
@ -0,0 +1,176 @@
/// Implementation of Bloom filters for logs.

// Adds a Bloom entry to the transaction Bloom filter and the block Bloom filter.
//
// This is calculated by taking the least significant 11 bits from
// each of the first three 16-bit chunks of the keccak_256 hash of bloom_entry.
add_to_bloom:
// stack: is_topic, bloom_entry, retdest
%compute_entry_hash
// stack: hash, retdest
DUP1
// stack: hash, hash, retdest
%shr_const(240)
// stack: hash_shft_240, hash, retdest
%bloom_byte_indices
// stack: byte_index, byte_bit_index, hash, retdest
%bloom_write_bit
// stack: hash, retdest

// We shift the hash by 16 bits and repeat.
DUP1 %shr_const(224)
// stack: hash_shft_224, hash, retdest
%bloom_byte_indices
// stack: byte_index, byte_bit_index, hash, retdest
%bloom_write_bit
// stack: hash, retdest

// We shift the hash by another 16 bits and repeat.
%shr_const(208)
// stack: hash_shft_208, retdest
%bloom_byte_indices
// stack: byte_index, byte_bit_index, retdest
%bloom_write_bit
// stack: retdest
JUMP
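
As a cross-check on the routine above, here is a minimal Rust sketch of the same indexing scheme. It is illustrative only and not part of this change; the keccak dependency (tiny_keccak) is an assumption made for the example.

use tiny_keccak::{Hasher, Keccak}; // assumed dependency, for illustration only

// Mirrors %compute_entry_hash, %bloom_byte_indices and %bloom_write_bit: for each of the
// first three 16-bit big-endian chunks of keccak256(entry), keep the low 11 bits `i`,
// then set bit (i % 8) of byte (255 - i / 8) in the 256-byte Bloom filter.
fn add_to_bloom(bloom: &mut [u8; 256], entry: &[u8]) {
    let mut hash = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(entry);
    keccak.finalize(&mut hash);
    for chunk in 0..3 {
        let i = (u16::from_be_bytes([hash[2 * chunk], hash[2 * chunk + 1]]) & 0x07FF) as usize;
        bloom[255 - i / 8] |= 1u8 << (i % 8);
    }
}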

// The LOGS segment is [log0_ptr, log1_ptr, ...]. logs_len, a global metadata field, holds the number of logs.
// A log in the LOGS_DATA segment is [log_payload_len, address, num_topics, [topics], data_len, [data]].
global logs_bloom:
// stack: retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// stack: logs_len, retdest
PUSH 0

logs_bloom_loop:
// stack: i, logs_len, retdest
DUP2 DUP2 EQ
// stack: i == logs_len, i, logs_len, retdest
%jumpi(logs_bloom_end)
// stack: i, logs_len, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS)
// stack: log_payload_len_ptr, i, logs_len, retdest

// Add address to bloom filter.
%increment
// stack: addr_ptr, i, logs_len, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: addr, addr_ptr, i, logs_len, retdest
PUSH 0
// stack: is_topic, addr, addr_ptr, i, logs_len, retdest
%add_to_bloom
// stack: addr_ptr, i, logs_len, retdest
%increment
// stack: num_topics_ptr, i, logs_len, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: num_topics, num_topics_ptr, i, logs_len, retdest
SWAP1 %increment
// stack: topics_ptr, num_topics, i, logs_len, retdest
PUSH 0

logs_bloom_topic_loop:
// stack: j, topics_ptr, num_topics, i, logs_len, retdest
DUP3 DUP2 EQ
// stack: j == num_topics, j, topics_ptr, num_topics, i, logs_len, retdest
%jumpi(logs_bloom_topic_end)
DUP2 DUP2 ADD
// stack: curr_topic_ptr, j, topics_ptr, num_topics, i, logs_len, retdest
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: topic, j, topics_ptr, num_topics, i, logs_len, retdest
PUSH 1
// stack: is_topic, topic, j, topics_ptr, num_topics, i, logs_len, retdest
%add_to_bloom
// stack: j, topics_ptr, num_topics, i, logs_len, retdest
%increment
%jump(logs_bloom_topic_loop)

logs_bloom_topic_end:
// stack: num_topics, topics_ptr, num_topics, i, logs_len, retdest
%pop3
%increment
%jump(logs_bloom_loop)

logs_bloom_end:
// stack: logs_len, logs_len, retdest
%pop2
JUMP

%macro compute_entry_hash
// stack: is_topic, bloom_entry
ISZERO
%jumpi(%%compute_entry_hash_address)
// stack: bloom_entry
%keccak256_word(32)
// stack: topic_hash
%jump(%%after)

%%compute_entry_hash_address:
// stack: bloom_entry
%keccak256_word(20)
// stack: address_hash

%%after:
%endmacro

%macro add_to_bloom
%stack (is_topic, bloom_entry) -> (is_topic, bloom_entry, %%after)
%jump(add_to_bloom)

%%after:
%endmacro

// Computes the byte index, and the bit index within that byte, at which to update the Bloom filter.
// The hash value must be properly shifted prior to calling this macro.
%macro bloom_byte_indices
// stack: hash
%and_const(0x07FF)
PUSH 0x07FF
SUB
// stack: bit_index
DUP1
%and_const(0x7)
SWAP1
%shr_const(0x3)
// stack: byte_index, byte_bit_index
%endmacro


// Updates the corresponding Bloom filter byte with the provided bit.
// Also updates the block bloom filter.
%macro bloom_write_bit
// stack: byte_index, byte_bit_index
DUP2
// stack: byte_bit_index, byte_index, byte_bit_index
PUSH 7 SUB
PUSH 1 SWAP1 SHL
// Updates the current txn bloom filter.
// stack: one_shifted_by_index, byte_index, byte_bit_index
DUP2 DUP1
// stack: byte_index, byte_index, one_shifted_by_index, byte_index, byte_bit_index
// load bloom_byte from current txn bloom filter
%mload_kernel(@SEGMENT_TXN_BLOOM)
%stack (old_bloom_byte, byte_index, one_shifted_by_index) -> (old_bloom_byte, one_shifted_by_index, byte_index, one_shifted_by_index)
OR
// stack: new_bloom_byte, byte_index, one_shifted_by_index, byte_index, byte_bit_index
SWAP1
%mstore_kernel(@SEGMENT_TXN_BLOOM)
// stack: one_shifted_by_index, byte_index, byte_bit_index

// Updates the block bloom filter.
SWAP2 POP DUP1
%mload_kernel(@SEGMENT_BLOCK_BLOOM)
// stack: old_bloom_byte, byte_index, one_shifted_by_index
DUP3 OR
// stack: new_bloom_byte, byte_index, one_shifted_by_index
SWAP1
%mstore_kernel(@SEGMENT_BLOCK_BLOOM)
// stack: one_shifted_by_index
POP
// stack: empty
%endmacro

236 evm/src/cpu/kernel/asm/core/create_receipt.asm Normal file
@ -0,0 +1,236 @@
// Pre-stack: status, leftover_gas, prev_cum_gas, txn_nb, retdest
// Post-stack: new_cum_gas, txn_nb
// A receipt is stored in MPT_TRIE_DATA as:
// [payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, [logs]]
//
// In this function, we:
// - compute cum_gas,
// - check if the transaction failed, and set the number of logs to 0 if so,
// - compute the bloom filter,
// - write the receipt in MPT_TRIE_DATA,
// - insert a new node in receipt_trie,
// - set the bloom filter back to 0.
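
The RLP payload length assembled below follows directly from that layout; a minimal Rust sketch of the arithmetic (helper names are illustrative, not kernel APIs):

// Number of bytes in the big-endian representation of x (no leading zeros).
fn be_byte_len(x: u64) -> usize {
    (64 - x.leading_zeros() as usize + 7) / 8
}

// Length of the RLP encoding of a scalar.
fn rlp_scalar_len(x: u64) -> usize {
    if x < 0x80 { 1 } else { 1 + be_byte_len(x) }
}

// Length of an RLP list (prefix + payload) whose payload is `payload_len` bytes.
fn rlp_list_len(payload_len: u64) -> usize {
    let n = payload_len as usize;
    if n <= 55 { 1 + n } else { 1 + be_byte_len(payload_len) + n }
}

// RLP payload of a legacy receipt: status ++ cum_gas_used ++ bloom ++ logs.
fn receipt_payload_len(cum_gas_used: u64, logs_payload_len: u64) -> usize {
    1                                // status: always one byte (0x80 or 0x01).
    + rlp_scalar_len(cum_gas_used)   // cumulative gas used, a simple scalar.
    + (1 + 2 + 256)                  // 256-byte bloom: 0xb9 prefix + 2 length bytes + payload.
    + rlp_list_len(logs_payload_len) // logs list prefix + logs payload.
}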
global process_receipt:
// stack: status, leftover_gas, prev_cum_gas, txn_nb, retdest
DUP2 DUP4
// stack: prev_cum_gas, leftover_gas, status, leftover_gas, prev_cum_gas, txn_nb, retdest
%compute_cumulative_gas
// stack: new_cum_gas, status, leftover_gas, prev_cum_gas, txn_nb, retdest
SWAP3 POP
// stack: status, leftover_gas, new_cum_gas, txn_nb, retdest
SWAP1 POP
// stack: status, new_cum_gas, txn_nb, retdest
// Now, we need to check whether the transaction has failed.
DUP1 ISZERO %jumpi(failed_receipt)

process_receipt_after_status:
// stack: status, new_cum_gas, txn_nb, retdest
PUSH process_receipt_after_bloom
%jump(logs_bloom)

process_receipt_after_bloom:
// stack: status, new_cum_gas, txn_nb, retdest
DUP2 DUP4
// stack: txn_nb, new_cum_gas, status, new_cum_gas, txn_nb, retdest
SWAP2
// stack: status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest

// Compute the total RLP payload length of the receipt.
PUSH 1 // status is always 1 byte.
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
DUP3
%rlp_scalar_len // cum_gas is a simple scalar.
ADD
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Next is the bloom_filter, which is a 256-byte array. Its RLP encoding is
// 1 + 2 + 256 bytes.
%add_const(259)
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Last is the logs.
%mload_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
%rlp_list_len
ADD
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Now we can write the receipt in MPT_TRIE_DATA.
%get_trie_data_size
// stack: receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Write payload_len.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Write status.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// Write cum_gas_used.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write Bloom filter.
PUSH 256 // Bloom length.
PUSH 0 PUSH @SEGMENT_TXN_BLOOM PUSH 0 // Bloom memory address.
%get_trie_data_size PUSH @SEGMENT_TRIE_DATA PUSH 0 // MPT dest address.
// stack: DST, SRC, 256, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%memcpy
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Update trie data size.
%get_trie_data_size
%add_const(256)
%set_trie_data_size

// Now we write logs.
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// We start with the logs payload length.
%mload_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
%append_to_trie_data
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// Then the number of logs.
// stack: num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1 %append_to_trie_data
PUSH 0

// Each log is written in MPT_TRIE_DATA as:
// [payload_len, address, num_topics, [topics], data_len, [data]].
process_receipt_logs_loop:
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP2 DUP2
EQ
// stack: i == num_logs, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%jumpi(process_receipt_after_write)
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS)
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write payload_len.
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write address.
%increment
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write num_topics.
%increment
// stack: num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1
%append_to_trie_data
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
SWAP1 %increment SWAP1
// stack: num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
PUSH 0

process_receipt_topics_loop:
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP2 DUP2
EQ
// stack: j == num_topics, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%jumpi(process_receipt_topics_end)
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write j-th topic.
DUP3 DUP2
ADD
// stack: cur_topic_ptr, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%increment
%jump(process_receipt_topics_loop)

process_receipt_topics_end:
// stack: num_topics, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
POP
ADD
// stack: data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write data_len.
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP1
%append_to_trie_data
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
SWAP1 %increment SWAP1
// stack: data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
PUSH 0

process_receipt_data_loop:
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
DUP2 DUP2
EQ
// stack: j == data_len, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%jumpi(process_receipt_data_end)
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// Write j-th data byte.
DUP3 DUP2
ADD
// stack: cur_data_ptr, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%increment
%jump(process_receipt_data_loop)

process_receipt_data_end:
// stack: data_len, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%pop3
%increment
%jump(process_receipt_logs_loop)

process_receipt_after_write:
// stack: num_logs, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
%pop2
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
SWAP1
// stack: txn_nb, receipt_ptr, new_cum_gas, txn_nb, retdest
%mpt_insert_receipt_trie
// stack: new_cum_gas, txn_nb, retdest
// Now, we set the Bloom filter back to 0.
PUSH 0
%rep 256
// stack: counter, new_cum_gas, txn_nb, retdest
PUSH 0 DUP2
// stack: counter, 0, counter, new_cum_gas, txn_nb, retdest
%mstore_kernel(@SEGMENT_TXN_BLOOM)
// stack: counter, new_cum_gas, txn_nb, retdest
%increment
%endrep
POP
// stack: new_cum_gas, txn_nb, retdest
%stack (new_cum_gas, txn_nb, retdest) -> (retdest, new_cum_gas, txn_nb)
JUMP

failed_receipt:
// stack: status, new_cum_gas, txn_nb
// This is the receipt of a failed transaction, so we set num_logs to 0. This also results in an all-zero Bloom filter.
PUSH 0
%mstore_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// stack: status, new_cum_gas, txn_nb
%jump(process_receipt_after_status)

%macro process_receipt
// stack: success, leftover_gas, cur_cum_gas, txn_nb
%stack (success, leftover_gas, cur_cum_gas, txn_nb) -> (success, leftover_gas, cur_cum_gas, txn_nb, %%after)
%jump(process_receipt)
%%after:
%endmacro

%macro compute_cumulative_gas
// stack: cur_cum_gas, leftover_gas
DUP2
// stack: leftover_gas, prev_cum_gas, leftover_gas
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
// stack: gas_limit, leftover_gas, prev_cum_gas, leftover_gas
DUP2 DUP2 LT %jumpi(panic)
// stack: gas_limit, leftover_gas, prev_cum_gas, leftover_gas
SUB
// stack: used_txn_gas, prev_cum_gas, leftover_gas
ADD SWAP1 POP
// stack: new_cum_gas
%endmacro
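
In plain terms, the macro above computes the following (a minimal Rust sketch, illustrative only):

fn compute_cumulative_gas(prev_cum_gas: u64, gas_limit: u64, leftover_gas: u64) -> u64 {
    // Mirrors the LT %jumpi(panic) check: leftover gas can never exceed the gas limit.
    assert!(leftover_gas <= gas_limit);
    prev_cum_gas + (gas_limit - leftover_gas)
}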
@ -1,5 +1,3 @@
// TODO: Implement receipts

global sys_log0:
%check_static
// stack: kexit_info, offset, size
@ -14,8 +12,10 @@ log0_after_mem_gas:
DUP3 %mul_const(@GAS_LOGDATA) %add_const(@GAS_LOG)
// stack: gas, kexit_info, offset, size
%charge_gas
%stack (kexit_info, offset, size) -> (kexit_info)
EXIT_KERNEL
%address
PUSH 0
%stack (zero, address, kexit_info, offset, size) -> (address, zero, size, offset, finish_sys_log, kexit_info)
%jump(log_n_entry)

global sys_log1:
%check_static
@ -31,8 +31,10 @@ log1_after_mem_gas:
DUP3 %mul_const(@GAS_LOGDATA) %add_const(@GAS_LOG) %add_const(@GAS_LOGTOPIC)
// stack: gas, kexit_info, offset, size, topic
%charge_gas
%stack (kexit_info, offset, size, topic) -> (kexit_info)
EXIT_KERNEL
%address
PUSH 1
%stack (one, address, kexit_info, offset, size, topic) -> (address, one, topic, size, offset, finish_sys_log, kexit_info)
%jump(log_n_entry)

global sys_log2:
%check_static
@ -48,8 +50,10 @@ log2_after_mem_gas:
DUP3 %mul_const(@GAS_LOGDATA) %add_const(@GAS_LOG) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC)
// stack: gas, kexit_info, offset, size, topic1, topic2
%charge_gas
%stack (kexit_info, offset, size, topic1, topic2) -> (kexit_info)
EXIT_KERNEL
%address
PUSH 2
%stack (two, address, kexit_info, offset, size, topic1, topic2) -> (address, two, topic1, topic2, size, offset, finish_sys_log, kexit_info)
%jump(log_n_entry)

global sys_log3:
%check_static
@ -65,8 +69,10 @@ log3_after_mem_gas:
DUP3 %mul_const(@GAS_LOGDATA) %add_const(@GAS_LOG) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC)
// stack: gas, kexit_info, offset, size, topic1, topic2, topic3
%charge_gas
%stack (kexit_info, offset, size, topic1, topic2, topic3) -> (kexit_info)
EXIT_KERNEL
%address
PUSH 3
%stack (three, address, kexit_info, offset, size, topic1, topic2, topic3) -> (address, three, topic1, topic2, topic3, size, offset, finish_sys_log, kexit_info)
%jump(log_n_entry)

global sys_log4:
%check_static
@ -82,5 +88,178 @@ log4_after_mem_gas:
DUP3 %mul_const(@GAS_LOGDATA) %add_const(@GAS_LOG) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC) %add_const(@GAS_LOGTOPIC)
// stack: gas, kexit_info, offset, size, topic1, topic2, topic3, topic4
%charge_gas
%stack (kexit_info, offset, size, topic1, topic2, topic3, topic4) -> (kexit_info)
%address
PUSH 4
%stack (four, address, kexit_info, offset, size, topic1, topic2, topic3, topic4) -> (address, four, topic1, topic2, topic3, topic4, size, offset, finish_sys_log, kexit_info)
%jump(log_n_entry)

finish_sys_log:
// stack: kexit_info
EXIT_KERNEL

global log_n_entry:
// stack: address, num_topics, topics, data_len, data_offset, retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
%mload_global_metadata(@GLOBAL_METADATA_LOGS_DATA_LEN)
// stack: log_ptr, logs_len, address, num_topics, topics, data_len, data_offset, retdest
DUP1 DUP3
// stack: log_ptr, logs_len, log_ptr, logs_len, address, num_topics, topics, data_len, data_offset, retdest
%mstore_kernel(@SEGMENT_LOGS)
// stack: log_ptr, logs_len, address, num_topics, topics, data_len, data_offset, retdest
SWAP1 %increment
%mstore_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// stack: log_ptr, address, num_topics, topics, data_len, data_offset, retdest
%increment
// stack: addr_ptr, address, num_topics, topics, data_len, data_offset, retdest
// Store the address.
DUP2 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
// stack: num_topics_ptr, address, num_topics, topics, data_len, data_offset, retdest
SWAP1 POP
// stack: num_topics_ptr, num_topics, topics, data_len, data_offset, retdest
// Store num_topics.
DUP2 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
// stack: topics_ptr, num_topics, topics, data_len, data_offset, retdest
DUP2
// stack: num_topics, topics_ptr, num_topics, topics, data_len, data_offset, retdest
ISZERO
%jumpi(log_after_topics)
// stack: topics_ptr, num_topics, topics, data_len, data_offset, retdest
// Store the first topic.
DUP3 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
%stack (curr_topic_ptr, num_topics, topic1) -> (curr_topic_ptr, num_topics)
DUP2 %eq_const(1)
%jumpi(log_after_topics)
// stack: curr_topic_ptr, num_topics, remaining_topics, data_len, data_offset, retdest
// Store the second topic.
DUP3 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
%stack (curr_topic_ptr, num_topics, topic2) -> (curr_topic_ptr, num_topics)
DUP2 %eq_const(2)
%jumpi(log_after_topics)
// stack: curr_topic_ptr, num_topics, remaining_topics, data_len, data_offset, retdest
// Store the third topic.
DUP3 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
%stack (curr_topic_ptr, num_topics, topic3) -> (curr_topic_ptr, num_topics)
DUP2 %eq_const(3)
%jumpi(log_after_topics)
// stack: curr_topic_ptr, num_topics, remaining_topic, data_len, data_offset, retdest
// Store the fourth topic.
DUP3 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
%stack (data_len_ptr, num_topics, topic4) -> (data_len_ptr, num_topics)
DUP2 %eq_const(4)
%jumpi(log_after_topics)
// Invalid num_topics.
PANIC

log_after_topics:
// stack: data_len_ptr, num_topics, data_len, data_offset, retdest
// Compute RLP length of the log.
DUP3
// stack: data_len, data_len_ptr, num_topics, data_len, data_offset, retdest
DUP5 SWAP1
%rlp_data_len
// stack: rlp_data_len, data_len_ptr, num_topics, data_len, data_offset, retdest
DUP3
// stack: num_topics, rlp_data_len, data_len_ptr, num_topics, data_len, data_offset, retdest
// Each topic is encoded with 1+32 bytes.
%mul_const(33)
%rlp_list_len
// stack: rlp_topics_len, rlp_data_len, data_len_ptr, num_topics, data_len, data_offset, retdest
ADD
// The address is encoded with 1+20 bytes.
%add_const(21)
// stack: log_payload_len, data_len_ptr, num_topics, data_len, data_offset, retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_DATA_LEN)
DUP2 SWAP1
// stack: log_ptr, log_payload_len, log_payload_len, data_len_ptr, num_topics, data_len, data_offset, retdest
%mstore_kernel(@SEGMENT_LOGS_DATA)
// stack: log_payload_len, data_len_ptr, num_topics, data_len, data_offset, retdest
%rlp_list_len
// stack: rlp_log_len, data_len_ptr, num_topics, data_len, data_offset, retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
ADD
%mstore_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
// stack: data_len_ptr, num_topics, data_len, data_offset, retdest
// Store data_len.
DUP3 DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
%increment
// stack: data_ptr, num_topics, data_len, data_offset, retdest
SWAP1 POP
// stack: data_ptr, data_len, data_offset, retdest
DUP1 SWAP2
// stack: data_len, data_ptr, data_ptr, data_offset, retdest
ADD
// stack: next_log_ptr, data_ptr, data_offset, retdest
SWAP1
// stack: data_ptr, next_log_ptr, data_offset, retdest

store_log_data_loop:
// stack: cur_data_ptr, next_log_ptr, cur_data_offset, retdest
DUP2 DUP2 EQ
// stack: cur_data_ptr == next_log_ptr, cur_data_ptr, next_log_ptr, cur_data_offset, retdest
%jumpi(store_log_data_loop_end)
// stack: cur_data_ptr, next_log_ptr, cur_data_offset, retdest
DUP3
%mload_current(@SEGMENT_MAIN_MEMORY)
// stack: cur_data, cur_data_ptr, next_log_ptr, cur_data_offset, retdest
// Store current data byte.
DUP2
%mstore_kernel(@SEGMENT_LOGS_DATA)
// stack: cur_data_ptr, next_log_ptr, cur_data_offset, retdest
SWAP2 %increment SWAP2
// stack: cur_data_ptr, next_log_ptr, next_data_offset, retdest
%increment
%jump(store_log_data_loop)

store_log_data_loop_end:
// stack: cur_data_ptr, next_log_ptr, cur_data_offset, retdest
POP
%mstore_global_metadata(@GLOBAL_METADATA_LOGS_DATA_LEN)
POP
JUMP
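
For reference, the record that log_n_entry lays out in SEGMENT_LOGS_DATA has the shape below (a hedged Rust sketch of the layout only; the kernel actually stores each field, and each data byte, in its own memory cell):

struct LogRecord {
    payload_len: u64,      // RLP payload length of [address, topics, data].
    address: [u8; 20],     // logging contract address.
    num_topics: u64,       // 0 to 4.
    topics: Vec<[u8; 32]>, // the topics themselves.
    data_len: u64,         // number of data bytes.
    data: Vec<u8>,         // the data bytes.
}
// SEGMENT_LOGS itself holds one pointer per log into this segment.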

rlp_data_len:
// stack: data_len, data_ptr, retdest
DUP1 ISZERO %jumpi(data_single_byte) // data will be encoded with a single byte
DUP1 PUSH 1 EQ %jumpi(one_byte_data) // data is encoded with either 1 or 2 bytes
// If we are here, data_len >= 2, and we can use rlp_list_len to determine the encoding length.
%rlp_list_len
// stack: rlp_data_len, data_ptr, retdest
SWAP1 POP SWAP1
JUMP

data_single_byte:
// stack: data_len, data_ptr, retdest
%pop2
PUSH 1
SWAP1
JUMP

one_byte_data:
// stack: data_len, data_ptr, retdest
DUP2
%mload_current(@SEGMENT_MAIN_MEMORY)
// stack: data_byte, data_len, data_ptr, retdest
%lt_const(0x80) %jumpi(data_single_byte) // a byte below 0x80 is encoded as itself, in a single byte
%pop2
PUSH 2 SWAP1
JUMP

%macro rlp_data_len
// stack: data_len, data_ptr
%stack (data_len, data_ptr) -> (data_len, data_ptr, %%after)
%jump(rlp_data_len)
%%after:
%endmacro
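
A hedged Rust equivalent of rlp_data_len above (illustrative only), giving the size of the RLP encoding of a byte string:

fn rlp_data_len(data: &[u8]) -> usize {
    match data.len() {
        0 => 1,                   // the empty string encodes as the single byte 0x80.
        1 if data[0] < 0x80 => 1, // a byte below 0x80 encodes as itself.
        1 => 2,                   // otherwise: 0x81 prefix followed by the byte.
        n if n <= 55 => 1 + n,    // short string: one prefix byte plus the payload.
        n => 1 + (usize::BITS as usize - n.leading_zeros() as usize + 7) / 8 + n, // long string.
    }
}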

@ -4,7 +4,7 @@
// TODO: Save checkpoints in @CTX_METADATA_STATE_TRIE_CHECKPOINT_PTR and @SEGMENT_STORAGE_TRIE_CHECKPOINT_PTRS.

// Pre stack: retdest
// Post stack: (empty)
// Post stack: success, leftover_gas
global process_normalized_txn:
// stack: retdest
%compute_fees
@ -29,16 +29,16 @@ global process_normalized_txn:
// stack: sender, retdest

// Check that txn nonce matches account nonce.
DUP1 %nonce
DUP1 %eq_const(@MAX_NONCE) %assert_zero(invalid_txn) // EIP-2681
DUP1 %nonce
DUP1 %eq_const(@MAX_NONCE) %assert_zero(invalid_txn_2) // EIP-2681
// stack: sender_nonce, sender, retdest
%mload_txn_field(@TXN_FIELD_NONCE)
// stack: tx_nonce, sender_nonce, sender, retdest
%assert_eq(invalid_txn)
%assert_eq(invalid_txn_1)
// stack: sender, retdest

// Assert sender has no code.
DUP1 %ext_code_empty %assert_nonzero(invalid_txn)
DUP1 %ext_code_empty %assert_nonzero(invalid_txn_1)
// stack: sender, retdest

// Assert sender balance >= gas_limit * gas_price + value.
@ -182,40 +182,44 @@ global process_contract_creation_txn_after_code_loaded:

global process_contract_creation_txn_after_constructor:
// stack: success, leftover_gas, new_ctx, address, retdest
DUP1 POP // TODO: Success will go into the receipt when we support that.
// We eventually return leftover_gas and success.
%stack (success, leftover_gas, new_ctx, address, retdest) -> (success, leftover_gas, new_ctx, address, retdest, success)

ISZERO %jumpi(contract_creation_fault_3)

// EIP-3541: Reject new contract code starting with the 0xEF byte
PUSH 0 %mload_current(@SEGMENT_RETURNDATA) %eq_const(0xEF) %jumpi(contract_creation_fault_3_zero_leftover)

// stack: leftover_gas, new_ctx, address, retdest
// stack: leftover_gas, new_ctx, address, retdest, success
%returndatasize // Size of the code.
// stack: code_size, leftover_gas, new_ctx, address, retdest
// stack: code_size, leftover_gas, new_ctx, address, retdest, success
DUP1 %gt_const(@MAX_CODE_SIZE) %jumpi(contract_creation_fault_4)
// stack: code_size, leftover_gas, new_ctx, address, retdest
// stack: code_size, leftover_gas, new_ctx, address, retdest, success
%mul_const(@GAS_CODEDEPOSIT) SWAP1
// stack: leftover_gas, codedeposit_cost, new_ctx, address, retdest
// stack: leftover_gas, codedeposit_cost, new_ctx, address, retdest, success
DUP2 DUP2 LT %jumpi(contract_creation_fault_4)
// stack: leftover_gas, codedeposit_cost, new_ctx, address, retdest
// stack: leftover_gas, codedeposit_cost, new_ctx, address, retdest, success
SUB

// Store the code hash of the new contract.
// stack: leftover_gas, new_ctx, address, retdest
// stack: leftover_gas, new_ctx, address, retdest, success
GET_CONTEXT
%returndatasize
%stack (size, ctx) -> (ctx, @SEGMENT_RETURNDATA, 0, size) // context, segment, offset, len
KECCAK_GENERAL
// stack: codehash, leftover_gas, new_ctx, address, retdest
// stack: codehash, leftover_gas, new_ctx, address, retdest, success
%observe_new_contract
DUP4
// stack: address, codehash, leftover_gas, new_ctx, address, retdest
// stack: address, codehash, leftover_gas, new_ctx, address, retdest, success
%set_codehash

// stack: leftover_gas, new_ctx, address, retdest
%stack (leftover_gas, new_ctx, address, retdest, success) -> (leftover_gas, new_ctx, address, retdest, success, leftover_gas)
%pay_coinbase_and_refund_sender
// stack: leftover_gas', new_ctx, address, retdest, success, leftover_gas
SWAP5 POP
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
// stack: new_ctx, address, retdest
// stack: new_ctx, address, retdest, success, leftover_gas
POP
POP
JUMP
@ -258,10 +262,17 @@ global process_message_txn_return:
// stack: retdest
// Since no code was executed, the leftover gas is the non-intrinsic gas.
%non_intrinisic_gas
// stack: leftover_gas, retdest
DUP1
// stack: leftover_gas, leftover_gas, retdest
%pay_coinbase_and_refund_sender
// stack: leftover_gas', leftover_gas, retdest
SWAP1 POP
%delete_all_touched_addresses
// stack: retdest
// stack: leftover_gas', retdest
SWAP1
PUSH 1 // success
SWAP1
// stack: retdest, success, leftover_gas
JUMP

global process_message_txn_code_loaded:
@ -291,19 +302,22 @@ process_message_txn_code_loaded_finish:

global process_message_txn_after_call:
// stack: success, leftover_gas, new_ctx, retdest
DUP1 POP // TODO: Success will go into the receipt when we support that.
// We will return leftover_gas and success.
%stack (success, leftover_gas, new_ctx, retdest) -> (success, leftover_gas, new_ctx, retdest, success, leftover_gas)
ISZERO %jumpi(process_message_txn_fail)
process_message_txn_after_call_contd:
// stack: leftover_gas, new_ctx, retdest
// stack: leftover_gas, new_ctx, retdest, success, leftover_gas
%pay_coinbase_and_refund_sender
// stack: leftover_gas', new_ctx, retdest, success, leftover_gas
SWAP4 POP
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
// stack: new_ctx, retdest
// stack: new_ctx, retdest, success, leftover_gas
POP
JUMP

process_message_txn_fail:
// stack: leftover_gas, new_ctx, retdest
// stack: leftover_gas, new_ctx, retdest, success, leftover_gas
// Transfer value back to the caller.
%mload_txn_field(@TXN_FIELD_VALUE) ISZERO %jumpi(process_message_txn_after_call_contd)
%mload_txn_field(@TXN_FIELD_VALUE)
@ -340,15 +354,16 @@ process_message_txn_fail:
// stack: coinbase, used_gas_tip, leftover_gas'
%add_eth
// stack: leftover_gas'
DUP1

// Refund gas to the origin.
%mload_txn_field(@TXN_FIELD_COMPUTED_FEE_PER_GAS)
MUL
// stack: leftover_gas_cost
// stack: leftover_gas_cost, leftover_gas'
%mload_txn_field(@TXN_FIELD_ORIGIN)
// stack: origin, leftover_gas_cost
// stack: origin, leftover_gas_cost, leftover_gas'
%add_eth
// stack: (empty)
// stack: leftover_gas'
%endmacro

// Sets @TXN_FIELD_MAX_FEE_PER_GAS and @TXN_FIELD_MAX_PRIORITY_FEE_PER_GAS.
@ -358,9 +373,9 @@ process_message_txn_fail:
%mload_txn_field(@TXN_FIELD_MAX_PRIORITY_FEE_PER_GAS)
%mload_txn_field(@TXN_FIELD_MAX_FEE_PER_GAS)
// stack: max_fee, max_priority_fee, base_fee
DUP3 DUP2 %assert_ge(invalid_txn) // Assert max_fee >= base_fee
DUP3 DUP2 %assert_ge(invalid_txn_3) // Assert max_fee >= base_fee
// stack: max_fee, max_priority_fee, base_fee
DUP2 DUP2 %assert_ge(invalid_txn) // Assert max_fee >= max_priority_fee
DUP2 DUP2 %assert_ge(invalid_txn_3) // Assert max_fee >= max_priority_fee
%stack (max_fee, max_priority_fee, base_fee) -> (max_fee, base_fee, max_priority_fee, base_fee)
SUB
// stack: max_fee - base_fee, max_priority_fee, base_fee
@ -386,41 +401,73 @@ create_contract_account_fault:
%revert_checkpoint
// stack: address, retdest
POP
PUSH 0 // leftover gas
PUSH 0 // leftover_gas
// stack: leftover_gas, retdest
%pay_coinbase_and_refund_sender
// stack: leftover_gas', retdest
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
// stack: leftover_gas', retdest
SWAP1 PUSH 0 // success
// stack: success, retdest, leftover_gas
SWAP1
JUMP

contract_creation_fault_3:
%revert_checkpoint
// stack: leftover_gas, new_ctx, address, retdest
%stack (leftover_gas, new_ctx, address, retdest) -> (leftover_gas, retdest)
%stack (leftover_gas, new_ctx, address, retdest, success) -> (leftover_gas, retdest, success)
%pay_coinbase_and_refund_sender
// stack: leftover_gas', retdest, success
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
%stack (leftover_gas, retdest, success) -> (retdest, success, leftover_gas)
JUMP

contract_creation_fault_3_zero_leftover:
%revert_checkpoint
// stack: leftover_gas, new_ctx, address, retdest
// stack: leftover_gas, new_ctx, address, retdest, success
%pop3
PUSH 0 // leftover gas
// stack: leftover_gas, retdest, success
%pay_coinbase_and_refund_sender
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
%stack (leftover_gas, retdest, success) -> (retdest, success, leftover_gas)
JUMP

contract_creation_fault_4:
%revert_checkpoint
// stack: code_size, leftover_gas, new_ctx, address, retdest
// stack: code_size/leftover_gas, leftover_gas/codedeposit_cost, new_ctx, address, retdest, success
%pop4
PUSH 0 // leftover gas
// stack: leftover_gas, retdest, success
%pay_coinbase_and_refund_sender
%delete_all_touched_addresses
%delete_all_selfdestructed_addresses
%stack (leftover_gas, retdest, success) -> (retdest, success, leftover_gas)
JUMP


global invalid_txn:
%jump(txn_loop)
POP
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
PUSH 0
%jump(txn_loop_after)

global invalid_txn_1:
%pop2
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
PUSH 0
%jump(txn_loop_after)

global invalid_txn_2:
%pop3
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
PUSH 0
%jump(txn_loop_after)

global invalid_txn_3:
%pop4
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
PUSH 0
%jump(txn_loop_after)
@ -11,16 +11,33 @@ global hash_initial_tries:
%mpt_hash_txn_trie %mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_DIGEST_BEFORE) %assert_eq
%mpt_hash_receipt_trie %mload_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_DIGEST_BEFORE) %assert_eq

global txn_loop:
global start_txns:
// stack: (empty)
// Last mpt input is txn_nb.
PROVER_INPUT(mpt)
PUSH 0
// stack: init_used_gas, txn_nb

txn_loop:
// If the prover has no more txns for us to process, halt.
PROVER_INPUT(end_of_txns)
%jumpi(hash_final_tries)

// Call route_txn. When we return, continue the txn loop.
PUSH txn_loop
PUSH txn_loop_after
// stack: retdest, prev_used_gas, txn_nb
%jump(route_txn)

global txn_loop_after:
// stack: success, leftover_gas, cur_cum_gas, txn_nb
%process_receipt
// stack: new_cum_gas, txn_nb
SWAP1 %increment SWAP1
%jump(txn_loop)

global hash_final_tries:
// stack: cum_gas, txn_nb
%pop2
%mpt_hash_state_trie %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER) %assert_eq
%mpt_hash_txn_trie %mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_DIGEST_AFTER) %assert_eq
%mpt_hash_receipt_trie %mload_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_DIGEST_AFTER) %assert_eq
@ -99,8 +99,140 @@ global encode_account:
global encode_txn:
PANIC // TODO

// We assume a receipt in memory is stored as:
// [payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, [logs]].
// A log is [payload_len, address, num_topics, [topics], data_len, [data]].
// TODO: support type >0 receipts.
global encode_receipt:
PANIC // TODO
// stack: rlp_pos, value_ptr, retdest
// There is a double encoding! What we compute is:
// RLP(RLP(receipt)).
// First encode the wrapper prefix.
DUP2 %mload_trie_data
%rlp_list_len
// stack: rlp_receipt_len, rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_multi_byte_string_prefix
// stack: rlp_pos, value_ptr, retdest
// Then encode the receipt prefix.
DUP2 %mload_trie_data
// stack: payload_len, rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_list_prefix
// stack: rlp_pos, value_ptr, retdest
// Encode status.
DUP2 %increment %mload_trie_data
// stack: status, rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_scalar
// stack: rlp_pos, value_ptr, retdest
// Encode cum_gas_used.
DUP2 %add_const(2) %mload_trie_data
// stack: cum_gas_used, rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_scalar
// stack: rlp_pos, value_ptr, retdest
// Encode bloom.
PUSH 256 // Bloom length.
DUP3 %add_const(3) PUSH @SEGMENT_TRIE_DATA PUSH 0 // MPT src address.
DUP5
// stack: rlp_pos, SRC, 256, rlp_pos, value_ptr, retdest
%encode_rlp_string
// stack: rlp_pos, old_rlp_pos, value_ptr, retdest
SWAP1 POP
// stack: rlp_pos, value_ptr, retdest
// Encode logs prefix.
DUP2 %add_const(259) %mload_trie_data
// stack: logs_payload_len, rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_list_prefix
// stack: rlp_pos, value_ptr, retdest
DUP2 %add_const(261)
// stack: logs_ptr, rlp_pos, value_ptr, retdest
DUP3 %add_const(260) %mload_trie_data
// stack: num_logs, logs_ptr, rlp_pos, value_ptr, retdest
PUSH 0

encode_receipt_logs_loop:
// stack: i, num_logs, current_log_ptr, rlp_pos, value_ptr, retdest
DUP2 DUP2 EQ
// stack: i == num_logs, i, num_logs, current_log_ptr, rlp_pos, value_ptr, retdest
%jumpi(encode_receipt_end)
// stack: i, num_logs, current_log_ptr, rlp_pos, value_ptr, retdest
DUP3 DUP5
// stack: rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
// Encode log prefix.
DUP2 %mload_trie_data
// stack: payload_len, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_list_prefix
// stack: rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
// Encode address.
DUP2 %increment %mload_trie_data
// stack: address, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP1 %encode_rlp_160
// stack: rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
DUP2 %add_const(2) %mload_trie_data
// stack: num_topics, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
// Encode topics prefix.
DUP1 %mul_const(33)
// stack: topics_payload_len, num_topics, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
DUP3 %encode_rlp_list_prefix
// stack: new_rlp_pos, num_topics, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP2 POP
// stack: num_topics, rlp_pos, current_log_ptr, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP2 %add_const(3)
// stack: topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
PUSH 0

encode_receipt_topics_loop:
// stack: j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
DUP4 DUP2 EQ
// stack: j == num_topics, j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
%jumpi(encode_receipt_topics_end)
// stack: j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
DUP2 DUP2 ADD
%mload_trie_data
// stack: current_topic, j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
DUP4
// stack: rlp_pos, current_topic, j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
%encode_rlp_256
// stack: new_rlp_pos, j, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP3 POP
// stack: j, topics_ptr, new_rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
%increment
%jump(encode_receipt_topics_loop)

encode_receipt_topics_end:
// stack: num_topics, topics_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
ADD
// stack: data_len_ptr, rlp_pos, num_topics, i, num_logs, current_log_ptr, old_rlp_pos, value_ptr, retdest
SWAP5 POP
// stack: rlp_pos, num_topics, i, num_logs, data_len_ptr, old_rlp_pos, value_ptr, retdest
SWAP5 POP
// stack: num_topics, i, num_logs, data_len_ptr, rlp_pos, value_ptr, retdest
POP
// stack: i, num_logs, data_len_ptr, rlp_pos, value_ptr, retdest
// Encode data prefix.
DUP3 %mload_trie_data
// stack: data_len, i, num_logs, data_len_ptr, rlp_pos, value_ptr, retdest
DUP4 %increment DUP2 ADD
// stack: next_log_ptr, data_len, i, num_logs, data_len_ptr, rlp_pos, value_ptr, retdest
SWAP4 %increment
// stack: data_ptr, data_len, i, num_logs, next_log_ptr, rlp_pos, value_ptr, retdest
PUSH @SEGMENT_TRIE_DATA PUSH 0
// stack: SRC, data_len, i, num_logs, next_log_ptr, rlp_pos, value_ptr, retdest
DUP8
// stack: rlp_pos, SRC, data_len, i, num_logs, next_log_ptr, rlp_pos, value_ptr, retdest
%encode_rlp_string
// stack: new_rlp_pos, i, num_logs, next_log_ptr, rlp_pos, value_ptr, retdest
SWAP4 POP
// stack: i, num_logs, next_log_ptr, new_rlp_pos, value_ptr, retdest
%increment
%jump(encode_receipt_logs_loop)

encode_receipt_end:
// stack: num_logs, num_logs, current_log_ptr, rlp_pos, value_ptr, retdest
%pop3
// stack: rlp_pos, value_ptr, retdest
SWAP1 POP
// stack: rlp_pos, retdest
SWAP1
JUMP

global encode_storage_value:
// stack: rlp_pos, value_ptr, retdest
@ -114,3 +246,4 @@ global encode_storage_value:
// stack: rlp_pos', retdest
SWAP1
JUMP

@ -22,3 +22,53 @@ mpt_insert_state_trie_save:
%jump(mpt_insert_state_trie)
%%after:
%endmacro

global mpt_insert_receipt_trie:
// stack: scalar, value_ptr, retdest
%stack (scalar, value_ptr)
-> (scalar, value_ptr, mpt_insert_receipt_trie_save)
// The key is the RLP encoding of scalar.
%scalar_to_rlp
// stack: key, value_ptr, mpt_insert_receipt_trie_save, retdest
DUP1
%num_bytes %mul_const(2)
// stack: num_nibbles, key, value_ptr, mpt_insert_receipt_trie_save, retdest
%mload_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_ROOT)
// stack: receipt_root_ptr, num_nibbles, key, value_ptr, mpt_insert_receipt_trie_save, retdest
%jump(mpt_insert)
mpt_insert_receipt_trie_save:
// stack: updated_node_ptr, retdest
%mstore_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_ROOT)
JUMP

%macro mpt_insert_receipt_trie
%stack (key, value_ptr) -> (key, value_ptr, %%after)
%jump(mpt_insert_receipt_trie)
%%after:
%endmacro

// Pre stack: scalar, retdest
// Post stack: rlp_scalar
// We will make use of %encode_rlp_scalar, which clobbers RlpRaw.
// We're not hashing tries yet, so it's not an issue.
global scalar_to_rlp:
// stack: scalar, retdest
PUSH 0
// stack: pos, scalar, retdest
%encode_rlp_scalar
// stack: pos', retdest
// Now our rlp_encoding is in RlpRaw in the first pos' cells.
DUP1 // len of the key
PUSH 0 PUSH @SEGMENT_RLP_RAW PUSH 0 // address where we get the key from
%mload_packing
// stack: packed_key, pos', retdest
SWAP1 POP
// stack: key, retdest
SWAP1
JUMP

%macro scalar_to_rlp
%stack (scalar) -> (scalar, %%after)
%jump(scalar_to_rlp)
%%after:
%endmacro
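
For context, the receipt trie is keyed by the RLP encoding of the transaction index, which is what scalar_to_rlp produces. A hedged Rust sketch of that encoding (illustrative only):

fn rlp_scalar(x: u64) -> Vec<u8> {
    if x == 0 {
        return vec![0x80]; // zero encodes as the empty string.
    }
    if x < 0x80 {
        return vec![x as u8]; // small scalars encode as themselves.
    }
    // Otherwise: a 0x80 + len prefix followed by the big-endian bytes, without leading zeros.
    let bytes: Vec<u8> = x.to_be_bytes().iter().copied().skip_while(|&b| b == 0).collect();
    let mut out = vec![0x80 + bytes.len() as u8];
    out.extend(bytes);
    out
}
// Examples: rlp_scalar(0) == [0x80], rlp_scalar(1) == [0x01], rlp_scalar(128) == [0x81, 0x80].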

@ -33,7 +33,89 @@ global mpt_load_txn_trie_value:

global mpt_load_receipt_trie_value:
// stack: retdest
PANIC // TODO

// Load payload_len.
PROVER_INPUT(mpt) %append_to_trie_data
// Load status.
PROVER_INPUT(mpt) %append_to_trie_data
// Load cum_gas_used.
PROVER_INPUT(mpt) %append_to_trie_data
// Load bloom.
%rep 256
PROVER_INPUT(mpt) %append_to_trie_data
%endrep
// Load logs_payload_len.
PROVER_INPUT(mpt) %append_to_trie_data
// Load num_logs.
PROVER_INPUT(mpt)
DUP1
%append_to_trie_data
// stack: num_logs, retdest
// Load logs.
PUSH 0

mpt_load_receipt_trie_value_logs_loop:
// stack: i, num_logs, retdest
DUP2 DUP2 EQ
// stack: i == num_logs, i, num_logs, retdest
%jumpi(mpt_load_receipt_trie_value_end)
// stack: i, num_logs, retdest
// Load log_payload_len.
PROVER_INPUT(mpt) %append_to_trie_data
// Load address.
PROVER_INPUT(mpt) %append_to_trie_data
// Load num_topics.
PROVER_INPUT(mpt)
DUP1
%append_to_trie_data
// stack: num_topics, i, num_logs, retdest
// Load topics.
PUSH 0

mpt_load_receipt_trie_value_topics_loop:
// stack: j, num_topics, i, num_logs, retdest
DUP2 DUP2 EQ
// stack: j == num_topics, j, num_topics, i, num_logs, retdest
%jumpi(mpt_load_receipt_trie_value_topics_end)
// stack: j, num_topics, i, num_logs, retdest
// Load topic.
PROVER_INPUT(mpt) %append_to_trie_data
%increment
%jump(mpt_load_receipt_trie_value_topics_loop)

mpt_load_receipt_trie_value_topics_end:
// stack: num_topics, num_topics, i, num_logs, retdest
%pop2
// stack: i, num_logs, retdest
// Load data_len.
PROVER_INPUT(mpt)
DUP1
%append_to_trie_data
// stack: data_len, i, num_logs, retdest
// Load data.
PUSH 0

mpt_load_receipt_trie_value_data_loop:
// stack: j, data_len, i, num_logs, retdest
DUP2 DUP2 EQ
// stack: j == data_len, j, data_len, i, num_logs, retdest
%jumpi(mpt_load_receipt_trie_value_data_end)
// stack: j, data_len, i, num_logs, retdest
// Load data byte.
PROVER_INPUT(mpt) %append_to_trie_data
%increment
%jump(mpt_load_receipt_trie_value_data_loop)

mpt_load_receipt_trie_value_data_end:
// stack: data_len, data_len, i, num_logs, retdest
%pop2
%increment
%jump(mpt_load_receipt_trie_value_logs_loop)

mpt_load_receipt_trie_value_end:
// stack: num_logs, num_logs, retdest
%pop2
JUMP

global mpt_load_storage_trie_value:
// stack: retdest

@ -72,3 +72,9 @@ global encode_rlp_string_large_after_writing_len:
%stack (pos3, pos2, ADDR: 3, len, retdest)
-> (0, @SEGMENT_RLP_RAW, pos2, ADDR, len, retdest, pos3)
%jump(memcpy)

%macro encode_rlp_string
%stack (pos, ADDR: 3, len) -> (pos, ADDR, len, %%after)
%jump(encode_rlp_string)
%%after:
%endmacro
@ -21,36 +21,38 @@ pub(crate) enum GlobalMetadata {
ReceiptTrieRoot = 6,

// The root digests of each Merkle trie before these transactions.
StateTrieRootDigestBefore = 8,
TransactionTrieRootDigestBefore = 9,
ReceiptTrieRootDigestBefore = 10,
StateTrieRootDigestBefore = 7,
TransactionTrieRootDigestBefore = 8,
ReceiptTrieRootDigestBefore = 9,

// The root digests of each Merkle trie after these transactions.
StateTrieRootDigestAfter = 11,
TransactionTrieRootDigestAfter = 12,
ReceiptTrieRootDigestAfter = 13,
StateTrieRootDigestAfter = 10,
TransactionTrieRootDigestAfter = 11,
ReceiptTrieRootDigestAfter = 12,

/// The sizes of the `TrieEncodedChild` and `TrieEncodedChildLen` buffers. In other words, the
/// next available offset in these buffers.
TrieEncodedChildSize = 14,
TrieEncodedChildSize = 13,

// Block metadata.
BlockBeneficiary = 15,
BlockTimestamp = 16,
BlockNumber = 17,
BlockDifficulty = 18,
BlockGasLimit = 19,
BlockChainId = 20,
BlockBaseFee = 21,
BlockBeneficiary = 14,
BlockTimestamp = 15,
BlockNumber = 16,
BlockDifficulty = 17,
BlockGasLimit = 18,
BlockChainId = 19,
BlockBaseFee = 20,

/// Gas to refund at the end of the transaction.
RefundCounter = 22,
RefundCounter = 21,
/// Length of the addresses access list.
AccessedAddressesLen = 23,
AccessedAddressesLen = 22,
/// Length of the storage keys access list.
AccessedStorageKeysLen = 24,
AccessedStorageKeysLen = 23,
/// Length of the self-destruct list.
SelfDestructListLen = 25,
SelfDestructListLen = 24,
/// Length of the bloom entry buffer.
BloomEntryLen = 25,

/// Length of the journal.
JournalLen = 26,
@ -69,10 +71,14 @@ pub(crate) enum GlobalMetadata {
ContractCreation = 33,
IsPrecompileFromEoa = 34,
CallStackDepth = 35,
/// Transaction logs list length
LogsLen = 36,
LogsDataLen = 37,
LogsPayloadLen = 38,
}

impl GlobalMetadata {
pub(crate) const COUNT: usize = 35;
pub(crate) const COUNT: usize = 39;

pub(crate) fn all() -> [Self; Self::COUNT] {
[
@ -101,6 +107,7 @@ impl GlobalMetadata {
Self::AccessedAddressesLen,
Self::AccessedStorageKeysLen,
Self::SelfDestructListLen,
Self::BloomEntryLen,
Self::JournalLen,
Self::JournalDataLen,
Self::CurrentCheckpoint,
@ -111,6 +118,9 @@ impl GlobalMetadata {
Self::ContractCreation,
Self::IsPrecompileFromEoa,
Self::CallStackDepth,
Self::LogsLen,
Self::LogsDataLen,
Self::LogsPayloadLen,
]
}

@ -142,6 +152,7 @@ impl GlobalMetadata {
Self::AccessedAddressesLen => "GLOBAL_METADATA_ACCESSED_ADDRESSES_LEN",
Self::AccessedStorageKeysLen => "GLOBAL_METADATA_ACCESSED_STORAGE_KEYS_LEN",
Self::SelfDestructListLen => "GLOBAL_METADATA_SELFDESTRUCT_LIST_LEN",
Self::BloomEntryLen => "GLOBAL_METADATA_BLOOM_ENTRY_LEN",
Self::JournalLen => "GLOBAL_METADATA_JOURNAL_LEN",
Self::JournalDataLen => "GLOBAL_METADATA_JOURNAL_DATA_LEN",
Self::CurrentCheckpoint => "GLOBAL_METADATA_CURRENT_CHECKPOINT",
@ -152,6 +163,9 @@ impl GlobalMetadata {
Self::ContractCreation => "GLOBAL_METADATA_CONTRACT_CREATION",
Self::IsPrecompileFromEoa => "GLOBAL_METADATA_IS_PRECOMPILE_FROM_EOA",
Self::CallStackDepth => "GLOBAL_METADATA_CALL_STACK_DEPTH",
Self::LogsLen => "GLOBAL_METADATA_LOGS_LEN",
Self::LogsDataLen => "GLOBAL_METADATA_LOGS_DATA_LEN",
Self::LogsPayloadLen => "GLOBAL_METADATA_LOGS_PAYLOAD_LEN",
}
}
}
@ -225,6 +225,10 @@ impl<'a> Interpreter<'a> {
|
||||
.content = memory;
|
||||
}
|
||||
|
||||
pub(crate) fn set_memory_segment(&mut self, segment: Segment, memory: Vec<U256>) {
|
||||
self.generation_state.memory.contexts[0].segments[segment as usize].content = memory;
|
||||
}
|
||||
|
||||
pub(crate) fn set_memory_segment_bytes(&mut self, segment: Segment, memory: Vec<u8>) {
|
||||
self.generation_state.memory.contexts[0].segments[segment as usize].content =
|
||||
memory.into_iter().map(U256::from).collect();
|
||||
@ -1020,7 +1024,8 @@ impl<'a> Interpreter<'a> {
|
||||
fn run_mload_general(&mut self) {
|
||||
let context = self.pop().as_usize();
|
||||
let segment = Segment::all()[self.pop().as_usize()];
|
||||
let offset = self.pop().as_usize();
|
||||
let offset_u256 = self.pop();
|
||||
let offset = offset_u256.as_usize();
|
||||
let value = self
|
||||
.generation_state
|
||||
.memory
|
||||
|
||||
198
evm/src/cpu/kernel/tests/log.rs
Normal file
@ -0,0 +1,198 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::{Address, U256};
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
|
||||
use crate::cpu::kernel::interpreter::Interpreter;
|
||||
use crate::memory::segments::Segment;
|
||||
|
||||
#[test]
|
||||
fn test_log_0() -> Result<()> {
|
||||
let logs_entry = KERNEL.global_labels["log_n_entry"];
|
||||
let address: Address = thread_rng().gen();
|
||||
let num_topics = U256::from(0);
|
||||
let data_len = U256::from(0);
|
||||
let data_offset = U256::from(0);
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
let initial_stack = vec![
|
||||
retdest,
|
||||
data_offset,
|
||||
data_len,
|
||||
num_topics,
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 0.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 0.into());
|
||||
|
||||
interpreter.run()?;
|
||||
|
||||
// The address is encoded in 1+20 bytes. There are no topics or data, so each is encoded in 1 byte. This leads to a payload of 23.
|
||||
let payload_len = 23;
|
||||
assert_eq!(
|
||||
interpreter.get_memory_segment(Segment::LogsData),
|
||||
[
|
||||
payload_len.into(),
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
0.into(),
|
||||
0.into(),
|
||||
]
|
||||
);
|
||||
Ok(())
|
||||
}
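
// Sketch of how the `payload_len` values asserted in these tests (23 here, 93 and 156
// below) can be reproduced off-circuit with the `rlp` crate and the `LogRlp` type added
// in this PR. Illustrative only; not part of the kernel test suite.
#[allow(dead_code)]
fn rlp_log_payload_len(log: &crate::generation::mpt::LogRlp) -> usize {
    let encoded = rlp::encode(log);
    // `value_len` is the length of the list payload, excluding the list prefix itself.
    // For a log with only an address (no topics, no data) this is 21 + 1 + 1 = 23.
    rlp::PayloadInfo::from(&encoded).unwrap().value_len
}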
|
||||
|
||||
#[test]
|
||||
fn test_log_2() -> Result<()> {
|
||||
let logs_entry = KERNEL.global_labels["log_n_entry"];
|
||||
let address: Address = thread_rng().gen();
|
||||
let num_topics = U256::from(2);
|
||||
let topics = vec![4.into(), 5.into()];
|
||||
let data_len = U256::from(3);
|
||||
let data_offset = U256::from(0);
|
||||
|
||||
let memory = vec![10.into(), 20.into(), 30.into()];
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
let initial_stack = vec![
|
||||
retdest,
|
||||
data_offset,
|
||||
data_len,
|
||||
topics[1],
|
||||
topics[0],
|
||||
num_topics,
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 2.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 5.into());
|
||||
|
||||
interpreter.set_memory_segment(Segment::MainMemory, memory);
|
||||
|
||||
interpreter.run()?;
|
||||
assert_eq!(
|
||||
interpreter.get_memory_segment(Segment::Logs),
|
||||
[0.into(), 0.into(), 5.into(),]
|
||||
);
|
||||
|
||||
// The data has length 3 bytes, and is encoded in 4 bytes. Each of the two topics is encoded in 1+32 bytes. The prefix for the topics list requires 2 bytes. The address is encoded in 1+20 bytes. Overall, we have a logs payload length of 93 bytes.
|
||||
let payload_len = 93;
|
||||
assert_eq!(
|
||||
interpreter.get_memory_segment(Segment::LogsData),
|
||||
[
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
payload_len.into(),
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
2.into(),
|
||||
4.into(),
|
||||
5.into(),
|
||||
3.into(),
|
||||
10.into(),
|
||||
20.into(),
|
||||
30.into(),
|
||||
]
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_log_4() -> Result<()> {
|
||||
let logs_entry = KERNEL.global_labels["log_n_entry"];
|
||||
let address: Address = thread_rng().gen();
|
||||
let num_topics = U256::from(4);
|
||||
let topics = vec![45.into(), 46.into(), 47.into(), 48.into()];
|
||||
let data_len = U256::from(1);
|
||||
let data_offset = U256::from(2);
|
||||
|
||||
let memory = vec![0.into(), 0.into(), 123.into()];
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
let initial_stack = vec![
|
||||
retdest,
|
||||
data_offset,
|
||||
data_len,
|
||||
topics[3],
|
||||
topics[2],
|
||||
topics[1],
|
||||
topics[0],
|
||||
num_topics,
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 2.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 5.into());
|
||||
|
||||
interpreter.set_memory_segment(Segment::MainMemory, memory);
|
||||
|
||||
interpreter.run()?;
|
||||
assert_eq!(
|
||||
interpreter.get_memory_segment(Segment::Logs),
|
||||
[0.into(), 0.into(), 5.into(),]
|
||||
);
|
||||
|
||||
// The data is of length 1 byte, and is encoded in 1 byte. Each of the four topics is encoded in 1+32 bytes. The topics list is prefixed by 2 bytes. The address is encoded in 1+20 bytes. Overall, this leads to a log payload length of 156.
|
||||
let payload_len = 156;
|
||||
assert_eq!(
|
||||
interpreter.get_memory_segment(Segment::LogsData),
|
||||
[
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
0.into(),
|
||||
payload_len.into(),
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
4.into(),
|
||||
45.into(),
|
||||
46.into(),
|
||||
47.into(),
|
||||
48.into(),
|
||||
1.into(),
|
||||
123.into(),
|
||||
]
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_log_5() -> Result<()> {
|
||||
let logs_entry = KERNEL.global_labels["log_n_entry"];
|
||||
let address: Address = thread_rng().gen();
|
||||
let num_topics = U256::from(5);
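// There is no LOG5 opcode (only LOG0 through LOG4), so the kernel is expected to reject
// a log with 5 topics; the `run()` below must return an error.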
|
||||
let topics = vec![1.into(), 2.into(), 3.into(), 4.into(), 5.into()];
|
||||
let data_len = U256::from(0);
|
||||
let data_offset = U256::from(0);
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
let initial_stack = vec![
|
||||
retdest,
|
||||
data_offset,
|
||||
data_len,
|
||||
topics[4],
|
||||
topics[3],
|
||||
topics[2],
|
||||
topics[1],
|
||||
topics[0],
|
||||
num_topics,
|
||||
U256::from_big_endian(&address.to_fixed_bytes()),
|
||||
];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_entry, initial_stack);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 0.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsDataLen, 0.into());
|
||||
|
||||
assert!(interpreter.run().is_err());
|
||||
Ok(())
|
||||
}
|
||||
@ -8,8 +8,10 @@ mod core;
|
||||
mod ecc;
|
||||
mod exp;
|
||||
mod hash;
|
||||
mod log;
|
||||
mod mpt;
|
||||
mod packing;
|
||||
mod receipt;
|
||||
mod rlp;
|
||||
mod signed_syscalls;
|
||||
mod transaction_parsing;
|
||||
|
||||
595
evm/src/cpu/kernel/tests/receipt.rs
Normal file
@ -0,0 +1,595 @@
|
||||
use anyhow::Result;
|
||||
use ethereum_types::{Address, U256};
|
||||
use hex_literal::hex;
|
||||
use keccak_hash::keccak;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
use crate::cpu::kernel::aggregator::KERNEL;
|
||||
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
|
||||
use crate::cpu::kernel::constants::txn_fields::NormalizedTxnField;
|
||||
use crate::cpu::kernel::interpreter::Interpreter;
|
||||
use crate::generation::mpt::{all_mpt_prover_inputs_reversed, LegacyReceiptRlp, LogRlp};
|
||||
use crate::memory::segments::Segment;
|
||||
|
||||
#[test]
|
||||
fn test_process_receipt() -> Result<()> {
|
||||
/* Tests process_receipt, which:
|
||||
- computes the cumulative gas
|
||||
- computes the bloom filter
|
||||
- inserts the receipt data in MPT_TRIE_DATA
|
||||
- inserts a node in receipt_trie
|
||||
- resets the bloom filter to 0 for the next transaction. */
|
||||
let process_receipt = KERNEL.global_labels["process_receipt"];
|
||||
let success = U256::from(1);
|
||||
let leftover_gas = U256::from(4000);
|
||||
let prev_cum_gas = U256::from(1000);
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
// Log.
|
||||
let address: Address = thread_rng().gen();
|
||||
let num_topics = 1;
|
||||
|
||||
let mut topic = vec![0_u8; 32];
|
||||
topic[31] = 4;
|
||||
|
||||
// Compute the expected Bloom filter.
|
||||
let test_logs_list = vec![(address.to_fixed_bytes().to_vec(), vec![topic])];
|
||||
let expected_bloom = logs_bloom_bytes_fn(test_logs_list).to_vec();
|
||||
|
||||
// Set memory.
|
||||
let initial_stack = vec![retdest, 0.into(), prev_cum_gas, leftover_gas, success];
|
||||
let mut interpreter = Interpreter::new_with_kernel(process_receipt, initial_stack);
|
||||
interpreter.set_memory_segment(
|
||||
Segment::LogsData,
|
||||
vec![
|
||||
56.into(), // payload len
|
||||
U256::from_big_endian(&address.to_fixed_bytes()), // address
|
||||
num_topics.into(), // num_topics
|
||||
4.into(), // topic
|
||||
0.into(), // data_len
|
||||
],
|
||||
);
|
||||
interpreter.set_txn_field(NormalizedTxnField::GasLimit, U256::from(5000));
|
||||
interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]);
|
||||
interpreter.set_memory_segment(Segment::BlockBloom, vec![0.into(); 256]);
|
||||
interpreter.set_memory_segment(Segment::Logs, vec![0.into()]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 58.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1));
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::ReceiptTrieRoot, 500.into());
|
||||
interpreter.run()?;
|
||||
|
||||
let segment_read = interpreter.get_memory_segment(Segment::TrieData);
|
||||
|
||||
// The expected TrieData has the form [payload_len, status, cum_gas_used, bloom_filter, logs_payload_len, num_logs, [logs]]
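// As a cross-check: payload_len = 323 = 1 (status) + 3 (cum_gas_used 0x7d0) + 259 (256-byte
// bloom plus its 3-byte string prefix) + 2 (logs list prefix) + 58 (logs payload), where
// 58 = 2 + 56 is the single log's list prefix plus its own payload.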
|
||||
let mut expected_trie_data: Vec<U256> = vec![323.into(), success, 2000.into()];
|
||||
expected_trie_data.extend(
|
||||
expected_bloom
|
||||
.into_iter()
|
||||
.map(|elt| elt.into())
|
||||
.collect::<Vec<U256>>(),
|
||||
);
|
||||
expected_trie_data.push(58.into()); // logs_payload_len
|
||||
expected_trie_data.push(1.into()); // num_logs
|
||||
expected_trie_data.extend(vec![
|
||||
56.into(), // payload len
|
||||
U256::from_big_endian(&address.to_fixed_bytes()), // address
|
||||
num_topics.into(), // num_topics
|
||||
4.into(), // topic
|
||||
0.into(), // data_len
|
||||
]);
|
||||
|
||||
assert_eq!(
|
||||
expected_trie_data,
|
||||
segment_read[0..expected_trie_data.len()]
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000
|
||||
#[test]
|
||||
fn test_receipt_encoding() -> Result<()> {
|
||||
// Initialize interpreter.
|
||||
let success = U256::from(1);
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
let num_topics = 3;
|
||||
|
||||
let encode_receipt = KERNEL.global_labels["encode_receipt"];
|
||||
|
||||
// Logs and receipt in encodable form.
|
||||
let log_1 = LogRlp {
|
||||
address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
|
||||
topics: vec![
|
||||
hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004").into(),
|
||||
hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(),
|
||||
],
|
||||
data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
|
||||
.to_vec()
|
||||
.into(),
|
||||
};
|
||||
|
||||
let receipt_1 = LegacyReceiptRlp {
|
||||
status: true,
|
||||
cum_gas_used: 0x02dcb6u64.into(),
|
||||
bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(),
|
||||
logs: vec![log_1],
|
||||
};
|
||||
// Get the expected RLP encoding.
|
||||
let expected_rlp = rlp::encode(&rlp::encode(&receipt_1));
|
||||
|
||||
let initial_stack = vec![retdest, 0.into(), 0.into()];
|
||||
let mut interpreter = Interpreter::new_with_kernel(encode_receipt, initial_stack);
|
||||
|
||||
// Write data to memory.
|
||||
let expected_bloom_bytes = vec![
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
];
|
||||
let expected_bloom: Vec<U256> = expected_bloom_bytes
|
||||
.into_iter()
|
||||
.map(|elt| elt.into())
|
||||
.collect();
|
||||
|
||||
let addr = U256::from([
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3,
|
||||
0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d,
|
||||
]);
|
||||
|
||||
let topic1 = U256::from([
|
||||
0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31,
|
||||
0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66,
|
||||
0x67, 0x4d,
|
||||
]);
|
||||
|
||||
let topic2 = 4.into();
|
||||
let topic3 = 0x4920ea.into();
|
||||
|
||||
let mut logs = vec![
|
||||
155.into(), // unused
|
||||
addr,
|
||||
num_topics.into(), // num_topics
|
||||
topic1, // topic1
|
||||
topic2, // topic2
|
||||
topic3, // topic3
|
||||
32.into(), // data length
|
||||
];
|
||||
let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
|
||||
.iter()
|
||||
.copied()
|
||||
.map(U256::from);
|
||||
logs.extend(cur_data);
|
||||
|
||||
let mut receipt = vec![423.into(), success, receipt_1.cum_gas_used];
|
||||
receipt.extend(expected_bloom.clone());
|
||||
receipt.push(157.into()); // logs_payload_len
|
||||
receipt.push(1.into()); // num_logs
|
||||
receipt.extend(logs.clone());
|
||||
interpreter.set_memory_segment(Segment::LogsData, logs);
|
||||
|
||||
interpreter.set_memory_segment(Segment::TxnBloom, expected_bloom);
|
||||
|
||||
interpreter.set_memory_segment(Segment::Logs, vec![0.into()]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, 1.into());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsPayloadLen, 157.into());
|
||||
interpreter.set_memory_segment(Segment::TrieData, receipt);
|
||||
|
||||
interpreter.run()?;
|
||||
let rlp_pos = interpreter.pop();
|
||||
|
||||
let rlp_read: Vec<u8> = interpreter.get_rlp_memory();
|
||||
|
||||
assert_eq!(rlp_pos.as_usize(), expected_rlp.len());
|
||||
for i in 0..rlp_read.len() {
|
||||
assert_eq!(rlp_read[i], expected_rlp[i]);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Values taken from the block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000
|
||||
#[test]
|
||||
fn test_receipt_bloom_filter() -> Result<()> {
|
||||
let logs_bloom = KERNEL.global_labels["logs_bloom"];
|
||||
|
||||
let num_topics = 3;
|
||||
|
||||
// Expected bloom
|
||||
let first_bloom_bytes = vec![
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
];
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
|
||||
let addr = U256::from([
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3,
|
||||
0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d,
|
||||
]);
|
||||
|
||||
let topic1 = U256::from([
|
||||
0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31,
|
||||
0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66,
|
||||
0x67, 0x4d,
|
||||
]);
|
||||
|
||||
let topic02 = 0x2a.into();
|
||||
let topic03 = 0xbd9fe6.into();
|
||||
|
||||
// Set logs memory and initialize TxnBloom and BlockBloom segments.
|
||||
let initial_stack = vec![retdest];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_bloom, initial_stack);
|
||||
let mut logs = vec![
|
||||
0.into(), // unused
|
||||
addr,
|
||||
num_topics.into(), // num_topics
|
||||
topic1, // topic1
|
||||
topic02, // topic2
|
||||
topic03, // topic3
|
||||
32.into(), // data_len
|
||||
];
|
||||
let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
|
||||
.iter()
|
||||
.copied()
|
||||
.map(U256::from);
|
||||
logs.extend(cur_data);
|
||||
// The Bloom filter initialization is required for this test to ensure we have the correct length for the filters. Otherwise, some trailing zeroes could be missing.
|
||||
interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter.
|
||||
interpreter.set_memory_segment(Segment::BlockBloom, vec![0.into(); 256]); // Initialize block Bloom filter.
|
||||
interpreter.set_memory_segment(Segment::LogsData, logs);
|
||||
interpreter.set_memory_segment(Segment::Logs, vec![0.into()]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1));
|
||||
interpreter.run()?;
|
||||
|
||||
// Check the Bloom filter of the first transaction, then process a second one.
|
||||
let loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom);
|
||||
let loaded_bloom: Vec<u8> = loaded_bloom_u256
|
||||
.into_iter()
|
||||
.map(|elt| elt.0[0] as u8)
|
||||
.collect();
|
||||
|
||||
assert_eq!(first_bloom_bytes, loaded_bloom);
|
||||
let topic12 = 0x4.into();
|
||||
let topic13 = 0x4920ea.into();
|
||||
let mut logs2 = vec![
|
||||
0.into(), // unused
|
||||
addr,
|
||||
num_topics.into(), // num_topics
|
||||
topic1, // topic1
|
||||
topic12, // topic2
|
||||
topic13, // topic3
|
||||
32.into(), // data_len
|
||||
];
|
||||
let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
|
||||
.iter()
|
||||
.copied()
|
||||
.map(U256::from);
|
||||
logs2.extend(cur_data);
|
||||
|
||||
interpreter.push(retdest);
|
||||
interpreter.generation_state.registers.program_counter = logs_bloom;
|
||||
interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter.
|
||||
interpreter.set_memory_segment(Segment::LogsData, logs2);
|
||||
interpreter.set_memory_segment(Segment::Logs, vec![0.into()]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(1));
|
||||
interpreter.run()?;
|
||||
|
||||
let second_bloom_bytes = vec![
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
];
|
||||
|
||||
let second_loaded_bloom_u256 = interpreter.get_memory_segment(Segment::TxnBloom);
|
||||
let second_loaded_bloom: Vec<u8> = second_loaded_bloom_u256
|
||||
.into_iter()
|
||||
.map(|elt| elt.0[0] as u8)
|
||||
.collect();
|
||||
|
||||
assert_eq!(second_bloom_bytes, second_loaded_bloom);
|
||||
|
||||
// Check the final block Bloom.
|
||||
let block_bloom = hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000500000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000100000000000000000000000000020000000000008000000000000000000000000").to_vec();
|
||||
let loaded_block_bloom: Vec<u8> = interpreter
|
||||
.get_memory_segment(Segment::BlockBloom)
|
||||
.into_iter()
|
||||
.map(|elt| elt.0[0] as u8)
|
||||
.collect();
|
||||
|
||||
assert_eq!(block_bloom, loaded_block_bloom);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mpt_insert_receipt() -> Result<()> {
|
||||
// This test simulates a receipt processing to test `mpt_insert_receipt_trie`.
|
||||
// For this, we need to set the data correctly in memory.
|
||||
// In TrieData, we need to insert a receipt of the form:
|
||||
// `[payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, [logs]]`.
|
||||
// We also need to set TrieDataSize correctly.
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
let trie_inputs = Default::default();
|
||||
let load_all_mpts = KERNEL.global_labels["load_all_mpts"];
|
||||
let mpt_insert = KERNEL.global_labels["mpt_insert_receipt_trie"];
|
||||
let num_topics = 3; // Both transactions have the same number of topics.
|
||||
let payload_len = 423; // Total payload length for each receipt.
|
||||
let logs_payload_len = 157; // Payload length for all logs.
|
||||
let log_payload_len = 155; // Payload length for one log.
|
||||
let num_logs = 1;
|
||||
|
||||
// Receipt_0:
|
||||
let status_0 = 1;
|
||||
let cum_gas_used_0 = 0x016e5b;
|
||||
let logs_bloom_0_bytes = vec![
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x50, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x10,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x20, 00, 00, 00, 00, 00, 0x08, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
];
|
||||
|
||||
// Logs_0:
|
||||
let logs_bloom_0: Vec<U256> = logs_bloom_0_bytes
|
||||
.into_iter()
|
||||
.map(|elt| elt.into())
|
||||
.collect();
|
||||
|
||||
let addr = U256::from([
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x7e, 0xf6, 0x6b, 0x77, 0x75, 0x9e, 0x12, 0xca, 0xf3,
|
||||
0xdd, 0xb3, 0xe4, 0xaf, 0xf5, 0x24, 0xe5, 0x77, 0xc5, 0x9d, 0x8d,
|
||||
]);
|
||||
|
||||
// The first topic is shared by the two transactions.
|
||||
let topic1 = U256::from([
|
||||
0x8a, 0x22, 0xee, 0x89, 0x91, 0x02, 0xa3, 0x66, 0xac, 0x8a, 0xd0, 0x49, 0x51, 0x27, 0x31,
|
||||
0x9c, 0xb1, 0xff, 0x24, 0x03, 0xcf, 0xae, 0x85, 0x5f, 0x83, 0xa8, 0x9c, 0xda, 0x12, 0x66,
|
||||
0x67, 0x4d,
|
||||
]);
|
||||
|
||||
let topic02 = 0x2a.into();
|
||||
let topic03 = 0xbd9fe6.into();
|
||||
|
||||
let mut logs_0 = vec![
|
||||
log_payload_len.into(), // payload_len
|
||||
addr,
|
||||
num_topics.into(), // num_topics
|
||||
topic1, // topic1
|
||||
topic02, // topic2
|
||||
topic03, // topic3
|
||||
32.into(), // data_len
|
||||
];
|
||||
let cur_data = hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58")
|
||||
.iter()
|
||||
.copied()
|
||||
.map(U256::from);
|
||||
logs_0.extend(cur_data);
|
||||
|
||||
let mut receipt: Vec<U256> = vec![423.into(), status_0.into(), cum_gas_used_0.into()];
|
||||
receipt.extend(logs_bloom_0);
|
||||
receipt.push(logs_payload_len.into()); // logs_payload_len
|
||||
receipt.push(num_logs.into()); // num_logs
|
||||
receipt.extend(logs_0.clone());
|
||||
|
||||
// First, we load all mpts.
|
||||
let initial_stack = vec![retdest];
|
||||
|
||||
let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
|
||||
interpreter.generation_state.mpt_prover_inputs = all_mpt_prover_inputs_reversed(&trie_inputs);
|
||||
interpreter.run()?;
|
||||
|
||||
// If TrieData is empty, we need to push 0 because the first value is always 0.
|
||||
let mut cur_trie_data = interpreter.get_memory_segment(Segment::TrieData);
|
||||
if cur_trie_data.is_empty() {
|
||||
cur_trie_data.push(0.into());
|
||||
}
|
||||
|
||||
// stack: transaction_nb, value_ptr, retdest
|
||||
let initial_stack = vec![retdest, cur_trie_data.len().into(), 0.into()];
|
||||
for i in 0..initial_stack.len() {
|
||||
interpreter.push(initial_stack[i]);
|
||||
}
|
||||
|
||||
interpreter.generation_state.registers.program_counter = mpt_insert;
|
||||
|
||||
// Set memory.
|
||||
cur_trie_data.extend(receipt);
|
||||
interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, cur_trie_data.len().into());
|
||||
// First insertion.
|
||||
interpreter.run()?;
|
||||
|
||||
// receipt_1:
|
||||
let status_1 = 1;
|
||||
let cum_gas_used_1 = 0x02dcb6;
|
||||
let logs_bloom_1_bytes = vec![
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 0x40, 00, 00, 00, 00, 0x10, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x02, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x08, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 0x01, 00, 00, 00, 0x40, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 0x20, 00, 0x04, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x80, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 0x08,
|
||||
00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00, 00,
|
||||
];
|
||||
|
||||
// Logs_1:
|
||||
let logs_bloom_1: Vec<U256> = logs_bloom_1_bytes
|
||||
.into_iter()
|
||||
.map(|elt| elt.into())
|
||||
.collect();
|
||||
|
||||
let topic12 = 4.into();
|
||||
let topic13 = 0x4920ea.into();
|
||||
|
||||
let mut logs_1 = vec![
|
||||
log_payload_len.into(), // payload length
|
||||
addr,
|
||||
num_topics.into(), // nb topics
|
||||
topic1, // topic1
|
||||
topic12, // topic2
|
||||
topic13, // topic3
|
||||
32.into(), // data length
|
||||
];
|
||||
let cur_data = hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
|
||||
.iter()
|
||||
.copied()
|
||||
.map(U256::from);
|
||||
logs_1.extend(cur_data);
|
||||
|
||||
let mut receipt_1: Vec<U256> = vec![payload_len.into(), status_1.into(), cum_gas_used_1.into()];
|
||||
receipt_1.extend(logs_bloom_1);
|
||||
receipt_1.push(logs_payload_len.into()); // logs payload len
|
||||
receipt_1.push(num_logs.into()); // nb logs
|
||||
receipt_1.extend(logs_1.clone());
|
||||
|
||||
// Get updated TrieData segment.
|
||||
cur_trie_data = interpreter.get_memory_segment(Segment::TrieData);
|
||||
let initial_stack2 = vec![retdest, cur_trie_data.len().into(), 1.into()];
|
||||
for i in 0..initial_stack2.len() {
|
||||
interpreter.push(initial_stack2[i]);
|
||||
}
|
||||
cur_trie_data.extend(receipt_1);
|
||||
|
||||
// Set memory.
|
||||
interpreter.generation_state.registers.program_counter = mpt_insert;
|
||||
interpreter.set_memory_segment(Segment::TrieData, cur_trie_data.clone());
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::TrieDataSize, cur_trie_data.len().into());
|
||||
interpreter.run()?;
|
||||
|
||||
// Finally, check that the hashes correspond.
|
||||
let mpt_hash_receipt = KERNEL.global_labels["mpt_hash_receipt_trie"];
|
||||
interpreter.generation_state.registers.program_counter = mpt_hash_receipt;
|
||||
interpreter.push(retdest);
|
||||
interpreter.run()?;
|
||||
assert_eq!(
|
||||
interpreter.stack()[0],
|
||||
U256::from(hex!(
|
||||
"da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98"
|
||||
))
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bloom_two_logs() -> Result<()> {
|
||||
// Tests the Bloom filter computation with two logs in one transaction.
|
||||
|
||||
// address
|
||||
let to = [
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x09, 0x5e, 0x7b, 0xae, 0xa6, 0xa6, 0xc7, 0xc4, 0xc2,
|
||||
0xdf, 0xeb, 0x97, 0x7e, 0xfa, 0xc3, 0x26, 0xaf, 0x55, 0x2d, 0x87,
|
||||
];
|
||||
|
||||
let retdest = 0xDEADBEEFu32.into();
|
||||
let logs_bloom = KERNEL.global_labels["logs_bloom"];
|
||||
|
||||
let initial_stack = vec![retdest];
|
||||
|
||||
// Set memory.
|
||||
let logs = vec![
|
||||
0.into(), // unused
|
||||
to.into(), // address
|
||||
0.into(), // num_topics
|
||||
0.into(), // data_len,
|
||||
0.into(), // unused: rlp
|
||||
to.into(),
|
||||
2.into(), // num_topics
|
||||
0x62.into(),
|
||||
0x63.into(),
|
||||
5.into(),
|
||||
[
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xa1,
|
||||
0xb2, 0xc3, 0xd4, 0xe5,
|
||||
]
|
||||
.into(),
|
||||
];
|
||||
let mut interpreter = Interpreter::new_with_kernel(logs_bloom, initial_stack);
|
||||
interpreter.set_memory_segment(Segment::TxnBloom, vec![0.into(); 256]); // Initialize transaction Bloom filter.
|
||||
interpreter.set_memory_segment(Segment::BlockBloom, vec![0.into(); 256]); // Initialize block Bloom filter.
|
||||
interpreter.set_memory_segment(Segment::LogsData, logs);
|
||||
interpreter.set_memory_segment(Segment::Logs, vec![0.into(), 4.into()]);
|
||||
interpreter.set_global_metadata_field(GlobalMetadata::LogsLen, U256::from(2));
|
||||
interpreter.run()?;
|
||||
|
||||
let loaded_bloom_bytes: Vec<u8> = interpreter
|
||||
.get_memory_segment(Segment::TxnBloom)
|
||||
.into_iter()
|
||||
.map(|elt| elt.0[0] as u8)
|
||||
.collect();
|
||||
|
||||
let expected = hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000").to_vec();
|
||||
|
||||
assert_eq!(expected, loaded_bloom_bytes);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn logs_bloom_bytes_fn(logs_list: Vec<(Vec<u8>, Vec<Vec<u8>>)>) -> [u8; 256] {
|
||||
// Fold every (address, topics) pair of `logs_list` into a single Bloom filter.
|
||||
let mut bloom = [0_u8; 256];
|
||||
|
||||
for log in logs_list {
|
||||
let cur_addr = log.0;
|
||||
let topics = log.1;
|
||||
|
||||
add_to_bloom(&mut bloom, &cur_addr);
|
||||
for topic in topics {
|
||||
add_to_bloom(&mut bloom, &topic);
|
||||
}
|
||||
}
|
||||
bloom
|
||||
}
|
||||
|
||||
fn add_to_bloom(bloom: &mut [u8; 256], bloom_entry: &[u8]) {
|
||||
let bloom_hash = keccak(bloom_entry).to_fixed_bytes();
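// For each of the first three big-endian 16-bit chunks of the Keccak hash, keep the low
// 11 bits (a value v in 0..2048) and set bit v of the 2048-bit filter, where bit 0 is the
// least significant bit of the last byte of the array.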
|
||||
|
||||
for idx in 0..3 {
|
||||
let bit_pair = u16::from_be_bytes(bloom_hash[2 * idx..2 * (idx + 1)].try_into().unwrap());
|
||||
let bit_to_set = 0x07FF - (bit_pair & 0x07FF);
|
||||
let byte_index = bit_to_set / 8;
|
||||
let bit_value = 1 << (7 - bit_to_set % 8);
|
||||
bloom[byte_index as usize] |= bit_value;
|
||||
}
|
||||
}
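
// Illustrative smoke check for the helpers above (not part of the original kernel tests):
// each Bloom entry sets at most three bits, so one address plus one topic sets between one
// and six bits in total.
#[test]
fn test_logs_bloom_bytes_fn_smoke() {
    let bloom = logs_bloom_bytes_fn(vec![(vec![0_u8; 20], vec![vec![0_u8; 32]])]);
    let set_bits: u32 = bloom.iter().map(|byte| byte.count_ones()).sum();
    assert!((1..=6).contains(&set_bits));
}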
|
||||
@ -1,10 +1,12 @@
|
||||
use std::collections::HashMap;
|
||||
use std::ops::Deref;
|
||||
|
||||
use bytes::Bytes;
|
||||
use eth_trie_utils::nibbles::Nibbles;
|
||||
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
|
||||
use ethereum_types::{BigEndianHash, H256, U256, U512};
|
||||
use ethereum_types::{Address, BigEndianHash, H256, U256, U512};
|
||||
use keccak_hash::keccak;
|
||||
use rlp::PayloadInfo;
|
||||
use rlp_derive::{RlpDecodable, RlpEncodable};
|
||||
|
||||
use crate::cpu::kernel::constants::trie_type::PartialTrieType;
|
||||
@ -30,12 +32,67 @@ impl Default for AccountRlp {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(RlpEncodable, RlpDecodable, Debug)]
|
||||
pub struct LegacyTransactionRlp {
|
||||
pub nonce: U256,
|
||||
pub gas_price: U256,
|
||||
pub gas: U256,
|
||||
pub to: Address,
|
||||
pub value: U256,
|
||||
pub data: Bytes,
|
||||
pub v: U256,
|
||||
pub r: U256,
|
||||
pub s: U256,
|
||||
}
|
||||
|
||||
#[derive(RlpEncodable, RlpDecodable, Debug)]
|
||||
pub struct LogRlp {
|
||||
pub address: Address,
|
||||
pub topics: Vec<H256>,
|
||||
pub data: Bytes,
|
||||
}
|
||||
|
||||
#[derive(RlpEncodable, RlpDecodable, Debug)]
|
||||
pub struct LegacyReceiptRlp {
|
||||
pub status: bool,
|
||||
pub cum_gas_used: U256,
|
||||
pub bloom: Bytes,
|
||||
pub logs: Vec<LogRlp>,
|
||||
}
|
||||
|
||||
pub(crate) fn all_mpt_prover_inputs_reversed(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
let mut inputs = all_mpt_prover_inputs(trie_inputs);
|
||||
inputs.reverse();
|
||||
inputs
|
||||
}
|
||||
|
||||
pub(crate) fn parse_receipts(rlp: &[u8]) -> Vec<U256> {
|
||||
let payload_info = PayloadInfo::from(rlp).unwrap();
|
||||
let decoded_receipt: LegacyReceiptRlp = rlp::decode(rlp).unwrap();
|
||||
let mut parsed_receipt = Vec::new();
|
||||
|
||||
parsed_receipt.push(payload_info.value_len.into()); // payload_len of the entire receipt
|
||||
parsed_receipt.push((decoded_receipt.status as u8).into());
|
||||
parsed_receipt.push(decoded_receipt.cum_gas_used);
|
||||
parsed_receipt.extend(decoded_receipt.bloom.iter().map(|byte| U256::from(*byte)));
|
||||
let encoded_logs = rlp::encode_list(&decoded_receipt.logs);
|
||||
let logs_payload_info = PayloadInfo::from(&encoded_logs).unwrap();
|
||||
parsed_receipt.push(logs_payload_info.value_len.into()); // payload_len of all the logs
|
||||
parsed_receipt.push(decoded_receipt.logs.len().into());
|
||||
|
||||
for log in decoded_receipt.logs {
|
||||
let encoded_log = rlp::encode(&log);
|
||||
let log_payload_info = PayloadInfo::from(&encoded_log).unwrap();
|
||||
parsed_receipt.push(log_payload_info.value_len.into()); // payload of one log
|
||||
parsed_receipt.push(U256::from_big_endian(&log.address.to_fixed_bytes()));
|
||||
parsed_receipt.push(log.topics.len().into());
|
||||
parsed_receipt.extend(log.topics.iter().map(|topic| U256::from(topic.as_bytes())));
|
||||
parsed_receipt.push(log.data.len().into());
|
||||
parsed_receipt.extend(log.data.iter().map(|byte| U256::from(*byte)));
|
||||
}
|
||||
|
||||
parsed_receipt
|
||||
}
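
// Flattening sketch (illustrative, not exercised by the kernel tests): for a receipt with
// no logs, `parse_receipts` yields
//   [payload_len, status, cum_gas_used, bloom[0..256], logs_payload_len, num_logs],
// i.e. 261 entries, the last two being zero since the encoded empty log list is just `0xc0`.
#[cfg(test)]
mod receipt_parsing_sketch {
    use super::*;

    #[test]
    fn parse_receipts_without_logs() {
        let receipt = LegacyReceiptRlp {
            status: true,
            cum_gas_used: 0x5208u64.into(),
            bloom: vec![0_u8; 256].into(),
            logs: vec![],
        };
        let flat = parse_receipts(&rlp::encode(&receipt));
        assert_eq!(flat.len(), 3 + 256 + 2);
        assert_eq!(flat[1], U256::one()); // status
        assert_eq!(flat[2], U256::from(0x5208u64)); // cum_gas_used
        assert_eq!(flat[259], U256::zero()); // logs_payload_len
        assert_eq!(flat[260], U256::zero()); // num_logs
    }
}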
|
||||
/// Generate prover inputs for the initial MPT data, in the format expected by `mpt/load.asm`.
|
||||
pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
let mut prover_inputs = vec![];
|
||||
@ -60,10 +117,20 @@ pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Vec<U256> {
|
||||
rlp::decode_list(rlp)
|
||||
});
|
||||
|
||||
mpt_prover_inputs(&trie_inputs.receipts_trie, &mut prover_inputs, &|_rlp| {
|
||||
// TODO: Decode receipt RLP.
|
||||
vec![]
|
||||
});
|
||||
mpt_prover_inputs(
|
||||
&trie_inputs.receipts_trie,
|
||||
&mut prover_inputs,
|
||||
&parse_receipts,
|
||||
);
|
||||
|
||||
// Temporary! The actual number of transactions in the trie cannot be known if the trie
|
||||
// contains hash nodes.
|
||||
let num_transactions = trie_inputs
|
||||
.transactions_trie
|
||||
.values()
|
||||
.collect::<Vec<_>>()
|
||||
.len();
|
||||
prover_inputs.push(num_transactions.into());
|
||||
|
||||
prover_inputs
|
||||
}
|
||||
|
||||
@ -49,18 +49,25 @@ pub enum Segment {
|
||||
AccessedStorageKeys = 24,
|
||||
/// List of addresses that have called SELFDESTRUCT in the current transaction.
|
||||
SelfDestructList = 25,
|
||||
/// Contains the bloom filter of a transaction.
|
||||
TxnBloom = 26,
|
||||
/// Contains the bloom filter of a block.
|
||||
BlockBloom = 27,
|
||||
/// List of log pointers pointing to the LogsData segment.
|
||||
Logs = 28,
|
||||
LogsData = 29,
|
||||
/// Journal of state changes. List of pointers to `JournalData`. Length in `GlobalMetadata`.
|
||||
Journal = 26,
|
||||
JournalData = 27,
|
||||
JournalCheckpoints = 28,
|
||||
Journal = 30,
|
||||
JournalData = 31,
|
||||
JournalCheckpoints = 32,
|
||||
/// List of addresses that have been touched in the current transaction.
|
||||
TouchedAddresses = 29,
|
||||
TouchedAddresses = 33,
|
||||
/// List of checkpoints for the current context. Length in `ContextMetadata`.
|
||||
ContextCheckpoints = 30,
|
||||
ContextCheckpoints = 34,
|
||||
}
|
||||
|
||||
impl Segment {
|
||||
pub(crate) const COUNT: usize = 31;
|
||||
pub(crate) const COUNT: usize = 35;
|
||||
|
||||
pub(crate) fn all() -> [Self; Self::COUNT] {
|
||||
[
|
||||
@ -90,6 +97,10 @@ impl Segment {
|
||||
Self::AccessedAddresses,
|
||||
Self::AccessedStorageKeys,
|
||||
Self::SelfDestructList,
|
||||
Self::TxnBloom,
|
||||
Self::BlockBloom,
|
||||
Self::Logs,
|
||||
Self::LogsData,
|
||||
Self::Journal,
|
||||
Self::JournalData,
|
||||
Self::JournalCheckpoints,
|
||||
@ -127,6 +138,10 @@ impl Segment {
|
||||
Segment::AccessedAddresses => "SEGMENT_ACCESSED_ADDRESSES",
|
||||
Segment::AccessedStorageKeys => "SEGMENT_ACCESSED_STORAGE_KEYS",
|
||||
Segment::SelfDestructList => "SEGMENT_SELFDESTRUCT_LIST",
|
||||
Segment::TxnBloom => "SEGMENT_TXN_BLOOM",
|
||||
Segment::BlockBloom => "SEGMENT_BLOCK_BLOOM",
|
||||
Segment::Logs => "SEGMENT_LOGS",
|
||||
Segment::LogsData => "SEGMENT_LOGS_DATA",
|
||||
Segment::Journal => "SEGMENT_JOURNAL",
|
||||
Segment::JournalData => "SEGMENT_JOURNAL_DATA",
|
||||
Segment::JournalCheckpoints => "SEGMENT_JOURNAL_CHECKPOINTS",
|
||||
@ -164,6 +179,10 @@ impl Segment {
|
||||
Segment::AccessedAddresses => 256,
|
||||
Segment::AccessedStorageKeys => 256,
|
||||
Segment::SelfDestructList => 256,
|
||||
Segment::TxnBloom => 8,
|
||||
Segment::BlockBloom => 8,
|
||||
Segment::Logs => 256,
|
||||
Segment::LogsData => 256,
|
||||
Segment::Journal => 256,
|
||||
Segment::JournalData => 256,
|
||||
Segment::JournalCheckpoints => 256,
|
||||
|
||||
@ -71,6 +71,7 @@ pub struct BlockMetadata {
|
||||
pub block_gaslimit: U256,
|
||||
pub block_chain_id: U256,
|
||||
pub block_base_fee: U256,
|
||||
pub block_bloom: [U256; 8],
|
||||
}
|
||||
|
||||
/// Memory values which are public.
|
||||
@ -90,9 +91,9 @@ impl PublicValuesTarget {
|
||||
receipts_root: receipts_root_before,
|
||||
} = self.trie_roots_before;
|
||||
|
||||
buffer.write_target_vec(&state_root_before)?;
|
||||
buffer.write_target_vec(&transactions_root_before)?;
|
||||
buffer.write_target_vec(&receipts_root_before)?;
|
||||
buffer.write_target_array(&state_root_before)?;
|
||||
buffer.write_target_array(&transactions_root_before)?;
|
||||
buffer.write_target_array(&receipts_root_before)?;
|
||||
|
||||
let TrieRootsTarget {
|
||||
state_root: state_root_after,
|
||||
@ -100,9 +101,9 @@ impl PublicValuesTarget {
|
||||
receipts_root: receipts_root_after,
|
||||
} = self.trie_roots_after;
|
||||
|
||||
buffer.write_target_vec(&state_root_after)?;
|
||||
buffer.write_target_vec(&transactions_root_after)?;
|
||||
buffer.write_target_vec(&receipts_root_after)?;
|
||||
buffer.write_target_array(&state_root_after)?;
|
||||
buffer.write_target_array(&transactions_root_after)?;
|
||||
buffer.write_target_array(&receipts_root_after)?;
|
||||
|
||||
let BlockMetadataTarget {
|
||||
block_beneficiary,
|
||||
@ -112,40 +113,43 @@ impl PublicValuesTarget {
|
||||
block_gaslimit,
|
||||
block_chain_id,
|
||||
block_base_fee,
|
||||
block_bloom,
|
||||
} = self.block_metadata;
|
||||
|
||||
buffer.write_target_vec(&block_beneficiary)?;
|
||||
buffer.write_target_array(&block_beneficiary)?;
|
||||
buffer.write_target(block_timestamp)?;
|
||||
buffer.write_target(block_number)?;
|
||||
buffer.write_target(block_difficulty)?;
|
||||
buffer.write_target(block_gaslimit)?;
|
||||
buffer.write_target(block_chain_id)?;
|
||||
buffer.write_target_vec(&block_base_fee)?;
|
||||
buffer.write_target_array(&block_base_fee)?;
|
||||
buffer.write_target_array(&block_bloom)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn from_buffer(buffer: &mut Buffer) -> IoResult<Self> {
|
||||
let trie_roots_before = TrieRootsTarget {
|
||||
state_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
transactions_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
receipts_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
state_root: buffer.read_target_array()?,
|
||||
transactions_root: buffer.read_target_array()?,
|
||||
receipts_root: buffer.read_target_array()?,
|
||||
};
|
||||
|
||||
let trie_roots_after = TrieRootsTarget {
|
||||
state_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
transactions_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
receipts_root: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
state_root: buffer.read_target_array()?,
|
||||
transactions_root: buffer.read_target_array()?,
|
||||
receipts_root: buffer.read_target_array()?,
|
||||
};
|
||||
|
||||
let block_metadata = BlockMetadataTarget {
|
||||
block_beneficiary: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
block_beneficiary: buffer.read_target_array()?,
|
||||
block_timestamp: buffer.read_target()?,
|
||||
block_number: buffer.read_target()?,
|
||||
block_difficulty: buffer.read_target()?,
|
||||
block_gaslimit: buffer.read_target()?,
|
||||
block_chain_id: buffer.read_target()?,
|
||||
block_base_fee: buffer.read_target_vec()?.try_into().unwrap(),
|
||||
block_base_fee: buffer.read_target_array()?,
|
||||
block_bloom: buffer.read_target_array()?,
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
@ -265,10 +269,11 @@ pub struct BlockMetadataTarget {
|
||||
pub block_gaslimit: Target,
|
||||
pub block_chain_id: Target,
|
||||
pub block_base_fee: [Target; 2],
|
||||
pub block_bloom: [Target; 64],
|
||||
}
|
||||
|
||||
impl BlockMetadataTarget {
|
||||
const SIZE: usize = 12;
|
||||
const SIZE: usize = 76;
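    // 76 = 5 (beneficiary) + 1 (timestamp) + 1 (number) + 1 (difficulty) + 1 (gaslimit)
    //    + 1 (chain_id) + 2 (base_fee) + 64 (bloom limbs), matching the slices read in
    //    `from_public_inputs` below.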
|
||||
|
||||
pub fn from_public_inputs(pis: &[Target]) -> Self {
|
||||
let block_beneficiary = pis[0..5].try_into().unwrap();
|
||||
@ -278,6 +283,7 @@ impl BlockMetadataTarget {
|
||||
let block_gaslimit = pis[8];
|
||||
let block_chain_id = pis[9];
|
||||
let block_base_fee = pis[10..12].try_into().unwrap();
|
||||
let block_bloom = pis[12..76].try_into().unwrap();
|
||||
|
||||
Self {
|
||||
block_beneficiary,
|
||||
@ -287,6 +293,7 @@ impl BlockMetadataTarget {
|
||||
block_gaslimit,
|
||||
block_chain_id,
|
||||
block_base_fee,
|
||||
block_bloom,
|
||||
}
|
||||
}
|
||||
|
||||
@ -312,6 +319,9 @@ impl BlockMetadataTarget {
|
||||
block_base_fee: core::array::from_fn(|i| {
|
||||
builder.select(condition, bm0.block_base_fee[i], bm1.block_base_fee[i])
|
||||
}),
|
||||
block_bloom: core::array::from_fn(|i| {
|
||||
builder.select(condition, bm0.block_bloom[i], bm1.block_bloom[i])
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -331,6 +341,9 @@ impl BlockMetadataTarget {
|
||||
for i in 0..2 {
|
||||
builder.connect(bm0.block_base_fee[i], bm1.block_base_fee[i])
|
||||
}
|
||||
for i in 0..64 {
|
||||
builder.connect(bm0.block_bloom[i], bm1.block_bloom[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -683,6 +683,7 @@ pub(crate) fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D:
|
||||
let block_gaslimit = builder.add_virtual_public_input();
|
||||
let block_chain_id = builder.add_virtual_public_input();
|
||||
let block_base_fee = builder.add_virtual_public_input_arr();
|
||||
let block_bloom = builder.add_virtual_public_input_arr();
|
||||
BlockMetadataTarget {
|
||||
block_beneficiary,
|
||||
block_timestamp,
|
||||
@ -691,6 +692,7 @@ pub(crate) fn add_virtual_block_metadata<F: RichField + Extendable<D>, const D:
|
||||
block_gaslimit,
|
||||
block_chain_id,
|
||||
block_base_fee,
|
||||
block_bloom,
|
||||
}
|
||||
}
|
||||
|
||||
@ -892,4 +894,9 @@ pub(crate) fn set_block_metadata_target<F, W, const D: usize>(
|
||||
block_metadata_target.block_base_fee[1],
|
||||
F::from_canonical_u32((block_metadata.block_base_fee.as_u64() >> 32) as u32),
|
||||
);
|
||||
let mut block_bloom_limbs = [F::ZERO; 64];
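    // Each of the 8 U256 Bloom words is decomposed into 8 32-bit limbs by `u256_limbs`,
    // filling the 64 Bloom targets of `BlockMetadataTarget`.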
|
||||
for (i, limbs) in block_bloom_limbs.chunks_exact_mut(8).enumerate() {
|
||||
limbs.copy_from_slice(&u256_limbs(block_metadata.block_bloom[i]));
|
||||
}
|
||||
witness.set_target_arr(&block_metadata_target.block_bloom, &block_bloom_limbs);
|
||||
}
|
||||
|
||||
@ -202,12 +202,20 @@ impl Default for MemoryState {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Debug)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct MemoryContextState {
|
||||
/// The content of each memory segment.
|
||||
pub(crate) segments: [MemorySegmentState; Segment::COUNT],
|
||||
}
|
||||
|
||||
impl Default for MemoryContextState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
segments: std::array::from_fn(|_| MemorySegmentState::default()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub(crate) struct MemorySegmentState {
|
||||
pub(crate) content: Vec<U256>,
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
|
||||
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
|
||||
@ -12,7 +13,7 @@ use plonky2::plonk::config::KeccakGoldilocksConfig;
|
||||
use plonky2::util::timing::TimingTree;
|
||||
use plonky2_evm::all_stark::AllStark;
|
||||
use plonky2_evm::config::StarkConfig;
|
||||
use plonky2_evm::generation::mpt::AccountRlp;
|
||||
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
|
||||
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
|
||||
use plonky2_evm::proof::{BlockMetadata, TrieRoots};
|
||||
use plonky2_evm::prover::prove;
|
||||
@ -85,6 +86,7 @@ fn add11_yml() -> anyhow::Result<()> {
|
||||
block_gaslimit: 0xff112233u32.into(),
|
||||
block_chain_id: 1.into(),
|
||||
block_base_fee: 0xa.into(),
|
||||
block_bloom: [0.into(); 8],
|
||||
};
|
||||
|
||||
let mut contract_code = HashMap::new();
|
||||
@ -123,10 +125,22 @@ fn add11_yml() -> anyhow::Result<()> {
|
||||
expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec());
|
||||
expected_state_trie_after
|
||||
};
|
||||
|
||||
let receipt_0 = LegacyReceiptRlp {
|
||||
status: true,
|
||||
cum_gas_used: 0xa868u64.into(),
|
||||
bloom: vec![0; 256].into(),
|
||||
logs: vec![],
|
||||
};
|
||||
let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
|
||||
receipts_trie.insert(
|
||||
Nibbles::from_str("0x80").unwrap(),
|
||||
rlp::encode(&receipt_0).to_vec(),
|
||||
);
|
||||
let trie_roots_after = TrieRoots {
|
||||
state_root: expected_state_trie_after.hash(),
|
||||
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
|
||||
receipts_root: tries_before.receipts_trie.hash(), // TODO: Fix this when we have receipts trie.
|
||||
receipts_root: receipts_trie.hash(),
|
||||
};
|
||||
let inputs = GenerationInputs {
|
||||
signed_txns: vec![txn.to_vec()],
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
|
||||
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
|
||||
@ -13,7 +14,7 @@ use plonky2::util::timing::TimingTree;
|
||||
use plonky2_evm::all_stark::AllStark;
|
||||
use plonky2_evm::config::StarkConfig;
|
||||
use plonky2_evm::cpu::kernel::opcodes::{get_opcode, get_push_opcode};
|
||||
use plonky2_evm::generation::mpt::AccountRlp;
|
||||
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
|
||||
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
|
||||
use plonky2_evm::proof::{BlockMetadata, TrieRoots};
|
||||
use plonky2_evm::prover::prove;
|
||||
@ -102,10 +103,9 @@ fn test_basic_smart_contract() -> anyhow::Result<()> {
|
||||
contract_code.insert(keccak(vec![]), vec![]);
|
||||
contract_code.insert(code_hash, code.to_vec());
|
||||
|
||||
let txdata_gas = 2 * 16;
|
||||
let gas_used = 21_000 + code_gas + txdata_gas;
|
||||
let expected_state_trie_after: HashedPartialTrie = {
|
||||
let txdata_gas = 2 * 16;
|
||||
let gas_used = 21_000 + code_gas + txdata_gas;
|
||||
|
||||
let beneficiary_account_after = AccountRlp {
|
||||
balance: beneficiary_account_before.balance + gas_used * 10,
|
||||
..beneficiary_account_before
|
||||
@ -142,10 +142,23 @@ fn test_basic_smart_contract() -> anyhow::Result<()> {
|
||||
}
|
||||
}
|
||||
.into();
|
||||
|
||||
let receipt_0 = LegacyReceiptRlp {
|
||||
status: true,
|
||||
cum_gas_used: gas_used.into(),
|
||||
bloom: vec![0; 256].into(),
|
||||
logs: vec![],
|
||||
};
|
||||
let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
|
||||
receipts_trie.insert(
|
||||
Nibbles::from_str("0x80").unwrap(),
|
||||
rlp::encode(&receipt_0).to_vec(),
|
||||
);
|
||||
|
||||
let trie_roots_after = TrieRoots {
|
||||
state_root: expected_state_trie_after.hash(),
|
||||
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
|
||||
receipts_root: tries_before.receipts_trie.hash(), // TODO: Fix this when we have receipts trie.
|
||||
receipts_root: receipts_trie.hash(),
|
||||
};
|
||||
let inputs = GenerationInputs {
|
||||
signed_txns: vec![txn.to_vec()],
|
||||
|
||||
507
evm/tests/log_opcode.rs
Normal file
@ -0,0 +1,507 @@
|
||||
#![allow(clippy::upper_case_acronyms)]
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
|
||||
use bytes::Bytes;
|
||||
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
|
||||
use eth_trie_utils::nibbles::Nibbles;
|
||||
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
|
||||
use ethereum_types::Address;
|
||||
use hex_literal::hex;
|
||||
use keccak_hash::keccak;
|
||||
use plonky2::field::goldilocks_field::GoldilocksField;
|
||||
use plonky2::plonk::config::PoseidonGoldilocksConfig;
|
||||
use plonky2::util::timing::TimingTree;
|
||||
use plonky2_evm::all_stark::AllStark;
|
||||
use plonky2_evm::config::StarkConfig;
|
||||
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp, LegacyTransactionRlp, LogRlp};
|
||||
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
|
||||
use plonky2_evm::proof::{BlockMetadata, TrieRoots};
|
||||
use plonky2_evm::prover::prove;
|
||||
use plonky2_evm::verifier::verify_proof;
|
||||
use plonky2_evm::Node;
|
||||
|
||||
type F = GoldilocksField;
|
||||
const D: usize = 2;
|
||||
type C = PoseidonGoldilocksConfig;
|
||||
|
||||
/// Variation of `add11_yml` testing LOG opcodes.
|
||||
#[test]
|
||||
#[ignore] // Too slow to run on CI.
|
||||
fn test_log_opcodes() -> anyhow::Result<()> {
|
||||
init_logger();
|
||||
|
||||
let all_stark = AllStark::<F, D>::default();
|
||||
let config = StarkConfig::standard_fast_config();
|
||||
|
||||
let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba");
|
||||
let sender = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446");
|
||||
// Private key: DCDFF53B4F013DBCDC717F89FE3BF4D8B10512AAE282B48E01D7530470382701
|
||||
let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87");
|
||||
|
||||
let beneficiary_state_key = keccak(beneficiary);
|
||||
let sender_state_key = keccak(sender);
|
||||
let to_hashed = keccak(to);
|
||||
|
||||
let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap();
|
||||
let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap();
|
||||
let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap();
|
||||
|
||||
// For the first transaction, the contract code performs two LOG opcodes. The first deals with 0 topics and empty data. The second deals with two topics, and data of length 5 stored in memory.
|
||||
let code = [
|
||||
0x64, 0xA1, 0xB2, 0xC3, 0xD4, 0xE5, 0x60, 0x0, 0x52, // MSTORE(0x0, 0xA1B2C3D4E5)
|
||||
0x60, 0x0, 0x60, 0x0, 0xA0, // LOG0(0x0, 0x0)
|
||||
0x60, 99, 0x60, 98, 0x60, 5, 0x60, 27, 0xA2, // LOG2(27, 5, 98, 99)
|
||||
0x00,
|
||||
];
|
||||
println!("contract: {:02x?}", code);
|
||||
let code_gas = 3 + 3 + 3 // PUSHs and MSTORE
|
||||
+ 3 + 3 + 375 // PUSHs and LOG0
|
||||
+ 3 + 3 + 3 + 3 + 375 + 375*2 + 8*5 + 3// PUSHs, LOG2 and memory expansion
|
||||
;
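// The LOGn terms above follow the standard cost formula: 375 static + 375 per topic
// + 8 per byte of data, plus memory expansion.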
|
||||
let gas_used = 21_000 + code_gas;
|
||||
|
||||
let code_hash = keccak(code);
|
||||
|
||||
// Set accounts before the transaction.
|
||||
let beneficiary_account_before = AccountRlp {
|
||||
nonce: 1.into(),
|
||||
..AccountRlp::default()
|
||||
};
|
||||
|
||||
let sender_balance_before = 5000000000000000u64;
|
||||
let sender_account_before = AccountRlp {
|
||||
balance: sender_balance_before.into(),
|
||||
..AccountRlp::default()
|
||||
};
|
||||
let to_account_before = AccountRlp {
|
||||
balance: 9000000000u64.into(),
|
||||
code_hash,
|
||||
..AccountRlp::default()
|
||||
};
|
||||
|
||||
// Initialize the state trie with three accounts.
|
||||
let mut state_trie_before = HashedPartialTrie::from(Node::Empty);
|
||||
state_trie_before.insert(
|
||||
beneficiary_nibbles,
|
||||
rlp::encode(&beneficiary_account_before).to_vec(),
|
||||
);
|
||||
state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec());
|
||||
state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec());
|
||||
|
||||
// We now add two receipts with logs and data. This updates the receipt trie as well.
    let log_0 = LogRlp {
        address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
        topics: vec![
            hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(),
            hex!("000000000000000000000000000000000000000000000000000000000000002a").into(),
            hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6").into(),
        ],
        data: hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58")
            .to_vec()
            .into(),
    };

    let receipt_0 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 0x016e5bu64.into(),
        bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000").to_vec().into(),
        logs: vec![log_0],
    };

    // Insert the first receipt into the initial receipt trie. The initial receipt trie already contains a node keyed at an arbitrary nibble path.
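    // (Using the arbitrary key 0x1337 exercises insertion into a receipt trie that is already non-empty.)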
    let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
    receipts_trie.insert(
        Nibbles::from_str("0x1337").unwrap(),
        rlp::encode(&receipt_0).to_vec(),
    );

    let tries_before = TrieInputs {
        state_trie: state_trie_before,
        transactions_trie: Node::Empty.into(),
        receipts_trie: receipts_trie.clone(),
        storage_tries: vec![(to_hashed, Node::Empty.into())],
    };

    // Prove a transaction which carries out two LOG opcodes.
    let txn_gas_price = 10;
    let txn = hex!("f860800a830186a094095e7baea6a6c7c4c2dfeb977efac326af552d87808026a0c3040cb042c541f9440771879b6bbf3f91464b265431de87eea1ec3206350eb8a046f5f3d06b8816f19f24ee919fd84bfb736db71df10a72fba4495f479e96f678");
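    // This is a signed legacy transaction with nonce 0, gas price 10 and a 100,000 gas limit, calling the contract at `to` with zero value and empty calldata.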

    let block_metadata = BlockMetadata {
        block_beneficiary: Address::from(beneficiary),
        block_timestamp: 0x03e8.into(),
        block_number: 1.into(),
        block_difficulty: 0x020000.into(),
        block_gaslimit: 0xffffffffu32.into(),
        block_chain_id: 1.into(),
        block_base_fee: 0xa.into(),
        block_bloom: [0.into(); 8],
    };

    let mut contract_code = HashMap::new();
    contract_code.insert(keccak(vec![]), vec![]);
    contract_code.insert(code_hash, code.to_vec());

    // Update the state and receipt tries after the transaction, so that we have the correct expected tries:
    // Update accounts
    let beneficiary_account_after = AccountRlp {
        nonce: 1.into(),
        ..AccountRlp::default()
    };

    let sender_balance_after = sender_balance_before - gas_used * txn_gas_price;
    let sender_account_after = AccountRlp {
        balance: sender_balance_after.into(),
        nonce: 1.into(),
        ..AccountRlp::default()
    };
    let to_account_after = AccountRlp {
        balance: 9000000000u64.into(),
        code_hash,
        ..AccountRlp::default()
    };

    // Update the receipt trie.
    let first_log = LogRlp {
        address: to.into(),
        topics: vec![],
        data: Bytes::new(),
    };

    let second_log = LogRlp {
        address: to.into(),
        topics: vec![
            hex!("0000000000000000000000000000000000000000000000000000000000000062").into(), // dec: 98
            hex!("0000000000000000000000000000000000000000000000000000000000000063").into(), // dec: 99
        ],
        data: hex!("a1b2c3d4e5").to_vec().into(),
    };

    let receipt = LegacyReceiptRlp {
        status: true,
        cum_gas_used: gas_used.into(),
        bloom: hex!("00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000004000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000400000000000040000000000000000000000000002000000000000000000000000000").to_vec().into(),
        logs: vec![first_log, second_log],
    };

    let receipt_nibbles = Nibbles::from_str("0x80").unwrap(); // RLP(0) = 0x80

    receipts_trie.insert(receipt_nibbles, rlp::encode(&receipt).to_vec());

    // Update the state trie.
    let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty);
    expected_state_trie_after.insert(
        beneficiary_nibbles,
        rlp::encode(&beneficiary_account_after).to_vec(),
    );
    expected_state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec());
    expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec());

    let trie_roots_after = TrieRoots {
        state_root: expected_state_trie_after.hash(),
        transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
        receipts_root: receipts_trie.hash(),
    };
    let inputs = GenerationInputs {
        signed_txns: vec![txn.to_vec()],
        tries: tries_before,
        trie_roots_after,
        contract_code,
        block_metadata,
        addresses: vec![],
    };

    let mut timing = TimingTree::new("prove", log::Level::Debug);
    let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing)?;
    timing.filter(Duration::from_millis(100)).print();

    // Assert that the proof leads to the correct state and receipt roots.
    assert_eq!(
        proof.public_values.trie_roots_after.state_root,
        expected_state_trie_after.hash()
    );

    assert_eq!(
        proof.public_values.trie_roots_after.receipts_root,
        receipts_trie.hash()
    );

    verify_proof(&all_stark, proof, &config)
}

/// Values taken from block 1000000 of Goerli: https://goerli.etherscan.io/txs?block=1000000
#[test]
fn test_txn_and_receipt_trie_hash() -> anyhow::Result<()> {
    // This test checks that inserting into the transaction and receipt `HashedPartialTrie`s works as expected.
    let mut example_txn_trie = HashedPartialTrie::from(Node::Empty);

    // We consider two transactions, with one log each.
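    // Transaction and receipt tries are both keyed by the RLP encoding of the transaction index: RLP(0) = 0x80 and RLP(1) = 0x01.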
    let transaction_0 = LegacyTransactionRlp {
        nonce: 157823u64.into(),
        gas_price: 1000000000u64.into(),
        gas: 250000u64.into(),
        to: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
        value: 0u64.into(),
        data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000bd9fe6f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58")
            .to_vec()
            .into(),
        v: 0x1c.into(),
        r: hex!("d0eeac4841caf7a894dd79e6e633efc2380553cdf8b786d1aa0b8a8dee0266f4").into(),
        s: hex!("740710eed9696c663510b7fb71a553112551121595a54ec6d2ec0afcec72a973").into(),
    };

    // Insert the first transaction into the transaction trie.
    example_txn_trie.insert(
        Nibbles::from_str("0x80").unwrap(), // RLP(0) = 0x80
        rlp::encode(&transaction_0).to_vec(),
    );

    let transaction_1 = LegacyTransactionRlp {
        nonce: 157824u64.into(),
        gas_price: 1000000000u64.into(),
        gas: 250000u64.into(),
        to: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
        value: 0u64.into(),
        data: hex!("e9c6c176000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000004920eaa814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
            .to_vec()
            .into(),
        v: 0x1b.into(),
        r: hex!("a3ff39967683fc684dc7b857d6f62723e78804a14b091a058ad95cc1b8a0281f").into(),
        s: hex!("51b156e05f21f499fa1ae47ebf536b15a237208f1d4a62e33956b6b03cf47742").into(),
    };

    // Insert the second transaction into the transaction trie.
    example_txn_trie.insert(
        Nibbles::from_str("0x01").unwrap(),
        rlp::encode(&transaction_1).to_vec(),
    );

    // Receipts:
    let mut example_receipt_trie = HashedPartialTrie::from(Node::Empty);

    let log_0 = LogRlp {
        address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
        topics: vec![
            hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(),
            hex!("000000000000000000000000000000000000000000000000000000000000002a").into(),
            hex!("0000000000000000000000000000000000000000000000000000000000bd9fe6").into(),
        ],
        data: hex!("f7af1cc94b1aef2e0fa15f1b4baefa86eb60e78fa4bd082372a0a446d197fb58")
            .to_vec()
            .into(),
    };

    let receipt_0 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 0x016e5bu64.into(),
        bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000005000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000080008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000020000000000008000000000000000000000000").to_vec().into(),
        logs: vec![log_0],
    };

    // Insert the first receipt into the receipt trie.
    example_receipt_trie.insert(
        Nibbles::from_str("0x80").unwrap(), // RLP(0) is 0x80
        rlp::encode(&receipt_0).to_vec(),
    );

    let log_1 = LogRlp {
        address: hex!("7ef66b77759e12Caf3dDB3E4AFF524E577C59D8D").into(),
        topics: vec![
            hex!("8a22ee899102a366ac8ad0495127319cb1ff2403cfae855f83a89cda1266674d").into(),
            hex!("0000000000000000000000000000000000000000000000000000000000000004").into(),
            hex!("00000000000000000000000000000000000000000000000000000000004920ea").into(),
        ],
        data: hex!("a814f7df6a2203dc0e472e8828be95957c6b329fee8e2b1bb6f044c1eb4fc243")
            .to_vec()
            .into(),
    };

    let receipt_1 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 0x02dcb6u64.into(),
        bloom: hex!("00000000000000000000000000000000000000000000000000800000000000000040000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000008000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000400000000000000000000000000000002000040000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000008000000000000000000000000").to_vec().into(),
        logs: vec![log_1],
    };

    // Insert the second receipt into the receipt trie.
    example_receipt_trie.insert(
        Nibbles::from_str("0x01").unwrap(),
        rlp::encode(&receipt_1).to_vec(),
    );

    // Check that the trie hashes are correct.
    assert_eq!(
        example_txn_trie.hash(),
        hex!("3ab7120d12e1fc07303508542602beb7eecfe8f262b83fd71eefe7d6205242ce").into()
    );

    assert_eq!(
        example_receipt_trie.hash(),
        hex!("da46cdd329bfedace32da95f2b344d314bc6f55f027d65f9f4ac04ee425e1f98").into()
    );

    Ok(())
}

#[test]
#[ignore] // Too slow to run on CI.
fn test_two_txn() -> anyhow::Result<()> {
    init_logger();

    let all_stark = AllStark::<F, D>::default();
    let config = StarkConfig::standard_fast_config();

    let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba");
    let sender = hex!("af1276cbb260bb13deddb4209ae99ae6e497f446");
    // Private key: DCDFF53B4F013DBCDC717F89FE3BF4D8B10512AAE282B48E01D7530470382701
    let to = hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87");

    let beneficiary_state_key = keccak(beneficiary);
    let sender_state_key = keccak(sender);
    let to_hashed = keccak(to);

    let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap();
    let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap();
    let to_nibbles = Nibbles::from_bytes_be(to_hashed.as_bytes()).unwrap();

    // Set accounts before the transaction.
    let beneficiary_account_before = AccountRlp {
        nonce: 1.into(),
        ..AccountRlp::default()
    };

    let sender_balance_before = 50000000000000000u64;
    let sender_account_before = AccountRlp {
        balance: sender_balance_before.into(),
        ..AccountRlp::default()
    };
    let to_account_before = AccountRlp {
        ..AccountRlp::default()
    };

    // Initialize the state trie with three accounts.
    let mut state_trie_before = HashedPartialTrie::from(Node::Empty);
    state_trie_before.insert(
        beneficiary_nibbles,
        rlp::encode(&beneficiary_account_before).to_vec(),
    );
    state_trie_before.insert(sender_nibbles, rlp::encode(&sender_account_before).to_vec());
    state_trie_before.insert(to_nibbles, rlp::encode(&to_account_before).to_vec());

    let tries_before = TrieInputs {
        state_trie: state_trie_before,
        transactions_trie: Node::Empty.into(),
        receipts_trie: Node::Empty.into(),
        storage_tries: vec![(to_hashed, Node::Empty.into())],
    };

    // Prove two simple transfers.
    let gas_price = 10;
    let txn_value = 0x11c37937e08000u64;
    let txn_0 = hex!("f866800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d878711c37937e080008026a01fcd0ce88ac7600698a771f206df24b70e67981b6f107bd7c1c24ea94f113bcba00d87cc5c7afc2988e4ff200b5a0c7016b0d5498bbc692065ca983fcbbfe02555");
    let txn_1 = hex!("f866010a82520894095e7baea6a6c7c4c2dfeb977efac326af552d878711c37937e080008026a0d8123f5f537bd3a67283f67eb136f7accdfc4ef012cfbfd3fb1d0ac7fd01b96fa004666d9feef90a1eb568570374dd19977d4da231b289d769e6f95105c06fd672");
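    // Both transactions are plain transfers of `txn_value` wei to `to`, each with a 21,000 gas limit.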

    let block_metadata = BlockMetadata {
        block_beneficiary: Address::from(beneficiary),
        block_timestamp: 0x03e8.into(),
        block_number: 1.into(),
        block_difficulty: 0x020000.into(),
        block_gaslimit: 0xffffffffu32.into(),
        block_chain_id: 1.into(),
        block_base_fee: 0xa.into(),
        block_bloom: [0.into(); 8],
    };

    let mut contract_code = HashMap::new();
    contract_code.insert(keccak(vec![]), vec![]);

    // Update accounts
    let beneficiary_account_after = AccountRlp {
        nonce: 1.into(),
        ..AccountRlp::default()
    };

    let sender_balance_after = sender_balance_before - gas_price * 21000 * 2 - txn_value * 2;
    let sender_account_after = AccountRlp {
        balance: sender_balance_after.into(),
        nonce: 2.into(),
        ..AccountRlp::default()
    };
    let to_account_after = AccountRlp {
        balance: (2 * txn_value).into(),
        ..AccountRlp::default()
    };

    // Update the state trie.
    let mut expected_state_trie_after = HashedPartialTrie::from(Node::Empty);
    expected_state_trie_after.insert(
        beneficiary_nibbles,
        rlp::encode(&beneficiary_account_after).to_vec(),
    );
    expected_state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec());
    expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec());

    // Compute new receipt trie.
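    // Each simple transfer consumes exactly the 21,000 gas of intrinsic cost, so the cumulative gas used is 21,000 after the first transaction and 42,000 after the second.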
    let mut receipts_trie = HashedPartialTrie::from(Node::Empty);

    let receipt_0 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 21000u64.into(),
        bloom: [0x00; 256].to_vec().into(),
        logs: vec![],
    };

    let receipt_1 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 42000u64.into(),
        bloom: [0x00; 256].to_vec().into(),
        logs: vec![],
    };

    receipts_trie.insert(
        Nibbles::from_str("0x80").unwrap(),
        rlp::encode(&receipt_0).to_vec(),
    );

    receipts_trie.insert(
        Nibbles::from_str("0x01").unwrap(),
        rlp::encode(&receipt_1).to_vec(),
    );

    let trie_roots_after = TrieRoots {
        state_root: expected_state_trie_after.hash(),
        transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
        receipts_root: receipts_trie.hash(),
    };
    let inputs = GenerationInputs {
        signed_txns: vec![txn_0.to_vec(), txn_1.to_vec()],
        tries: tries_before,
        trie_roots_after,
        contract_code,
        block_metadata,
        addresses: vec![],
    };

    let mut timing = TimingTree::new("prove", log::Level::Debug);
    let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing)?;
    timing.filter(Duration::from_millis(100)).print();

    // Assert trie roots.
    assert_eq!(
        proof.public_values.trie_roots_after.state_root,
        expected_state_trie_after.hash()
    );

    assert_eq!(
        proof.public_values.trie_roots_after.receipts_root,
        receipts_trie.hash()
    );

    verify_proof(&all_stark, proof, &config)
}

fn init_logger() {
    let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
}
@ -13,7 +13,7 @@ use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::AccountRlp;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
@ -48,6 +48,18 @@ fn self_balance_gas_cost() -> anyhow::Result<()> {
    let code = [
        0x5a, 0x47, 0x5a, 0x90, 0x50, 0x90, 0x03, 0x60, 0x02, 0x90, 0x03, 0x60, 0x01, 0x55, 0x00,
    ];
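    // i.e. GAS, SELFBALANCE, GAS, SWAP1, POP, SWAP1, SUB, PUSH1 0x02, SWAP1, SUB, PUSH1 0x01, SSTORE, STOP.
    // The SSTORE writes a non-zero value to a cold, previously zero slot, hence the 22,100 (= 20,000 + 2,100) charge below.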
    let code_gas = 2 // GAS
        + 5 // SELFBALANCE
        + 2 // GAS
        + 3 // SWAP1
        + 2 // POP
        + 3 // SWAP1
        + 3 // SUB
        + 3 // PUSH1
        + 3 // SWAP1
        + 3 // SUB
        + 3 // PUSH1
        + 22100; // SSTORE
    let code_hash = keccak(code);

    let beneficiary_account_before = AccountRlp::default();
@ -120,10 +132,22 @@ fn self_balance_gas_cost() -> anyhow::Result<()> {
        expected_state_trie_after
    };

    let gas_used = 21_000 + code_gas;
    let receipt_0 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: gas_used.into(),
        bloom: vec![0; 256].into(),
        logs: vec![],
    };
    let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
    receipts_trie.insert(
        Nibbles::from_str("0x80").unwrap(),
        rlp::encode(&receipt_0).to_vec(),
    );
    let trie_roots_after = TrieRoots {
        state_root: expected_state_trie_after.hash(),
        transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
        receipts_root: tries_before.receipts_trie.hash(), // TODO: Fix this when we have receipts trie.
        receipts_root: receipts_trie.hash(),
    };
    let inputs = GenerationInputs {
        signed_txns: vec![txn.to_vec()],

@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::str::FromStr;
use std::time::Duration;

use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
@ -12,7 +13,7 @@ use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::generation::mpt::AccountRlp;
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
@ -73,6 +74,7 @@ fn test_simple_transfer() -> anyhow::Result<()> {
        block_gaslimit: 0xff112233u32.into(),
        block_chain_id: 1.into(),
        block_base_fee: 0xa.into(),
        block_bloom: [0.into(); 8],
    };

    let mut contract_code = HashMap::new();
@ -109,10 +111,23 @@ fn test_simple_transfer() -> anyhow::Result<()> {
        }
        .into()
    };

    let receipt_0 = LegacyReceiptRlp {
        status: true,
        cum_gas_used: 21032.into(),
        bloom: vec![0; 256].into(),
        logs: vec![],
    };
    let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
    receipts_trie.insert(
        Nibbles::from_str("0x80").unwrap(),
        rlp::encode(&receipt_0).to_vec(),
    );

    let trie_roots_after = TrieRoots {
        state_root: expected_state_trie_after.hash(),
        transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
        receipts_root: tries_before.receipts_trie.hash(), // TODO: Fix this when we have receipts trie.
        receipts_root: receipts_trie.hash(),
    };
    let inputs = GenerationInputs {
        signed_txns: vec![txn.to_vec()],

@ -225,6 +225,15 @@ pub trait Read {
        Ok(ExtensionTarget(res))
    }

    /// Reads an array of Target from `self`.
    #[inline]
    fn read_target_array<const N: usize>(&mut self) -> IoResult<[Target; N]> {
        (0..N)
            .map(|_| self.read_target())
            .collect::<Result<Vec<_>, _>>()
            .map(|v| v.try_into().unwrap())
    }

    /// Reads a vector of Target from `self`.
    #[inline]
    fn read_target_vec(&mut self) -> IoResult<Vec<Target>> {
@ -1313,6 +1322,16 @@ pub trait Write {
        Ok(())
    }

    /// Writes an array of Target `v` to `self`.
    #[inline]
    fn write_target_array<const N: usize>(&mut self, v: &[Target; N]) -> IoResult<()> {
        for &elem in v.iter() {
            self.write_target(elem)?;
        }

        Ok(())
    }
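    // Example round trip (a sketch only, assuming some buffer type implementing
    // both `Write` and `Read`; `buf` and `targets` are illustrative names):
    //     buf.write_target_array(&targets)?;
    //     let restored: [Target; N] = buf.read_target_array()?;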

    /// Writes a vector of Target `v` to `self`.
    #[inline]
    fn write_target_vec(&mut self, v: &[Target]) -> IoResult<()> {