Transactions trie support (#1232)

* Implement transactions.

* Fix receipts and transactions

* Add some fixes

* Update tests

* Remove changes added for debugging purposes only

* Clippy

* Remove additional debug changes

* Remove unused

* Apply comments

---------

Co-authored-by: Linda Guiga <lindaguiga3@gmail.com>
Co-authored-by: Robin Salen <salenrobin@gmail.com>
Alonso González 2023-09-27 16:00:16 +02:00 committed by GitHub
parent acc659da07
commit f49fbc8e9b
20 changed files with 612 additions and 114 deletions

View File

@ -131,6 +131,7 @@ pub(crate) fn combined_kernel() -> Kernel {
include_str!("asm/rlp/encode.asm"),
include_str!("asm/rlp/encode_rlp_scalar.asm"),
include_str!("asm/rlp/encode_rlp_string.asm"),
include_str!("asm/rlp/increment_bounded_rlp.asm"),
include_str!("asm/rlp/num_bytes.asm"),
include_str!("asm/rlp/read_to_memory.asm"),
include_str!("asm/shift.asm"),

View File

@ -1,4 +1,4 @@
// Pre-stack: status, leftover_gas, prev_cum_gas, txn_nb, retdest
// Pre-stack: status, leftover_gas, prev_cum_gas, txn_nb, num_nibbles, retdest
// Post stack: new_cum_gas, txn_nb
// A receipt is stored in MPT_TRIE_DATA as:
// [payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, [logs]]
@ -11,210 +11,211 @@
// - insert a new node in receipt_trie,
// - set the bloom filter back to 0
global process_receipt:
// stack: status, leftover_gas, prev_cum_gas, txn_nb, retdest
// stack: status, leftover_gas, prev_cum_gas, txn_nb, num_nibbles, retdest
DUP2 DUP4
// stack: prev_cum_gas, leftover_gas, status, leftover_gas, prev_cum_gas, txn_nb, retdest
// stack: prev_cum_gas, leftover_gas, status, leftover_gas, prev_cum_gas, txn_nb, num_nibbles, retdest
%compute_cumulative_gas
// stack: new_cum_gas, status, leftover_gas, prev_cum_gas, txn_nb, retdest
// stack: new_cum_gas, status, leftover_gas, prev_cum_gas, txn_nb, num_nibbles, retdest
SWAP3 POP
// stack: status, leftover_gas, new_cum_gas, txn_nb, retdest
// stack: status, leftover_gas, new_cum_gas, txn_nb, num_nibbles, retdest
SWAP1 POP
// stack: status, new_cum_gas, txn_nb, retdest
// stack: status, new_cum_gas, txn_nb, num_nibbles, retdest
// Now, we need to check whether the transaction has failed.
DUP1 ISZERO %jumpi(failed_receipt)
process_receipt_after_status:
// stack: status, new_cum_gas, txn_nb, retdest
// stack: status, new_cum_gas, txn_nb, num_nibbles, retdest
PUSH process_receipt_after_bloom
%jump(logs_bloom)
process_receipt_after_bloom:
// stack: status, new_cum_gas, txn_nb, retdest
// stack: status, new_cum_gas, txn_nb, num_nibbles, retdest
DUP2 DUP4
// stack: txn_nb, new_cum_gas, status, new_cum_gas, txn_nb, retdest
// stack: txn_nb, new_cum_gas, status, new_cum_gas, txn_nb, num_nibbles, retdest
SWAP2
// stack: status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Compute the total RLP payload length of the receipt.
PUSH 1 // status is always 1 byte.
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP3
%rlp_scalar_len // cum_gas is a simple scalar.
ADD
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Next is the bloom_filter, which is a 256-byte array. Its RLP encoding is
// 1 + 2 + 256 bytes.
%add_const(259)
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Last is the logs.
%mload_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
%rlp_list_len
ADD
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Now we can write the receipt in MPT_TRIE_DATA.
%get_trie_data_size
// stack: receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write transaction type if necessary. RLP_RAW contains, at index 0, the current transaction type.
PUSH 0
%mload_kernel(@SEGMENT_RLP_RAW)
// stack: first_txn_byte, receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: first_txn_byte, receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1 %eq_const(1) %jumpi(receipt_nonzero_type)
DUP1 %eq_const(2) %jumpi(receipt_nonzero_type)
// If we are here, we are dealing with a legacy transaction, and we do not need to write the type.
POP
process_receipt_after_type:
// stack: receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, payload_len, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write payload_len.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, status, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write status.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, new_cum_gas, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, new_cum_gas, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write cum_gas_used.
SWAP1
%append_to_trie_data
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write Bloom filter.
PUSH 256 // Bloom length.
PUSH 0 PUSH @SEGMENT_TXN_BLOOM PUSH 0 // Bloom memory address.
%get_trie_data_size PUSH @SEGMENT_TRIE_DATA PUSH 0 // MPT dest address.
// stack: DST, SRC, 256, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: DST, SRC, 256, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%memcpy
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Update trie data size.
%get_trie_data_size
%add_const(256)
%set_trie_data_size
// Now we write logs.
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// We start with the logs payload length.
%mload_global_metadata(@GLOBAL_METADATA_LOGS_PAYLOAD_LEN)
%append_to_trie_data
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%mload_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// Then the number of logs.
// stack: num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1 %append_to_trie_data
PUSH 0
// Each log is written in MPT_TRIE_DATA as:
// [payload_len, address, num_topics, [topics], data_len, [data]].
process_receipt_logs_loop:
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP2 DUP2
EQ
// stack: i == num_logs, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: i == num_logs, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%jumpi(process_receipt_after_write)
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS)
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write payload_len.
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: log_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write address.
%increment
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: addr_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write num_topics.
%increment
// stack: num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1
%append_to_trie_data
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_topics, num_topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
SWAP1 %increment SWAP1
// stack: num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
PUSH 0
process_receipt_topics_loop:
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP2 DUP2
EQ
// stack: j == num_topics, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j == num_topics, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%jumpi(process_receipt_topics_end)
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write j-th topic.
DUP3 DUP2
ADD
// stack: cur_topic_ptr, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: cur_topic_ptr, j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%increment
%jump(process_receipt_topics_loop)
process_receipt_topics_end:
// stack: num_topics, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_topics, num_topics, topics_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
POP
ADD
// stack: data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write data_len
DUP1
%mload_kernel(@SEGMENT_LOGS_DATA)
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP1
%append_to_trie_data
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: data_len, data_len_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
SWAP1 %increment SWAP1
// stack: data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
PUSH 0
process_receipt_data_loop:
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
DUP2 DUP2
EQ
// stack: j == data_len, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j == data_len, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%jumpi(process_receipt_data_end)
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
// Write j-th data byte.
DUP3 DUP2
ADD
// stack: cur_data_ptr, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: cur_data_ptr, j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%mload_kernel(@SEGMENT_LOGS_DATA)
%append_to_trie_data
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: j, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%increment
%jump(process_receipt_data_loop)
process_receipt_data_end:
// stack: data_len, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: data_len, data_len, data_ptr, i, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%pop3
%increment
%jump(process_receipt_logs_loop)
process_receipt_after_write:
// stack: num_logs, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: num_logs, num_logs, receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
%pop2
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, retdest
// stack: receipt_ptr, txn_nb, new_cum_gas, txn_nb, num_nibbles, retdest
SWAP1
// stack: txn_nb, receipt_ptr, new_cum_gas, txn_nb, retdest
// stack: txn_nb, receipt_ptr, new_cum_gas, txn_nb, num_nibbles, retdest
DUP5
%mpt_insert_receipt_trie
// stack: new_cum_gas, txn_nb, retdest
// stack: new_cum_gas, txn_nb, num_nibbles, retdest
// Now, we set the Bloom filter back to 0.
PUSH 0
%rep 256
// stack: counter, new_cum_gas, txn_nb, retdest
// stack: counter, new_cum_gas, txn_nb, num_nibbles, retdest
PUSH 0 DUP2
// stack: counter, 0, counter, new_cum_gas, txn_nb, retdest
// stack: counter, 0, counter, new_cum_gas, txn_nb, num_nibbles, retdest
%mstore_kernel(@SEGMENT_TXN_BLOOM)
// stack: counter, new_cum_gas, txn_nb, retdest
// stack: counter, new_cum_gas, txn_nb, num_nibbles, retdest
%increment
%endrep
POP
// stack: new_cum_gas, txn_nb, retdest
%stack (new_cum_gas, txn_nb, retdest) -> (retdest, new_cum_gas, txn_nb)
// stack: new_cum_gas, txn_nb, num_nibbles, retdest
%stack (new_cum_gas, txn_nb, num_nibbles, retdest) -> (retdest, new_cum_gas)
JUMP
receipt_nonzero_type:
@ -223,16 +224,16 @@ receipt_nonzero_type:
%jump(process_receipt_after_type)
failed_receipt:
// stack: status, new_cum_gas, txn_nb
// stack: status, new_cum_gas, txn_nb, num_nibbles
// It is the receipt of a failed transaction, so set num_logs to 0. This will also lead to Bloom filter = 0.
PUSH 0
%mstore_global_metadata(@GLOBAL_METADATA_LOGS_LEN)
// stack: status, new_cum_gas, txn_nb
// stack: status, new_cum_gas, txn_nb, num_nibbles
%jump(process_receipt_after_status)
%macro process_receipt
// stack: success, leftover_gas, cur_cum_gas, txn_nb
%stack (success, leftover_gas, cur_cum_gas, txn_nb) -> (success, leftover_gas, cur_cum_gas, txn_nb, %%after)
// stack: success, leftover_gas, cur_cum_gas, txn_nb, num_nibbles
%stack (success, leftover_gas, cur_cum_gas, txn_nb, num_nibbles) -> (success, leftover_gas, cur_cum_gas, txn_nb, num_nibbles, %%after)
%jump(process_receipt)
%%after:
%endmacro
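For reference, a rough host-side sketch in Rust of the payload-length computation performed above: one byte for the status, the scalar RLP length of cum_gas_used, 259 bytes for the Bloom filter, plus the RLP list length of the logs payload. The helper names and the use of u64 instead of kernel words are illustrative assumptions, not the repository's API.

fn rlp_scalar_len(x: u64) -> u64 {
    // A scalar below 0x80 is its own single-byte encoding; otherwise one prefix
    // byte plus the big-endian bytes of the scalar.
    if x < 0x80 {
        1
    } else {
        1 + (64 - u64::from(x.leading_zeros()) + 7) / 8
    }
}

fn rlp_list_len(payload_len: u64) -> u64 {
    // Length of an RLP list with `payload_len` bytes of payload: prefix + payload.
    if payload_len <= 55 {
        1 + payload_len
    } else {
        1 + (64 - u64::from(payload_len.leading_zeros()) + 7) / 8 + payload_len
    }
}

fn receipt_payload_len(cum_gas_used: u64, logs_payload_len: u64) -> u64 {
    1                                  // status is always one byte
        + rlp_scalar_len(cum_gas_used) // cum_gas_used as an RLP scalar
        + 259                          // Bloom filter: 0xb9 0x01 0x00 prefix + 256 bytes
        + rlp_list_len(logs_payload_len)
}

fn main() {
    // A successful 21_000-gas transaction with no logs: 1 + 3 + 259 + 1 = 264 bytes.
    assert_eq!(receipt_payload_len(21_000, 0), 264);
}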

View File

@ -16,9 +16,18 @@ global hash_initial_tries:
global start_txns:
// stack: (empty)
// The special case of an empty trie (i.e. for the first transaction)
// is handled outside of the kernel.
%mload_global_metadata(@GLOBAL_METADATA_TXN_NUMBER_BEFORE)
// stack: txn_nb
%mload_global_metadata(@GLOBAL_METADATA_BLOCK_GAS_USED_BEFORE)
// stack: init_gas_used, txn_nb
DUP2 %scalar_to_rlp
// stack: txn_counter, init_gas_used, txn_nb
DUP1 %num_bytes %mul_const(2)
// stack: num_nibbles, txn_counter, init_gas_used, txn_nb
SWAP2
// stack: init_gas_used, txn_counter, num_nibbles, txn_nb
txn_loop:
// If the prover has no more txns for us to process, halt.
@ -27,21 +36,24 @@ txn_loop:
// Call route_txn. When we return, continue the txn loop.
PUSH txn_loop_after
// stack: retdest, prev_used_gas, txn_nb
// stack: retdest, prev_gas_used, txn_counter, num_nibbles, txn_nb
DUP4 DUP4 %increment_bounded_rlp
%stack (next_txn_counter, next_num_nibbles, retdest, prev_gas_used, txn_counter, num_nibbles) -> (txn_counter, num_nibbles, retdest, prev_gas_used, txn_counter, num_nibbles, next_txn_counter, next_num_nibbles)
%jump(route_txn)
global txn_loop_after:
// stack: success, leftover_gas, cur_cum_gas, txn_nb
// stack: success, leftover_gas, cur_cum_gas, prev_txn_counter, prev_num_nibbles, txn_counter, num_nibbles, txn_nb
%process_receipt
// stack: new_cum_gas, txn_nb
SWAP1 %increment SWAP1
// stack: new_cum_gas, txn_counter, num_nibbles, txn_nb
SWAP3 %increment SWAP3
%jump(txn_loop)
global hash_final_tries:
// stack: cum_gas, txn_nb
// stack: cum_gas, txn_counter, num_nibbles, txn_nb
// Check that we end up with the correct `cum_gas`, `txn_nb` and bloom filter.
%mload_global_metadata(@GLOBAL_METADATA_BLOCK_GAS_USED_AFTER) %assert_eq
%mload_global_metadata(@GLOBAL_METADATA_TXN_NUMBER_AFTER) %assert_eq
DUP3 %mload_global_metadata(@GLOBAL_METADATA_TXN_NUMBER_AFTER) %assert_eq
%pop3
%check_metadata_block_bloom
%mpt_hash_state_trie %mload_global_metadata(@GLOBAL_METADATA_STATE_TRIE_DIGEST_AFTER) %assert_eq
%mpt_hash_txn_trie %mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_DIGEST_AFTER) %assert_eq

View File

@ -97,7 +97,30 @@ global encode_account:
JUMP
global encode_txn:
PANIC // TODO
// stack: rlp_pos, value_ptr, retdest
// Load txn_rlp_len, which is stored at the beginning of value_ptr.
DUP2 %mload_trie_data
// stack: txn_rlp_len, rlp_pos, value_ptr, retdest
SWAP2 %increment
// stack: txn_rlp_ptr=value_ptr+1, rlp_pos, txn_rlp_len, retdest
%stack (txn_rlp_ptr, rlp_pos, txn_rlp_len) -> (rlp_pos, txn_rlp_len, txn_rlp_len, txn_rlp_ptr)
// Encode the txn RLP string prefix.
// stack: rlp_pos, txn_rlp_len, txn_rlp_len, txn_rlp_ptr, retdest
%encode_rlp_multi_byte_string_prefix
// Copy the txn RLP into the RLP buffer.
// stack: rlp_pos, txn_rlp_len, txn_rlp_ptr, retdest
%stack (rlp_pos, txn_rlp_len, txn_rlp_ptr) -> (
0, @SEGMENT_RLP_RAW, rlp_pos, // dest addr
0, @SEGMENT_TRIE_DATA, txn_rlp_ptr, // src addr. Kernel has context 0
txn_rlp_len, // mcpy len
txn_rlp_len, rlp_pos)
%memcpy
ADD
// stack: new_rlp_pos, retdest
SWAP1
JUMP
// We assume a receipt in memory is stored as:
// [payload_len, status, cum_gas_used, bloom, logs_payload_len, num_logs, [logs]].
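A rough Rust sketch of what encode_txn emits above: the leaf value stored under the transaction key is the already-RLP-encoded transaction wrapped as an RLP string, i.e. a string prefix for txn_rlp_len followed by the raw bytes copied out of the trie data. Standard RLP string-prefix rules are assumed here; this is not the kernel's code.

fn encode_txn_leaf_value(txn_rlp: &[u8]) -> Vec<u8> {
    let mut out = Vec::new();
    let len = txn_rlp.len();
    if len == 1 && txn_rlp[0] < 0x80 {
        // A single byte below 0x80 is its own encoding: no prefix.
    } else if len <= 55 {
        out.push(0x80 + len as u8); // short string prefix
    } else {
        let len_bytes = len.to_be_bytes();
        let first = len_bytes.iter().position(|&b| b != 0).unwrap();
        out.push(0xb7 + (len_bytes.len() - first) as u8); // long string prefix
        out.extend_from_slice(&len_bytes[first..]);
    }
    out.extend_from_slice(txn_rlp);
    out
}

fn main() {
    // A 0x60-byte transaction RLP gets the two-byte prefix 0xb8 0x60.
    let value = encode_txn_leaf_value(&[0u8; 0x60]);
    assert_eq!(value[..2], [0xb8, 0x60]);
}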

View File

@ -23,15 +23,34 @@ mpt_insert_state_trie_save:
%%after:
%endmacro
// Insert a node in the transaction trie. The payload
// must point to the RLP-encoded transaction.
// Pre stack: key, num_nibbles, txn_rlp_ptr, retdest
// Post stack: (empty)
global mpt_insert_txn_trie:
// stack: key=rlp(key), num_nibbles, txn_rlp_ptr, retdest
%stack (key, num_nibbles, txn_rlp_ptr)
-> (num_nibbles, key, txn_rlp_ptr, mpt_insert_txn_trie_save)
%mload_global_metadata(@GLOBAL_METADATA_TXN_TRIE_ROOT)
// stack: txn_trie_root_ptr, num_nibbles, key, txn_rlp_ptr, mpt_insert_txn_trie_save, retdest
%jump(mpt_insert)
mpt_insert_txn_trie_save:
// stack: updated_node_ptr, retdest
%mstore_global_metadata(@GLOBAL_METADATA_TXN_TRIE_ROOT)
JUMP
%macro mpt_insert_txn_trie
%stack (key, num_nibbles, txn_rlp_ptr) -> (key, num_nibbles, txn_rlp_ptr, %%after)
%jump(mpt_insert_txn_trie)
%%after:
%endmacro
global mpt_insert_receipt_trie:
// stack: scalar, value_ptr, retdest
%stack (scalar, value_ptr)
-> (scalar, value_ptr, mpt_insert_receipt_trie_save)
// The key is the RLP encoding of scalar.
%scalar_to_rlp
// stack: key, value_ptr, mpt_insert_receipt_trie_save, retdest
DUP1
%num_bytes %mul_const(2)
// stack: num_nibbles, scalar, value_ptr, retdest
%stack (num_nibbles, scalar, value_ptr)
-> (num_nibbles, scalar, value_ptr, mpt_insert_receipt_trie_save)
// The key is the scalar, which is the RLP encoding of the transaction number.
// stack: num_nibbles, key, value_ptr, mpt_insert_receipt_trie_save, retdest
%mload_global_metadata(@GLOBAL_METADATA_RECEIPT_TRIE_ROOT)
// stack: receipt_root_ptr, num_nibbles, key, value_ptr, mpt_insert_receipt_trie_save, retdest
@ -42,27 +61,29 @@ mpt_insert_receipt_trie_save:
JUMP
%macro mpt_insert_receipt_trie
%stack (key, value_ptr) -> (key, value_ptr, %%after)
%stack (num_nibbles, key, value_ptr) -> (num_nibbles, key, value_ptr, %%after)
%jump(mpt_insert_receipt_trie)
%%after:
%endmacro
// Pre stack: scalar, retdest
// Post stack: rlp_scalar
// We will make use of %encode_rlp_scalar, which clobbers RlpRaw.
// We're not hashing tries yet, so it's not an issue.
global scalar_to_rlp:
// stack: scalar, retdest
PUSH 0
%mload_global_metadata(@GLOBAL_METADATA_RLP_DATA_SIZE)
// stack: pos, scalar, retdest
SWAP1 DUP2
%encode_rlp_scalar
// stack: pos', retdest
// Now our rlp_encoding is in RlpRaw in the first pos' cells.
DUP1 // len of the key
PUSH 0 PUSH @SEGMENT_RLP_RAW PUSH 0 // address where we get the key from
// stack: pos', init_pos, retdest
// Now our rlp_encoding is in RlpRaw.
// Set new RlpRaw data size
DUP1 %mstore_global_metadata(@GLOBAL_METADATA_RLP_DATA_SIZE)
DUP2 DUP2 SUB // len of the key
// stack: len, pos', init_pos, retdest
DUP3 PUSH @SEGMENT_RLP_RAW PUSH 0 // address where we get the key from
%mload_packing
// stack: packed_key, pos', retdest
SWAP1 POP
// stack: packed_key, pos', init_pos, retdest
SWAP2 %pop2
// stack: key, retdest
SWAP1
JUMP
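A rough Rust sketch of what scalar_to_rlp produces: the receipt (and transaction) trie key is the RLP encoding of the transaction index, and the kernel tracks alongside it a nibble count equal to twice the key's byte length. This is an illustrative reimplementation, not the repository's API.

fn scalar_to_rlp(i: u64) -> (Vec<u8>, usize) {
    let key = if i == 0 {
        vec![0x80] // RLP of the zero scalar is the empty string, 0x80
    } else if i < 0x80 {
        vec![i as u8] // a single byte below 0x80 is its own encoding
    } else {
        let bytes = i.to_be_bytes();
        let first = bytes.iter().position(|&b| b != 0).unwrap();
        let mut v = vec![0x80 + (bytes.len() - first) as u8];
        v.extend_from_slice(&bytes[first..]);
        v
    };
    let num_nibbles = 2 * key.len();
    (key, num_nibbles)
}

fn main() {
    // These are the keys the tests below use: 0x80 for txn 0, then 0x01, 0x02, ...
    assert_eq!(scalar_to_rlp(0), (vec![0x80], 2));
    assert_eq!(scalar_to_rlp(1), (vec![0x01], 2));
    assert_eq!(scalar_to_rlp(128), (vec![0x81, 0x80], 4));
}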

View File

@ -29,7 +29,23 @@ global mpt_load_state_trie_value:
global mpt_load_txn_trie_value:
// stack: retdest
PANIC // TODO
PROVER_INPUT(mpt)
// stack: rlp_len, retdest
// The first element is the rlp length
DUP1 %append_to_trie_data
PUSH 0
mpt_load_loop:
// stack: i, rlp_len, retdest
DUP2 DUP2 EQ %jumpi(mpt_load_end)
PROVER_INPUT(mpt) %append_to_trie_data
%increment
%jump(mpt_load_loop)
mpt_load_end:
// stack: i, rlp_len, retdest
%pop2
JUMP
global mpt_load_receipt_trie_value:
// stack: retdest
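A rough Rust sketch of the value format mpt_load_txn_trie_value consumes above: the host (see the all_mpt_prover_inputs change later in this diff) feeds the raw transaction RLP as its length followed by one word per byte, and the kernel appends those words verbatim to the trie data. Names and types here are illustrative only.

fn txn_value_prover_inputs(txn_rlp: &[u8]) -> Vec<u64> {
    // Host side: [rlp_len, byte_0, byte_1, ...]
    let mut inputs = vec![txn_rlp.len() as u64];
    inputs.extend(txn_rlp.iter().map(|&b| u64::from(b)));
    inputs
}

fn load_txn_trie_value(inputs: &[u64], trie_data: &mut Vec<u64>) {
    // Kernel side: write rlp_len, then loop rlp_len times appending one byte per word.
    let rlp_len = inputs[0] as usize;
    trie_data.push(inputs[0]);
    trie_data.extend_from_slice(&inputs[1..1 + rlp_len]);
}

fn main() {
    let mut trie_data = vec![0]; // trie data conventionally starts with a zero word
    let inputs = txn_value_prover_inputs(&[0xf8, 0x60, 0x01]);
    load_txn_trie_value(&inputs, &mut trie_data);
    assert_eq!(trie_data, vec![0, 3, 0xf8, 0x60, 0x01]);
}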

View File

@ -40,7 +40,7 @@ global encode_rlp_fixed:
%increment // increment pos
// stack: pos, len, string, retdest
%stack (pos, len, string) -> (pos, string, len, encode_rlp_fixed_finish)
// stack: context, segment, pos, string, len, encode_rlp_fixed_finish, retdest
// stack: pos, string, len, encode_rlp_fixed_finish, retdest
%jump(mstore_unpacking_rlp)
encode_rlp_fixed_finish:
// stack: pos', retdest

View File

@ -0,0 +1,38 @@
// Increment the RLP-encoded index by 1, and increment its
// number of nibbles when required. Should not be called with
// rlp_index > 0x82ffff.
global increment_bounded_rlp:
// stack: rlp_index, num_nibbles, retdest
DUP1
%eq_const(0x80)
%jumpi(case_0x80)
DUP1
%eq_const(0x7f)
%jumpi(case_0x7f)
DUP1
%eq_const(0x81ff)
%jumpi(case_0x81ff)
// If rlp_index != 0x80 and rlp_index != 0x7f and rlp_index != 0x81ff
// we only need to add one and keep the number of nibbles
%increment
%stack (rlp_index, num_nibbles, retdest) -> (retdest, rlp_index, num_nibbles)
JUMP
case_0x80:
%stack (rlp_index, num_nibbles, retdest) -> (retdest, 0x01, 2)
JUMP
case_0x7f:
%stack (rlp_index, num_nibbles, retdest) -> (retdest, 0x8180, 4)
JUMP
case_0x81ff:
%stack (rlp_index, num_nibbles, retdest) -> (retdest, 0x820100, 6)
JUMP
%macro increment_bounded_rlp
%stack (rlp_index, num_nibbles) -> (rlp_index, num_nibbles, %%after)
%jump(increment_bounded_rlp)
%%after:
%endmacro
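A rough Rust mirror of the case analysis above, with a short walk through the first few transaction keys as a usage example. The boundary values come from the kernel comments; everything else is illustrative.

fn increment_bounded_rlp(rlp_index: u64, num_nibbles: u64) -> (u64, u64) {
    match rlp_index {
        0x80 => (0x01, 2),       // rlp(0) -> rlp(1)
        0x7f => (0x8180, 4),     // rlp(127) -> rlp(128): the key grows to two bytes
        0x81ff => (0x820100, 6), // rlp(255) -> rlp(256): the key grows to three bytes
        _ => (rlp_index + 1, num_nibbles), // otherwise just add one, same length
    }
}

fn main() {
    // Walking the first few keys, as used by the tests in this PR:
    // 0x80, then 0x01, 0x02, 0x03, ... all with two nibbles.
    let (mut key, mut nibbles) = (0x80u64, 2u64);
    for _ in 0..3 {
        let (k, n) = increment_bounded_rlp(key, nibbles);
        key = k;
        nibbles = n;
    }
    assert_eq!((key, nibbles), (0x03, 2));
}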

View File

@ -31,6 +31,6 @@ read_rlp_to_memory_loop:
read_rlp_to_memory_finish:
// stack: pos, len, retdest
%pop2
// stack: retdest
JUMP
POP
// stack: len, retdest
SWAP1 JUMP

View File

@ -3,9 +3,12 @@
// jump to the appropriate transaction parsing method.
global route_txn:
// stack: retdest
// stack: txn_counter, num_nibbles, retdest
// First load transaction data into memory, where it will be parsed.
PUSH read_txn_from_memory
SWAP2 SWAP1
PUSH update_txn_trie
// stack: update_txn_trie, txn_counter, num_nibbles, read_txn_from_memory, retdest
%jump(read_rlp_to_memory)
// At this point, the raw txn data is in memory.
@ -34,3 +37,28 @@ read_txn_from_memory:
// At this point, since it's not a type 1 or 2 transaction,
// it must be a legacy (aka type 0) transaction.
%jump(process_type_0_txn)
global update_txn_trie:
// stack: txn_rlp_len, txn_counter, num_nibbles, retdest
// Copy the transaction rlp to the trie data segment.
%get_trie_data_size
// stack: value_ptr, txn_rlp_len, txn_counter, num_nibbles, retdest
SWAP1
// First, we write the txn RLP length.
DUP1 %append_to_trie_data
// stack: txn_rlp_len, value_ptr, txn_counter, num_nibbles, retdest
DUP2 %increment
// stack: rlp_start=value_ptr+1, txn_rlp_len, value_ptr, txn_counter, num_nibbles, retdest
// Now copy the txn RLP into the trie data segment.
%stack (rlp_start, txn_rlp_len, value_ptr, txn_counter, num_nibbles) -> (
0, @SEGMENT_TRIE_DATA, rlp_start, // dest addr
0, @SEGMENT_RLP_RAW, 0, // src addr. Kernel has context 0
txn_rlp_len, // mcpy len
txn_rlp_len, rlp_start, txn_counter, num_nibbles, value_ptr)
%memcpy
ADD
%set_trie_data_size
// stack: txn_counter, num_nibbles, value_ptr, retdest
%jump(mpt_insert_txn_trie)
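A rough Rust sketch of update_txn_trie's bookkeeping above: the value written for a transaction leaf is a pointer into the trie data at [txn_rlp_len, txn_rlp bytes...], which encode_txn later reads back. The callback standing in for mpt_insert_txn_trie is a hypothetical placeholder.

fn update_txn_trie(
    trie_data: &mut Vec<u64>,
    txn_rlp: &[u8],
    mut mpt_insert_txn_trie: impl FnMut(usize),
) {
    let value_ptr = trie_data.len();            // %get_trie_data_size
    trie_data.push(txn_rlp.len() as u64);       // write txn_rlp_len first
    trie_data.extend(txn_rlp.iter().map(|&b| u64::from(b))); // %memcpy from RlpRaw
    mpt_insert_txn_trie(value_ptr);             // insert under (txn_counter, num_nibbles)
}

fn main() {
    let mut trie_data = vec![0];
    update_txn_trie(&mut trie_data, &[0xf8, 0x60], |value_ptr| assert_eq!(value_ptr, 1));
    assert_eq!(trie_data, vec![0, 2, 0xf8, 0x60]);
}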

View File

@ -1,5 +1,10 @@
use std::str::FromStr;
use anyhow::{anyhow, Result};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::HashedPartialTrie;
use ethereum_types::{BigEndianHash, H256, U256};
use hex_literal::hex;
use crate::cpu::kernel::aggregator::KERNEL;
use crate::cpu::kernel::constants::global_metadata::GlobalMetadata;
@ -247,3 +252,43 @@ fn load_all_mpts_ext_to_leaf() -> Result<()> {
Ok(())
}
#[test]
fn load_mpt_txn_trie() -> Result<()> {
let load_all_mpts = KERNEL.global_labels["load_all_mpts"];
let txn = hex!("f860010a830186a094095e7baea6a6c7c4c2dfeb977efac326af552e89808025a04a223955b0bd3827e3740a9a427d0ea43beb5bafa44a0204bf0a3306c8219f7ba0502c32d78f233e9e7ce9f5df3b576556d5d49731e0678fd5a068cdf359557b5b").to_vec();
let trie_inputs = TrieInputs {
state_trie: Default::default(),
transactions_trie: HashedPartialTrie::from(Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.clone(),
}),
receipts_trie: Default::default(),
storage_tries: vec![],
};
let initial_stack = vec![0xDEADBEEFu32.into()];
let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
interpreter.generation_state.mpt_prover_inputs =
all_mpt_prover_inputs_reversed(&trie_inputs)
.map_err(|err| anyhow!("Invalid MPT data: {:?}", err))?;
interpreter.run()?;
assert_eq!(interpreter.stack(), vec![]);
let mut expected_trie_data = vec![
0.into(),
U256::from(PartialTrieType::Leaf as u32),
2.into(),
128.into(), // Packed key nibbles (0x80)
5.into(), // value_ptr
txn.len().into(),
];
expected_trie_data.extend(txn.into_iter().map(U256::from));
let trie_data = interpreter.get_trie_data();
assert_eq!(trie_data, expected_trie_data);
Ok(())
}

View File

@ -37,7 +37,15 @@ fn test_process_receipt() -> Result<()> {
let expected_bloom = logs_bloom_bytes_fn(test_logs_list).to_vec();
// Set memory.
let initial_stack = vec![retdest, 0.into(), prev_cum_gas, leftover_gas, success];
let num_nibbles = 2.into();
let initial_stack: Vec<U256> = vec![
retdest,
num_nibbles,
0.into(),
prev_cum_gas,
leftover_gas,
success,
];
let mut interpreter = Interpreter::new_with_kernel(process_receipt, initial_stack);
interpreter.set_memory_segment(
Segment::LogsData,
@ -119,7 +127,7 @@ fn test_receipt_encoding() -> Result<()> {
// Get the expected RLP encoding.
let expected_rlp = rlp::encode(&rlp::encode(&receipt_1));
let initial_stack = vec![retdest, 0.into(), 0.into()];
let initial_stack: Vec<U256> = vec![retdest, 0.into(), 0.into()];
let mut interpreter = Interpreter::new_with_kernel(encode_receipt, initial_stack);
// Write data to memory.
@ -238,7 +246,7 @@ fn test_receipt_bloom_filter() -> Result<()> {
let topic03 = 0xbd9fe6.into();
// Set logs memory and initialize TxnBloom and BlockBloom segments.
let initial_stack = vec![retdest];
let initial_stack: Vec<U256> = vec![retdest];
let mut interpreter = Interpreter::new_with_kernel(logs_bloom, initial_stack);
let mut logs = vec![
@ -410,7 +418,7 @@ fn test_mpt_insert_receipt() -> Result<()> {
receipt.extend(logs_0.clone());
// First, we load all mpts.
let initial_stack = vec![retdest];
let initial_stack: Vec<U256> = vec![retdest];
let mut interpreter = Interpreter::new_with_kernel(load_all_mpts, initial_stack);
interpreter.generation_state.mpt_prover_inputs =
@ -425,7 +433,13 @@ fn test_mpt_insert_receipt() -> Result<()> {
}
// stack: transaction_nb, value_ptr, retdest
let initial_stack = [retdest, cur_trie_data.len().into(), 0.into()];
let num_nibbles = 2;
let initial_stack: Vec<U256> = vec![
retdest,
cur_trie_data.len().into(),
0x80.into(),
num_nibbles.into(),
];
for i in 0..initial_stack.len() {
interpreter.push(initial_stack[i]);
}
@ -489,7 +503,13 @@ fn test_mpt_insert_receipt() -> Result<()> {
// Get updated TrieData segment.
cur_trie_data = interpreter.get_memory_segment(Segment::TrieData);
let initial_stack2 = [retdest, cur_trie_data.len().into(), 1.into()];
let num_nibbles = 2;
let initial_stack2: Vec<U256> = vec![
retdest,
cur_trie_data.len().into(),
0x01.into(),
num_nibbles.into(),
];
for i in 0..initial_stack2.len() {
interpreter.push(initial_stack2[i]);
}
@ -528,7 +548,7 @@ fn test_bloom_two_logs() -> Result<()> {
let retdest = 0xDEADBEEFu32.into();
let logs_bloom = KERNEL.global_labels["logs_bloom"];
let initial_stack = vec![retdest];
let initial_stack: Vec<U256> = vec![retdest];
// Set memory.
let logs = vec![

View File

@ -185,7 +185,9 @@ pub(crate) fn all_mpt_prover_inputs(trie_inputs: &TrieInputs) -> Result<Vec<U256
)?;
mpt_prover_inputs(&trie_inputs.transactions_trie, &mut prover_inputs, &|rlp| {
Ok(rlp::decode_list(rlp))
let mut parsed_txn = vec![U256::from(rlp.len())];
parsed_txn.extend(rlp.iter().copied().map(U256::from));
Ok(parsed_txn)
})?;
mpt_prover_inputs(

View File

@ -300,7 +300,7 @@ fn log_kernel_instruction<F: Field>(state: &GenerationState<F>, op: Operation) {
state.registers.context,
KERNEL.offset_name(pc),
op,
state.stack()
state.stack(),
);
assert!(pc < KERNEL.code.len(), "Kernel PC is out of range: {}", pc);

View File

@ -139,9 +139,15 @@ fn add11_yml() -> anyhow::Result<()> {
Nibbles::from_str("0x80").unwrap(),
rlp::encode(&receipt_0).to_vec(),
);
let transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {

View File

@ -171,10 +171,15 @@ fn test_basic_smart_contract() -> anyhow::Result<()> {
Nibbles::from_str("0x80").unwrap(),
rlp::encode(&receipt_0).to_vec(),
);
let transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {

View File

@ -204,9 +204,15 @@ fn test_log_opcodes() -> anyhow::Result<()> {
expected_state_trie_after.insert(sender_nibbles, rlp::encode(&sender_account_after).to_vec());
expected_state_trie_after.insert(to_nibbles, rlp::encode(&to_account_after).to_vec());
let transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let block_bloom_after = [
@ -417,9 +423,15 @@ fn test_log_with_aggreg() -> anyhow::Result<()> {
rlp::encode(&receipt_0).to_vec(),
);
let mut transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let tries_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.clone().hash(),
};
@ -466,7 +478,7 @@ fn test_log_with_aggreg() -> anyhow::Result<()> {
let tries_before = TrieInputs {
state_trie: state_trie_before,
transactions_trie: Node::Empty.into(),
transactions_trie: transactions_trie.clone(),
receipts_trie: receipts_trie.clone(),
storage_tries: vec![],
};
@ -543,9 +555,11 @@ fn test_log_with_aggreg() -> anyhow::Result<()> {
rlp::encode(&to_account_second_after).to_vec(),
);
transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), txn_2.to_vec());
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
@ -863,9 +877,17 @@ fn test_two_txn() -> anyhow::Result<()> {
rlp::encode(&receipt_1).to_vec(),
);
let mut transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn_0.to_vec(),
}
.into();
transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), txn_1.to_vec());
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: HashedPartialTrie::from(Node::Empty).hash(),
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {

View File

@ -0,0 +1,246 @@
#![allow(clippy::upper_case_acronyms)]
use std::collections::HashMap;
use std::str::FromStr;
use std::time::Duration;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use eth_trie_utils::nibbles::Nibbles;
use eth_trie_utils::partial_trie::{HashedPartialTrie, PartialTrie};
use ethereum_types::{Address, H256, U256};
use hex_literal::hex;
use keccak_hash::keccak;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::KeccakGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2_evm::all_stark::AllStark;
use plonky2_evm::config::StarkConfig;
use plonky2_evm::cpu::kernel::opcodes::{get_opcode, get_push_opcode};
use plonky2_evm::generation::mpt::{AccountRlp, LegacyReceiptRlp};
use plonky2_evm::generation::{GenerationInputs, TrieInputs};
use plonky2_evm::proof::{BlockHashes, BlockMetadata, TrieRoots};
use plonky2_evm::prover::prove;
use plonky2_evm::verifier::verify_proof;
use plonky2_evm::Node;
type F = GoldilocksField;
const D: usize = 2;
type C = KeccakGoldilocksConfig;
/// Test the validity of four transactions, where only the first one is valid and the other three abort.
#[test]
fn test_four_transactions() -> anyhow::Result<()> {
init_logger();
let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();
let beneficiary = hex!("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
let sender = hex!("2c7536e3605d9c16a7a3d7b1898e529396a65c23");
let to = hex!("a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0");
let beneficiary_state_key = keccak(beneficiary);
let sender_state_key = keccak(sender);
let to_state_key = keccak(to);
let beneficiary_nibbles = Nibbles::from_bytes_be(beneficiary_state_key.as_bytes()).unwrap();
let sender_nibbles = Nibbles::from_bytes_be(sender_state_key.as_bytes()).unwrap();
let to_nibbles = Nibbles::from_bytes_be(to_state_key.as_bytes()).unwrap();
let push1 = get_push_opcode(1);
let add = get_opcode("ADD");
let stop = get_opcode("STOP");
let code = [push1, 3, push1, 4, add, stop];
let code_gas = 3 + 3 + 3;
let code_hash = keccak(code);
let beneficiary_account_before = AccountRlp::default();
let sender_account_before = AccountRlp {
nonce: 5.into(),
balance: eth_to_wei(100_000.into()),
..AccountRlp::default()
};
let to_account_before = AccountRlp {
code_hash,
..AccountRlp::default()
};
let state_trie_before = {
let mut children = core::array::from_fn(|_| Node::Empty.into());
children[sender_nibbles.get_nibble(0) as usize] = Node::Leaf {
nibbles: sender_nibbles.truncate_n_nibbles_front(1),
value: rlp::encode(&sender_account_before).to_vec(),
}
.into();
children[to_nibbles.get_nibble(0) as usize] = Node::Leaf {
nibbles: to_nibbles.truncate_n_nibbles_front(1),
value: rlp::encode(&to_account_before).to_vec(),
}
.into();
Node::Branch {
children,
value: vec![],
}
}
.into();
let tries_before = TrieInputs {
state_trie: state_trie_before,
transactions_trie: Node::Empty.into(),
receipts_trie: Node::Empty.into(),
storage_tries: vec![],
};
// Generated using a little py-evm script.
let txn1 = hex!("f861050a8255f094a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0648242421ba02c89eb757d9deeb1f5b3859a9d4d679951ef610ac47ad4608dc142beb1b7e313a05af7e9fbab825455d36c36c7f4cfcafbeafa9a77bdff936b52afb36d4fe4bcdd");
let txn2 = hex!("f863800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d87830186a0801ba0ffb600e63115a7362e7811894a91d8ba4330e526f22121c994c4692035dfdfd5a06198379fcac8de3dbfac48b165df4bf88e2088f294b61efb9a65fe2281c76e16");
let txn3 = hex!("f861800a8405f5e10094100000000000000000000000000000000000000080801ba07e09e26678ed4fac08a249ebe8ed680bf9051a5e14ad223e4b2b9d26e0208f37a05f6e3f188e3e6eab7d7d3b6568f5eac7d687b08d307d3154ccd8c87b4630509b");
let txn4 = hex!("f866800a82520894095e7baea6a6c7c4c2dfeb977efac326af552d878711c37937e080008026a01fcd0ce88ac7600698a771f206df24b70e67981b6f107bd7c1c24ea94f113bcba00d87cc5c7afc2988e4ff200b5a0c7016b0d5498bbc692065ca983fcbbfe02555");
let txdata_gas = 2 * 16;
let gas_used = 21_000 + code_gas + txdata_gas;
let value = U256::from(100u32);
let block_metadata = BlockMetadata {
block_beneficiary: Address::from(beneficiary),
block_timestamp: 0x03e8.into(),
block_number: 1.into(),
block_difficulty: 0x020000.into(),
block_gaslimit: 0x445566u64.into(),
block_chain_id: 1.into(),
block_gas_used: gas_used.into(),
..BlockMetadata::default()
};
let mut contract_code = HashMap::new();
contract_code.insert(keccak(vec![]), vec![]);
contract_code.insert(code_hash, code.to_vec());
// Update trie roots after the 4 transactions.
// State trie.
let expected_state_trie_after: HashedPartialTrie = {
let beneficiary_account_after = AccountRlp {
balance: beneficiary_account_before.balance + gas_used * 10,
..beneficiary_account_before
};
let sender_account_after = AccountRlp {
balance: sender_account_before.balance - value - gas_used * 10,
nonce: sender_account_before.nonce + 1,
..sender_account_before
};
let to_account_after = AccountRlp {
balance: to_account_before.balance + value,
..to_account_before
};
let mut children = core::array::from_fn(|_| Node::Empty.into());
children[beneficiary_nibbles.get_nibble(0) as usize] = Node::Leaf {
nibbles: beneficiary_nibbles.truncate_n_nibbles_front(1),
value: rlp::encode(&beneficiary_account_after).to_vec(),
}
.into();
children[sender_nibbles.get_nibble(0) as usize] = Node::Leaf {
nibbles: sender_nibbles.truncate_n_nibbles_front(1),
value: rlp::encode(&sender_account_after).to_vec(),
}
.into();
children[to_nibbles.get_nibble(0) as usize] = Node::Leaf {
nibbles: to_nibbles.truncate_n_nibbles_front(1),
value: rlp::encode(&to_account_after).to_vec(),
}
.into();
Node::Branch {
children,
value: vec![],
}
}
.into();
// Transactions trie.
let mut transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn1.to_vec(),
}
.into();
transactions_trie.insert(Nibbles::from_str("0x01").unwrap(), txn2.to_vec());
transactions_trie.insert(Nibbles::from_str("0x02").unwrap(), txn3.to_vec());
transactions_trie.insert(Nibbles::from_str("0x03").unwrap(), txn4.to_vec());
// Receipts trie.
let mut receipts_trie = HashedPartialTrie::from(Node::Empty);
let receipt_0 = LegacyReceiptRlp {
status: true,
cum_gas_used: gas_used.into(),
bloom: [0x00; 256].to_vec().into(),
logs: vec![],
};
let receipt_1 = LegacyReceiptRlp {
status: false,
cum_gas_used: gas_used.into(),
bloom: [0x00; 256].to_vec().into(),
logs: vec![],
};
receipts_trie.insert(
Nibbles::from_str("0x80").unwrap(),
rlp::encode(&receipt_0).to_vec(),
);
receipts_trie.insert(
Nibbles::from_str("0x01").unwrap(),
rlp::encode(&receipt_1).to_vec(),
);
receipts_trie.insert(
Nibbles::from_str("0x02").unwrap(),
rlp::encode(&receipt_1).to_vec(),
);
receipts_trie.insert(
Nibbles::from_str("0x03").unwrap(),
rlp::encode(&receipt_1).to_vec(),
);
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {
signed_txns: vec![txn1.to_vec(), txn2.to_vec(), txn3.to_vec(), txn4.to_vec()],
tries: tries_before,
trie_roots_after,
genesis_state_trie_root: HashedPartialTrie::from(Node::Empty).hash(),
contract_code,
block_metadata: block_metadata.clone(),
addresses: vec![],
block_bloom_before: [0.into(); 8],
gas_used_before: 0.into(),
gas_used_after: gas_used.into(),
txn_number_before: 0.into(),
block_bloom_after: [0.into(); 8],
block_hashes: BlockHashes {
prev_hashes: vec![H256::default(); 256],
cur_hash: H256::default(),
},
};
let mut timing = TimingTree::new("prove", log::Level::Debug);
let proof = prove::<F, C, D>(&all_stark, &config, inputs, &mut timing)?;
timing.filter(Duration::from_millis(100)).print();
verify_proof(&all_stark, proof, &config)
}
fn eth_to_wei(eth: U256) -> U256 {
// 1 ether = 10^18 wei.
eth * U256::from(10).pow(18.into())
}
fn init_logger() {
let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "info"));
}

View File

@ -158,9 +158,15 @@ fn self_balance_gas_cost() -> anyhow::Result<()> {
Nibbles::from_str("0x80").unwrap(),
rlp::encode(&receipt_0).to_vec(),
);
let transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {

View File

@ -55,6 +55,7 @@ fn test_simple_transfer() -> anyhow::Result<()> {
value: rlp::encode(&sender_account_before).to_vec(),
}
.into();
let tries_before = TrieInputs {
state_trie: state_trie_before,
transactions_trie: HashedPartialTrie::from(Node::Empty),
@ -125,10 +126,15 @@ fn test_simple_transfer() -> anyhow::Result<()> {
Nibbles::from_str("0x80").unwrap(),
rlp::encode(&receipt_0).to_vec(),
);
let transactions_trie: HashedPartialTrie = Node::Leaf {
nibbles: Nibbles::from_str("0x80").unwrap(),
value: txn.to_vec(),
}
.into();
let trie_roots_after = TrieRoots {
state_root: expected_state_trie_after.hash(),
transactions_root: tries_before.transactions_trie.hash(), // TODO: Fix this when we have transactions trie.
transactions_root: transactions_trie.hash(),
receipts_root: receipts_trie.hash(),
};
let inputs = GenerationInputs {