diff --git a/.circleci/config.yml b/.circleci/config.yml
new file mode 100644
index 000000000..02871530e
--- /dev/null
+++ b/.circleci/config.yml
@@ -0,0 +1,41 @@
+# Python CircleCI 2.0 configuration file
+version: 2
+jobs:
+  build:
+    docker:
+      - image: circleci/python:3.6
+    working_directory: ~/repo
+
+    steps:
+      - checkout
+      # Download and cache dependencies
+      - restore_cache:
+          keys:
+            - v1-dependencies-{{ checksum "requirements.txt" }}
+            # fallback to using the latest cache if no exact match is found
+            - v1-dependencies-
+
+      - run:
+          name: install dependencies
+          command: |
+            python3 -m venv venv
+            . venv/bin/activate
+            pip install -r requirements.txt
+      - run:
+          name: build phase0 spec
+          command: make build/phase0
+
+      - save_cache:
+          paths:
+            - ./venv
+          key: v1-dependencies-{{ checksum "requirements.txt" }}
+
+      - run:
+          name: run tests
+          command: |
+            . venv/bin/activate
+            pytest tests
+
+      - store_artifacts:
+          path: test-reports
+          destination: test-reports
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..dfb38d170
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,6 @@
+*.pyc
+/__pycache__
+/venv
+/.pytest_cache
+
+build/
diff --git a/Makefile b/Makefile
new file mode 100644
index 000000000..b45cec410
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,29 @@
+SPEC_DIR = ./specs
+SCRIPT_DIR = ./scripts
+BUILD_DIR = ./build
+UTILS_DIR = ./utils
+
+
+.PHONY: clean all test
+
+
+all: $(BUILD_DIR)/phase0
+
+
+clean:
+	rm -rf $(BUILD_DIR)
+
+
+# runs a limited set of tests against a minimal config
+# run pytest without the `-m` option to run the full suite
+test:
+	pytest -m "sanity and minimal_config" tests/
+
+
+$(BUILD_DIR)/phase0:
+	mkdir -p $@
+	python3 $(SCRIPT_DIR)/phase0/build_spec.py $(SPEC_DIR)/core/0_beacon-chain.md $@/spec.py
+	mkdir -p $@/utils
+	cp $(UTILS_DIR)/phase0/* $@/utils
+	cp $(UTILS_DIR)/phase0/state_transition.py $@
+	touch $@/__init__.py $@/utils/__init__.py
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 000000000..9145e951e
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,6 @@
+eth-utils>=1.3.0,<2
+eth-typing>=2.1.0,<3.0.0
+oyaml==0.7
+pycryptodome==3.7.3
+py_ecc>=1.6.0
+pytest>=3.6,<3.7
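
The Makefile target above is what CircleCI's `make build/phase0` step runs: it writes a plain importable package under `./build/phase0`, which is also what the tests and conftest below import. As a rough sketch (assuming the build has been run from the repository root and the dependencies in requirements.txt are installed), the generated module can be exercised directly:

    # Sketch only: pokes at the output of `make build/phase0`.
    from build.phase0 import spec

    print(spec.SLOTS_PER_EPOCH)            # constant pulled out of the spec's markdown tables
    print(list(spec.BeaconState.fields))   # SSZ container produced by the SSZType factory below
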
diff --git a/scripts/__init__.py b/scripts/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/scripts/phase0/__init__.py b/scripts/phase0/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/scripts/phase0/build_spec.py b/scripts/phase0/build_spec.py
new file mode 100644
index 000000000..ae5a5a4f2
--- /dev/null
+++ b/scripts/phase0/build_spec.py
@@ -0,0 +1,78 @@
+import sys
+import function_puller
+
+
+def build_spec(sourcefile, outfile):
+    code_lines = []
+
+    code_lines.append("from build.phase0.utils.minimal_ssz import *")
+    code_lines.append("from build.phase0.utils.bls_stub import *")
+    for i in (1, 2, 3, 4, 8, 32, 48, 96):
+        code_lines.append("def int_to_bytes%d(x): return x.to_bytes(%d, 'little')" % (i, i))
+    code_lines.append("SLOTS_PER_EPOCH = 64")  # stub, will get overwritten by real var
+    code_lines.append("def slot_to_epoch(x): return x // SLOTS_PER_EPOCH")
+
+    code_lines.append("""
+from typing import (
+    Any,
+    Callable,
+    List,
+    NewType,
+    Tuple,
+)
+
+
+Slot = NewType('Slot', int)  # uint64
+Epoch = NewType('Epoch', int)  # uint64
+Shard = NewType('Shard', int)  # uint64
+ValidatorIndex = NewType('ValidatorIndex', int)  # uint64
+Gwei = NewType('Gwei', int)  # uint64
+Bytes32 = NewType('Bytes32', bytes)  # bytes32
+BLSPubkey = NewType('BLSPubkey', bytes)  # bytes48
+BLSSignature = NewType('BLSSignature', bytes)  # bytes96
+Any = None
+Store = None
+    """)
+
+    code_lines += function_puller.get_lines(sourcefile)
+
+    code_lines.append("""
+# Monkey patch validator shuffling cache
+_get_shuffling = get_shuffling
+shuffling_cache = {}
+def get_shuffling(seed: Bytes32,
+                  validators: List[Validator],
+                  epoch: Epoch) -> List[List[ValidatorIndex]]:
+
+    param_hash = (seed, hash_tree_root(validators, [Validator]), epoch)
+
+    if param_hash in shuffling_cache:
+        # print("Cache hit, epoch={0}".format(epoch))
+        return shuffling_cache[param_hash]
+    else:
+        # print("Cache miss, epoch={0}".format(epoch))
+        ret = _get_shuffling(seed, validators, epoch)
+        shuffling_cache[param_hash] = ret
+        return ret
+
+
+# Monkey patch hash cache
+_hash = hash
+hash_cache = {}
+def hash(x):
+    if x in hash_cache:
+        return hash_cache[x]
+    else:
+        ret = _hash(x)
+        hash_cache[x] = ret
+        return ret
+    """)
+
+    with open(outfile, 'w') as out:
+        out.write("\n".join(code_lines))
+
+
+if __name__ == '__main__':
+    if len(sys.argv) < 3:
+        print("Error: spec source and outfile must be defined")
+        sys.exit(1)
+    build_spec(sys.argv[1], sys.argv[2])
diff --git a/scripts/phase0/function_puller.py b/scripts/phase0/function_puller.py
new file mode 100644
index 000000000..7d5796fc7
--- /dev/null
+++ b/scripts/phase0/function_puller.py
@@ -0,0 +1,46 @@
+import sys
+
+
+def get_lines(file_name):
+    code_lines = []
+    pulling_from = None
+    current_name = None
+    processing_typedef = False
+    for linenum, line in enumerate(open(file_name).readlines()):
+        line = line.rstrip()
+        if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
+            current_name = line[line[:-1].rfind('`') + 1: -1]
+        if line[:9] == '```python':
+            assert pulling_from is None
+            pulling_from = linenum + 1
+        elif line[:3] == '```':
+            if pulling_from is None:
+                pulling_from = linenum
+            else:
+                if processing_typedef:
+                    assert code_lines[-1] == '}'
+                    code_lines[-1] = '})'
+                pulling_from = None
+                processing_typedef = False
+        else:
+            if pulling_from == linenum and line == '{':
+                code_lines.append('%s = SSZType({' % current_name)
+                processing_typedef = True
+            elif pulling_from is not None:
+                code_lines.append(line)
+            elif pulling_from is None and len(line) > 0 and line[0] == '|':
+                row = line[1:].split('|')
+                if len(row) >= 2:
+                    for i in range(2):
+                        row[i] = row[i].strip().strip('`')
+                        if '`' in row[i]:
+                            row[i] = row[i][:row[i].find('`')]
+                    eligible = True
+                    if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
+                        eligible = False
+                    for c in row[0]:
+                        if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
+                            eligible = False
+                    if eligible:
+                        code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123567890123456789012357890')))
+    return code_lines
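
function_puller drives the whole build: python code fences in the markdown become lines of the generated module, a fenced block that opens with a bare `{` is wrapped into an SSZType definition named after the preceding backticked heading, and table rows whose first cell is an ALL_CAPS name become constant assignments. A hypothetical illustration of that mapping (the snippet is invented, not taken from 0_beacon-chain.md):

    # Markdown input (hypothetical):
    #   | `SHARD_COUNT` | `2**10` |
    #
    #   #### `Fork`
    #   ```python
    #   {
    #       'previous_version': 'bytes4',
    #       'current_version': 'bytes4',
    #       'epoch': 'uint64',
    #   }
    #   ```
    #
    # Lines appended to the generated spec.py for that input:
    SHARD_COUNT = 2**10
    Fork = SSZType({
        'previous_version': 'bytes4',
        'current_version': 'bytes4',
        'epoch': 'uint64',
    })
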
"LATEST_SLASHED_EXIT_LENGTH": 64, +} + + +@pytest.fixture +def privkeys(): + return privkeys_list + + +@pytest.fixture +def pubkeys(): + return pubkeys_list + + +def overwrite_spec_config(config): + for field in config: + setattr(spec, field, config[field]) + if field == "LATEST_RANDAO_MIXES_LENGTH": + spec.BeaconState.fields['latest_randao_mixes'][1] = config[field] + elif field == "SHARD_COUNT": + spec.BeaconState.fields['latest_crosslinks'][1] = config[field] + elif field == "SLOTS_PER_HISTORICAL_ROOT": + spec.BeaconState.fields['latest_block_roots'][1] = config[field] + spec.BeaconState.fields['latest_state_roots'][1] = config[field] + spec.HistoricalBatch.fields['block_roots'][1] = config[field] + spec.HistoricalBatch.fields['state_roots'][1] = config[field] + elif field == "LATEST_ACTIVE_INDEX_ROOTS_LENGTH": + spec.BeaconState.fields['latest_active_index_roots'][1] = config[field] + elif field == "LATEST_SLASHED_EXIT_LENGTH": + spec.BeaconState.fields['latest_slashed_balances'][1] = config[field] + + +@pytest.fixture( + params=[ + pytest.param(MINIMAL_CONFIG, marks=pytest.mark.minimal_config), + DEFAULT_CONFIG, + ] +) +def config(request): + return request.param + + +@pytest.fixture(autouse=True) +def overwrite_config(config): + overwrite_spec_config(config) + + +@pytest.fixture +def num_validators(): + return 100 + + +@pytest.fixture +def deposit_data_leaves(): + return list() + + +@pytest.fixture +def state(num_validators, deposit_data_leaves): + return create_genesis_state(num_validators, deposit_data_leaves) diff --git a/tests/phase0/helpers.py b/tests/phase0/helpers.py new file mode 100644 index 000000000..510361e9c --- /dev/null +++ b/tests/phase0/helpers.py @@ -0,0 +1,145 @@ +from copy import deepcopy + +from py_ecc import bls + +import build.phase0.spec as spec +from build.phase0.utils.minimal_ssz import signed_root +from build.phase0.spec import ( + # constants + EMPTY_SIGNATURE, + # SSZ + AttestationData, + Deposit, + DepositInput, + DepositData, + Eth1Data, + # functions + get_block_root, + get_current_epoch, + get_domain, + get_empty_block, + get_epoch_start_slot, + get_genesis_beacon_state, + verify_merkle_branch, + hash, +) +from build.phase0.utils.merkle_minimal import ( + calc_merkle_tree_from_leaves, + get_merkle_proof, + get_merkle_root, +) + + +privkeys_list = [i + 1 for i in range(1000)] +pubkeys_list = [bls.privtopub(privkey) for privkey in privkeys_list] +pubkey_to_privkey = {pubkey: privkey for privkey, pubkey in zip(privkeys_list, pubkeys_list)} + + +def create_mock_genesis_validator_deposits(num_validators, deposit_data_leaves): + deposit_timestamp = 0 + proof_of_possession = b'\x33' * 96 + + deposit_data_list = [] + for i in range(num_validators): + pubkey = pubkeys_list[i] + deposit_data = DepositData( + amount=spec.MAX_DEPOSIT_AMOUNT, + timestamp=deposit_timestamp, + deposit_input=DepositInput( + pubkey=pubkey, + # insecurely use pubkey as withdrawal key as well + withdrawal_credentials=spec.BLS_WITHDRAWAL_PREFIX_BYTE + hash(pubkey)[1:], + proof_of_possession=proof_of_possession, + ), + ) + item = hash(deposit_data.serialize()) + deposit_data_leaves.append(item) + tree = calc_merkle_tree_from_leaves(tuple(deposit_data_leaves)) + root = get_merkle_root((tuple(deposit_data_leaves))) + proof = list(get_merkle_proof(tree, item_index=i)) + assert verify_merkle_branch(item, proof, spec.DEPOSIT_CONTRACT_TREE_DEPTH, i, root) + deposit_data_list.append(deposit_data) + + genesis_validator_deposits = [] + for i in range(num_validators): + 
diff --git a/tests/phase0/helpers.py b/tests/phase0/helpers.py
new file mode 100644
index 000000000..510361e9c
--- /dev/null
+++ b/tests/phase0/helpers.py
@@ -0,0 +1,145 @@
+from copy import deepcopy
+
+from py_ecc import bls
+
+import build.phase0.spec as spec
+from build.phase0.utils.minimal_ssz import signed_root
+from build.phase0.spec import (
+    # constants
+    EMPTY_SIGNATURE,
+    # SSZ
+    AttestationData,
+    Deposit,
+    DepositInput,
+    DepositData,
+    Eth1Data,
+    # functions
+    get_block_root,
+    get_current_epoch,
+    get_domain,
+    get_empty_block,
+    get_epoch_start_slot,
+    get_genesis_beacon_state,
+    verify_merkle_branch,
+    hash,
+)
+from build.phase0.utils.merkle_minimal import (
+    calc_merkle_tree_from_leaves,
+    get_merkle_proof,
+    get_merkle_root,
+)
+
+
+privkeys_list = [i + 1 for i in range(1000)]
+pubkeys_list = [bls.privtopub(privkey) for privkey in privkeys_list]
+pubkey_to_privkey = {pubkey: privkey for privkey, pubkey in zip(privkeys_list, pubkeys_list)}
+
+
+def create_mock_genesis_validator_deposits(num_validators, deposit_data_leaves):
+    deposit_timestamp = 0
+    proof_of_possession = b'\x33' * 96
+
+    deposit_data_list = []
+    for i in range(num_validators):
+        pubkey = pubkeys_list[i]
+        deposit_data = DepositData(
+            amount=spec.MAX_DEPOSIT_AMOUNT,
+            timestamp=deposit_timestamp,
+            deposit_input=DepositInput(
+                pubkey=pubkey,
+                # insecurely use pubkey as withdrawal key as well
+                withdrawal_credentials=spec.BLS_WITHDRAWAL_PREFIX_BYTE + hash(pubkey)[1:],
+                proof_of_possession=proof_of_possession,
+            ),
+        )
+        item = hash(deposit_data.serialize())
+        deposit_data_leaves.append(item)
+        tree = calc_merkle_tree_from_leaves(tuple(deposit_data_leaves))
+        root = get_merkle_root((tuple(deposit_data_leaves)))
+        proof = list(get_merkle_proof(tree, item_index=i))
+        assert verify_merkle_branch(item, proof, spec.DEPOSIT_CONTRACT_TREE_DEPTH, i, root)
+        deposit_data_list.append(deposit_data)
+
+    genesis_validator_deposits = []
+    for i in range(num_validators):
+        genesis_validator_deposits.append(Deposit(
+            proof=list(get_merkle_proof(tree, item_index=i)),
+            index=i,
+            deposit_data=deposit_data_list[i]
+        ))
+    return genesis_validator_deposits, root
+
+
+def create_genesis_state(num_validators, deposit_data_leaves):
+    initial_deposits, deposit_root = create_mock_genesis_validator_deposits(num_validators, deposit_data_leaves)
+    return get_genesis_beacon_state(
+        initial_deposits,
+        genesis_time=0,
+        genesis_eth1_data=Eth1Data(
+            deposit_root=deposit_root,
+            block_hash=spec.ZERO_HASH,
+        ),
+    )
+
+
+def build_empty_block_for_next_slot(state):
+    empty_block = get_empty_block()
+    empty_block.slot = state.slot + 1
+    previous_block_header = deepcopy(state.latest_block_header)
+    if previous_block_header.state_root == spec.ZERO_HASH:
+        previous_block_header.state_root = state.hash_tree_root()
+    empty_block.previous_block_root = previous_block_header.hash_tree_root()
+    return empty_block
+
+
+def build_deposit_data(state, pubkey, privkey, amount):
+    deposit_input = DepositInput(
+        pubkey=pubkey,
+        # insecurely use pubkey as withdrawal key as well
+        withdrawal_credentials=spec.BLS_WITHDRAWAL_PREFIX_BYTE + hash(pubkey)[1:],
+        proof_of_possession=EMPTY_SIGNATURE,
+    )
+    proof_of_possession = bls.sign(
+        message_hash=signed_root(deposit_input),
+        privkey=privkey,
+        domain=get_domain(
+            state.fork,
+            get_current_epoch(state),
+            spec.DOMAIN_DEPOSIT,
+        )
+    )
+    deposit_input.proof_of_possession = proof_of_possession
+    deposit_data = DepositData(
+        amount=amount,
+        timestamp=0,
+        deposit_input=deposit_input,
+    )
+    return deposit_data
+
+
+def build_attestation_data(state, slot, shard):
+    assert state.slot >= slot
+
+    block_root = build_empty_block_for_next_slot(state).previous_block_root
+
+    epoch_start_slot = get_epoch_start_slot(get_current_epoch(state))
+    if epoch_start_slot == slot:
+        epoch_boundary_root = block_root
+    else:
+        epoch_boundary_root = get_block_root(state, epoch_start_slot)
+
+    if slot < epoch_start_slot:
+        justified_block_root = state.previous_justified_root
+    else:
+        justified_block_root = state.current_justified_root
+
+    return AttestationData(
+        slot=slot,
+        shard=shard,
+        beacon_block_root=block_root,
+        source_epoch=state.current_justified_epoch,
+        source_root=justified_block_root,
+        target_root=epoch_boundary_root,
+        crosslink_data_root=spec.ZERO_HASH,
+        previous_crosslink=deepcopy(state.latest_crosslinks[shard]),
+    )
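
Taken together, these helpers are the scaffold every test below follows: build a mock genesis state from insecure deterministic keys, stack a block on top of it, and run both through the state transition. A condensed sketch (using the same 100-validator count as the `num_validators` fixture; assumes the built spec is importable):

    from build.phase0.state_transition import state_transition
    from tests.phase0.helpers import build_empty_block_for_next_slot, create_genesis_state

    state = create_genesis_state(num_validators=100, deposit_data_leaves=[])
    block = build_empty_block_for_next_slot(state)
    state_transition(state, block)
    assert state.slot == block.slot
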
diff --git a/tests/phase0/test_sanity.py b/tests/phase0/test_sanity.py
new file mode 100644
index 000000000..8f04f316c
--- /dev/null
+++ b/tests/phase0/test_sanity.py
@@ -0,0 +1,455 @@
+from copy import deepcopy
+
+import pytest
+
+from py_ecc import bls
+import build.phase0.spec as spec
+
+from build.phase0.utils.minimal_ssz import signed_root
+from build.phase0.spec import (
+    # constants
+    EMPTY_SIGNATURE,
+    ZERO_HASH,
+    # SSZ
+    Attestation,
+    AttestationDataAndCustodyBit,
+    BeaconBlockHeader,
+    Deposit,
+    Transfer,
+    ProposerSlashing,
+    VoluntaryExit,
+    # functions
+    get_active_validator_indices,
+    get_attestation_participants,
+    get_block_root,
+    get_crosslink_committees_at_slot,
+    get_current_epoch,
+    get_domain,
+    get_state_root,
+    advance_slot,
+    cache_state,
+    verify_merkle_branch,
+    hash,
+)
+from build.phase0.state_transition import (
+    state_transition,
+)
+from build.phase0.utils.merkle_minimal import (
+    calc_merkle_tree_from_leaves,
+    get_merkle_proof,
+    get_merkle_root,
+)
+from tests.phase0.helpers import (
+    build_attestation_data,
+    build_deposit_data,
+    build_empty_block_for_next_slot,
+)
+
+
+# mark entire file as 'sanity'
+pytestmark = pytest.mark.sanity
+
+
+def test_slot_transition(state):
+    test_state = deepcopy(state)
+    cache_state(test_state)
+    advance_slot(test_state)
+    assert test_state.slot == state.slot + 1
+    assert get_state_root(test_state, state.slot) == state.hash_tree_root()
+    return test_state
+
+
+def test_empty_block_transition(state):
+    test_state = deepcopy(state)
+
+    block = build_empty_block_for_next_slot(test_state)
+    state_transition(test_state, block)
+
+    assert len(test_state.eth1_data_votes) == len(state.eth1_data_votes) + 1
+    assert get_block_root(test_state, state.slot) == block.previous_block_root
+
+    return state, [block], test_state
+
+
+def test_skipped_slots(state):
+    test_state = deepcopy(state)
+    block = build_empty_block_for_next_slot(test_state)
+    block.slot += 3
+
+    state_transition(test_state, block)
+
+    assert test_state.slot == block.slot
+    for slot in range(state.slot, test_state.slot):
+        assert get_block_root(test_state, slot) == block.previous_block_root
+
+    return state, [block], test_state
+
+
+def test_empty_epoch_transition(state):
+    test_state = deepcopy(state)
+    block = build_empty_block_for_next_slot(test_state)
+    block.slot += spec.SLOTS_PER_EPOCH
+
+    state_transition(test_state, block)
+
+    assert test_state.slot == block.slot
+    for slot in range(state.slot, test_state.slot):
+        assert get_block_root(test_state, slot) == block.previous_block_root
+
+    return state, [block], test_state
+
+
+def test_empty_epoch_transition_not_finalizing(state):
+    test_state = deepcopy(state)
+    block = build_empty_block_for_next_slot(test_state)
+    block.slot += spec.SLOTS_PER_EPOCH * 5
+
+    state_transition(test_state, block)
+
+    assert test_state.slot == block.slot
+    assert test_state.finalized_epoch < get_current_epoch(test_state) - 4
+
+    return state, [block], test_state
+
+
+def test_proposer_slashing(state, pubkeys, privkeys):
+    test_state = deepcopy(state)
+    current_epoch = get_current_epoch(test_state)
+    validator_index = get_active_validator_indices(test_state.validator_registry, current_epoch)[-1]
+    privkey = privkeys[validator_index]
+    slot = spec.GENESIS_SLOT
+    header_1 = BeaconBlockHeader(
+        slot=slot,
+        previous_block_root=ZERO_HASH,
+        state_root=ZERO_HASH,
+        block_body_root=ZERO_HASH,
+        signature=EMPTY_SIGNATURE,
+    )
+    header_2 = deepcopy(header_1)
+    header_2.previous_block_root = b'\x02' * 32
+    header_2.slot = slot + 1
+
+    domain = get_domain(
+        fork=test_state.fork,
+        epoch=get_current_epoch(test_state),
+        domain_type=spec.DOMAIN_BEACON_BLOCK,
+    )
+    header_1.signature = bls.sign(
+        message_hash=signed_root(header_1),
+        privkey=privkey,
+        domain=domain,
+    )
+    header_2.signature = bls.sign(
+        message_hash=signed_root(header_2),
+        privkey=privkey,
+        domain=domain,
+    )
+
+    proposer_slashing = ProposerSlashing(
+        proposer_index=validator_index,
+        header_1=header_1,
+        header_2=header_2,
+    )
+
+    #
+    # Add to state via block transition
+    #
+    block = build_empty_block_for_next_slot(test_state)
+    block.body.proposer_slashings.append(proposer_slashing)
+    state_transition(test_state, block)
+
+    assert not state.validator_registry[validator_index].initiated_exit
+    assert not state.validator_registry[validator_index].slashed
+
+    slashed_validator = test_state.validator_registry[validator_index]
+    assert not slashed_validator.initiated_exit
+    assert slashed_validator.slashed
+    assert slashed_validator.exit_epoch < spec.FAR_FUTURE_EPOCH
+    assert slashed_validator.withdrawable_epoch < spec.FAR_FUTURE_EPOCH
+    # lost whistleblower reward
+    assert test_state.validator_balances[validator_index] < state.validator_balances[validator_index]
+
+    return state, [block], test_state
+
+
+def test_deposit_in_block(state, deposit_data_leaves, pubkeys, privkeys):
+    pre_state = deepcopy(state)
+    test_deposit_data_leaves = deepcopy(deposit_data_leaves)
+
+    index = len(test_deposit_data_leaves)
+    pubkey = pubkeys[index]
+    privkey = privkeys[index]
+    deposit_data = build_deposit_data(pre_state, pubkey, privkey, spec.MAX_DEPOSIT_AMOUNT)
+
+    item = hash(deposit_data.serialize())
+    test_deposit_data_leaves.append(item)
+    tree = calc_merkle_tree_from_leaves(tuple(test_deposit_data_leaves))
+    root = get_merkle_root((tuple(test_deposit_data_leaves)))
+    proof = list(get_merkle_proof(tree, item_index=index))
+    assert verify_merkle_branch(item, proof, spec.DEPOSIT_CONTRACT_TREE_DEPTH, index, root)
+
+    deposit = Deposit(
+        proof=list(proof),
+        index=index,
+        deposit_data=deposit_data,
+    )
+
+    pre_state.latest_eth1_data.deposit_root = root
+    post_state = deepcopy(pre_state)
+    block = build_empty_block_for_next_slot(post_state)
+    block.body.deposits.append(deposit)
+
+    state_transition(post_state, block)
+    assert len(post_state.validator_registry) == len(state.validator_registry) + 1
+    assert len(post_state.validator_balances) == len(state.validator_balances) + 1
+    assert post_state.validator_registry[index].pubkey == pubkeys[index]
+
+    return pre_state, [block], post_state
+
+
+def test_deposit_top_up(state, pubkeys, privkeys, deposit_data_leaves):
+    pre_state = deepcopy(state)
+    test_deposit_data_leaves = deepcopy(deposit_data_leaves)
+
+    validator_index = 0
+    amount = spec.MAX_DEPOSIT_AMOUNT // 4
+    pubkey = pubkeys[validator_index]
+    privkey = privkeys[validator_index]
+    deposit_data = build_deposit_data(pre_state, pubkey, privkey, amount)
+
+    merkle_index = len(test_deposit_data_leaves)
+    item = hash(deposit_data.serialize())
+    test_deposit_data_leaves.append(item)
+    tree = calc_merkle_tree_from_leaves(tuple(test_deposit_data_leaves))
+    root = get_merkle_root((tuple(test_deposit_data_leaves)))
+    proof = list(get_merkle_proof(tree, item_index=merkle_index))
+    assert verify_merkle_branch(item, proof, spec.DEPOSIT_CONTRACT_TREE_DEPTH, merkle_index, root)
+
+    deposit = Deposit(
+        proof=list(proof),
+        index=merkle_index,
+        deposit_data=deposit_data,
+    )
+
+    pre_state.latest_eth1_data.deposit_root = root
+    block = build_empty_block_for_next_slot(pre_state)
+    block.body.deposits.append(deposit)
+
+    pre_balance = pre_state.validator_balances[validator_index]
+    post_state = deepcopy(pre_state)
+    state_transition(post_state, block)
+    assert len(post_state.validator_registry) == len(pre_state.validator_registry)
+    assert len(post_state.validator_balances) == len(pre_state.validator_balances)
+    assert post_state.validator_balances[validator_index] == pre_balance + amount
+
+    return pre_state, [block], post_state
+
+
+def test_attestation(state, pubkeys, privkeys):
+    test_state = deepcopy(state)
+    slot = state.slot
+    shard = state.current_shuffling_start_shard
+    attestation_data = build_attestation_data(state, slot, shard)
+
+    crosslink_committees = get_crosslink_committees_at_slot(state, slot)
+    crosslink_committee = [committee for committee, _shard in crosslink_committees if _shard == attestation_data.shard][0]
+
+    committee_size = len(crosslink_committee)
+    bitfield_length = (committee_size + 7) // 8
+    aggregation_bitfield = b'\x01' + b'\x00' * (bitfield_length - 1)
+    custody_bitfield = b'\x00' * bitfield_length
+    attestation = Attestation(
+        aggregation_bitfield=aggregation_bitfield,
+        data=attestation_data,
+        custody_bitfield=custody_bitfield,
+        aggregate_signature=EMPTY_SIGNATURE,
+    )
+    participants = get_attestation_participants(
+        test_state,
+        attestation.data,
+        attestation.aggregation_bitfield,
+    )
+    assert len(participants) == 1
+
+    validator_index = participants[0]
+    privkey = privkeys[validator_index]
+
+    message_hash = AttestationDataAndCustodyBit(
+        data=attestation.data,
+        custody_bit=0b0,
+    ).hash_tree_root()
+
+    attestation.aggregate_signature = bls.sign(
+        message_hash=message_hash,
+        privkey=privkey,
+        domain=get_domain(
+            fork=test_state.fork,
+            epoch=get_current_epoch(test_state),
+            domain_type=spec.DOMAIN_ATTESTATION,
+        )
+    )
+
+    #
+    # Add to state via block transition
+    #
+    attestation_block = build_empty_block_for_next_slot(test_state)
+    attestation_block.slot += spec.MIN_ATTESTATION_INCLUSION_DELAY
+    attestation_block.body.attestations.append(attestation)
+    state_transition(test_state, attestation_block)
+
+    assert len(test_state.current_epoch_attestations) == len(state.current_epoch_attestations) + 1
+
+    #
+    # Epoch transition should move to previous_epoch_attestations
+    #
+    pre_current_epoch_attestations = deepcopy(test_state.current_epoch_attestations)
+
+    epoch_block = build_empty_block_for_next_slot(test_state)
+    epoch_block.slot += spec.SLOTS_PER_EPOCH
+    state_transition(test_state, epoch_block)
+
+    assert len(test_state.current_epoch_attestations) == 0
+    assert test_state.previous_epoch_attestations == pre_current_epoch_attestations
+
+    return state, [attestation_block, epoch_block], test_state
+
+
+def test_voluntary_exit(state, pubkeys, privkeys):
+    pre_state = deepcopy(state)
+    validator_index = get_active_validator_indices(pre_state.validator_registry, get_current_epoch(pre_state))[-1]
+
+    # move state forward PERSISTENT_COMMITTEE_PERIOD epochs to allow for exit
+    pre_state.slot += spec.PERSISTENT_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
+    # artificially trigger registry update at next epoch transition
+    pre_state.validator_registry_update_epoch -= 1
+
+    post_state = deepcopy(pre_state)
+
+    voluntary_exit = VoluntaryExit(
+        epoch=get_current_epoch(pre_state),
+        validator_index=validator_index,
+        signature=EMPTY_SIGNATURE,
+    )
+    voluntary_exit.signature = bls.sign(
+        message_hash=signed_root(voluntary_exit),
+        privkey=privkeys[validator_index],
+        domain=get_domain(
+            fork=pre_state.fork,
+            epoch=get_current_epoch(pre_state),
+            domain_type=spec.DOMAIN_VOLUNTARY_EXIT,
+        )
+    )
+
+    #
+    # Add to state via block transition
+    #
+    initiate_exit_block = build_empty_block_for_next_slot(post_state)
+    initiate_exit_block.body.voluntary_exits.append(voluntary_exit)
+    state_transition(post_state, initiate_exit_block)
+
+    assert not pre_state.validator_registry[validator_index].initiated_exit
+    assert post_state.validator_registry[validator_index].initiated_exit
+    assert post_state.validator_registry[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH
+
+    #
+    # Process within epoch transition
+    #
+    exit_block = build_empty_block_for_next_slot(post_state)
+    exit_block.slot += spec.SLOTS_PER_EPOCH
+    state_transition(post_state, exit_block)
+
+    assert post_state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
+
+    return pre_state, [initiate_exit_block, exit_block], post_state
+
+
+def test_transfer(state, pubkeys, privkeys):
+    pre_state = deepcopy(state)
+    current_epoch = get_current_epoch(pre_state)
+    sender_index = get_active_validator_indices(pre_state.validator_registry, current_epoch)[-1]
+    recipient_index = get_active_validator_indices(pre_state.validator_registry, current_epoch)[0]
+    transfer_pubkey = pubkeys[-1]
+    transfer_privkey = privkeys[-1]
+    amount = pre_state.validator_balances[sender_index]
+    pre_transfer_recipient_balance = pre_state.validator_balances[recipient_index]
+    transfer = Transfer(
+        sender=sender_index,
+        recipient=recipient_index,
+        amount=amount,
+        fee=0,
+        slot=pre_state.slot + 1,
+        pubkey=transfer_pubkey,
+        signature=EMPTY_SIGNATURE,
+    )
+    transfer.signature = bls.sign(
+        message_hash=signed_root(transfer),
+        privkey=transfer_privkey,
+        domain=get_domain(
+            fork=pre_state.fork,
+            epoch=get_current_epoch(pre_state),
+            domain_type=spec.DOMAIN_TRANSFER,
+        )
+    )
+
+    # ensure withdrawal_credentials reproducible
+    pre_state.validator_registry[sender_index].withdrawal_credentials = (
+        spec.BLS_WITHDRAWAL_PREFIX_BYTE + hash(transfer_pubkey)[1:]
+    )
+    # un-activate so validator can transfer
+    pre_state.validator_registry[sender_index].activation_epoch = spec.FAR_FUTURE_EPOCH
+
+    post_state = deepcopy(pre_state)
+    #
+    # Add to state via block transition
+    #
+    block = build_empty_block_for_next_slot(post_state)
+    block.body.transfers.append(transfer)
+    state_transition(post_state, block)
+
+    sender_balance = post_state.validator_balances[sender_index]
+    recipient_balance = post_state.validator_balances[recipient_index]
+    assert sender_balance == 0
+    assert recipient_balance == pre_transfer_recipient_balance + amount
+
+    return pre_state, [block], post_state
+
+
+def test_ejection(state):
+    pre_state = deepcopy(state)
+
+    current_epoch = get_current_epoch(pre_state)
+    validator_index = get_active_validator_indices(pre_state.validator_registry, current_epoch)[-1]
+
+    assert pre_state.validator_registry[validator_index].exit_epoch == spec.FAR_FUTURE_EPOCH
+
+    # set validator balance to below ejection threshold
+    pre_state.validator_balances[validator_index] = spec.EJECTION_BALANCE - 1
+
+    post_state = deepcopy(pre_state)
+    #
+    # trigger epoch transition
+    #
+    block = build_empty_block_for_next_slot(post_state)
+    block.slot += spec.SLOTS_PER_EPOCH
+    state_transition(post_state, block)
+
+    assert post_state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
+
+    return pre_state, [block], post_state
+
+
+def test_historical_batch(state):
+    pre_state = deepcopy(state)
+    pre_state.slot += spec.SLOTS_PER_HISTORICAL_ROOT - (pre_state.slot % spec.SLOTS_PER_HISTORICAL_ROOT) - 1
+
+    post_state = deepcopy(pre_state)
+
+    block = build_empty_block_for_next_slot(post_state)
+
+    state_transition(post_state, block)
+
+    assert post_state.slot == block.slot
+    assert get_current_epoch(post_state) % (spec.SLOTS_PER_HISTORICAL_ROOT // spec.SLOTS_PER_EPOCH) == 0
+    assert len(post_state.historical_roots) == len(pre_state.historical_roots) + 1
+
+    return pre_state, [block], post_state
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/utils/phase0/__init__.py b/utils/phase0/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/utils/phase0/bls_stub.py b/utils/phase0/bls_stub.py
new file mode 100644
index 000000000..108c4ef71
--- /dev/null
+++ b/utils/phase0/bls_stub.py
@@ -0,0 +1,12 @@
+
+
+def bls_verify(pubkey, message_hash, signature, domain):
+    return True
+
+
+def bls_verify_multiple(pubkeys, message_hashes, signature, domain):
+    return True
+
+
+def bls_aggregate_pubkeys(pubkeys):
+    return b'\x42' * 96
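
bls_stub is what lets the generated spec import and run quickly: every signature check in the spec succeeds unconditionally, so the signatures built in the tests above are constructed but never actually verified. A hedged sketch of what a real backing for the single-signature case might look like (assuming py_ecc's `bls.verify` takes the message hash, pubkey, signature and domain in that order, mirroring the `bls.sign` keyword usage in the tests):

    # Hypothetical replacement, not part of this change; the argument order is an assumption.
    from py_ecc import bls

    def bls_verify(pubkey, message_hash, signature, domain):
        return bls.verify(message_hash, pubkey, signature, domain)
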
diff --git a/utils/phase0/hash_function.py b/utils/phase0/hash_function.py
new file mode 100644
index 000000000..21e6555bf
--- /dev/null
+++ b/utils/phase0/hash_function.py
@@ -0,0 +1,7 @@
+# from hashlib import sha256
+from eth_utils import keccak
+
+
+# def hash(x): return sha256(x).digest()
+def hash(x):
+    return keccak(x)
diff --git a/utils/phase0/merkle_minimal.py b/utils/phase0/merkle_minimal.py
new file mode 100644
index 000000000..7c5483de3
--- /dev/null
+++ b/utils/phase0/merkle_minimal.py
@@ -0,0 +1,30 @@
+from .hash_function import hash
+
+
+zerohashes = [b'\x00' * 32]
+for layer in range(1, 32):
+    zerohashes.append(hash(zerohashes[layer - 1] + zerohashes[layer - 1]))
+
+
+# Compute a Merkle root of a right-zerobyte-padded 2**32 sized tree
+def calc_merkle_tree_from_leaves(values):
+    values = list(values)
+    tree = [values[::]]
+    for h in range(32):
+        if len(values) % 2 == 1:
+            values.append(zerohashes[h])
+        values = [hash(values[i] + values[i + 1]) for i in range(0, len(values), 2)]
+        tree.append(values[::])
+    return tree
+
+
+def get_merkle_root(values):
+    return calc_merkle_tree_from_leaves(values)[-1][0]
+
+
+def get_merkle_proof(tree, item_index):
+    proof = []
+    for i in range(32):
+        subindex = (item_index // 2**i) ^ 1
+        proof.append(tree[i][subindex] if subindex < len(tree[i]) else zerohashes[i])
+    return proof
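
These helpers mirror how the deposit tree is handled in tests/phase0/helpers.py: build the zero-padded depth-32 tree once, take its root, and hand out one proof per leaf. A small self-contained sketch (assuming the built copies of these utils are importable):

    from build.phase0.utils.hash_function import hash
    from build.phase0.utils.merkle_minimal import (
        calc_merkle_tree_from_leaves,
        get_merkle_proof,
        get_merkle_root,
    )

    leaves = [hash(bytes([i]) * 32) for i in range(5)]
    tree = calc_merkle_tree_from_leaves(tuple(leaves))
    root = get_merkle_root(leaves)
    proof = get_merkle_proof(tree, item_index=2)

    assert root == tree[-1][0]
    assert len(proof) == 32   # one sibling per layer of the depth-32 tree
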
diff --git a/utils/phase0/minimal_ssz.py b/utils/phase0/minimal_ssz.py
new file mode 100644
index 000000000..08bd68357
--- /dev/null
+++ b/utils/phase0/minimal_ssz.py
@@ -0,0 +1,206 @@
+from .hash_function import hash
+
+
+BYTES_PER_CHUNK = 32
+BYTES_PER_LENGTH_PREFIX = 4
+ZERO_CHUNK = b'\x00' * BYTES_PER_CHUNK
+
+
+def SSZType(fields):
+    class SSZObject():
+        def __init__(self, **kwargs):
+            for f in fields:
+                if f not in kwargs:
+                    raise Exception("Missing constructor argument: %s" % f)
+                setattr(self, f, kwargs[f])
+
+        def __eq__(self, other):
+            return (
+                self.fields == other.fields and
+                self.serialize() == other.serialize()
+            )
+
+        def __hash__(self):
+            return int.from_bytes(self.hash_tree_root(), byteorder="little")
+
+        def __str__(self):
+            output = []
+            for field in self.fields:
+                output.append(f'{field}: {getattr(self, field)}')
+            return "\n".join(output)
+
+        def serialize(self):
+            return serialize_value(self, self.__class__)
+
+        def hash_tree_root(self):
+            return hash_tree_root(self, self.__class__)
+
+    SSZObject.fields = fields
+    return SSZObject
+
+
+class Vector(list):
+    def __init__(self, x):
+        list.__init__(self, x)
+        self.length = len(x)
+
+    def append(*args):
+        raise Exception("Cannot change the length of a vector")
+
+    remove = clear = extend = pop = insert = append
+
+
+def is_basic(typ):
+    return isinstance(typ, str) and (typ[:4] in ('uint', 'bool') or typ == 'byte')
+
+
+def is_constant_sized(typ):
+    if is_basic(typ):
+        return True
+    elif isinstance(typ, list) and len(typ) == 1:
+        return is_constant_sized(typ[0])
+    elif isinstance(typ, list) and len(typ) == 2:
+        return False
+    elif isinstance(typ, str) and typ[:5] == 'bytes':
+        return len(typ) > 5
+    elif hasattr(typ, 'fields'):
+        for subtype in typ.fields.values():
+            if not is_constant_sized(subtype):
+                return False
+        return True
+    else:
+        raise Exception("Type not recognized")
+
+
+def coerce_to_bytes(x):
+    if isinstance(x, str):
+        o = x.encode('utf-8')
+        assert len(o) == len(x)
+        return o
+    elif isinstance(x, bytes):
+        return x
+    else:
+        raise Exception("Expecting bytes")
+
+
+def serialize_value(value, typ=None):
+    if typ is None:
+        typ = infer_type(value)
+    if isinstance(typ, str) and typ[:4] == 'uint':
+        length = int(typ[4:])
+        assert length in (8, 16, 32, 64, 128, 256)
+        return value.to_bytes(length // 8, 'little')
+    elif typ == 'bool':
+        assert value in (True, False)
+        return b'\x01' if value is True else b'\x00'
+    elif (isinstance(typ, list) and len(typ) == 1) or typ == 'bytes':
+        serialized_bytes = coerce_to_bytes(value) if typ == 'bytes' else b''.join([serialize_value(element, typ[0]) for element in value])
+        assert len(serialized_bytes) < 2**(8 * BYTES_PER_LENGTH_PREFIX)
+        serialized_length = len(serialized_bytes).to_bytes(BYTES_PER_LENGTH_PREFIX, 'little')
+        return serialized_length + serialized_bytes
+    elif isinstance(typ, list) and len(typ) == 2:
+        assert len(value) == typ[1]
+        return b''.join([serialize_value(element, typ[0]) for element in value])
+    elif isinstance(typ, str) and len(typ) > 5 and typ[:5] == 'bytes':
+        assert len(value) == int(typ[5:]), (value, int(typ[5:]))
+        return coerce_to_bytes(value)
+    elif hasattr(typ, 'fields'):
+        serialized_bytes = b''.join([serialize_value(getattr(value, field), subtype) for field, subtype in typ.fields.items()])
+        if is_constant_sized(typ):
+            return serialized_bytes
+        else:
+            assert len(serialized_bytes) < 2**(8 * BYTES_PER_LENGTH_PREFIX)
+            serialized_length = len(serialized_bytes).to_bytes(BYTES_PER_LENGTH_PREFIX, 'little')
+            return serialized_length + serialized_bytes
+    else:
+        print(value, typ)
+        raise Exception("Type not recognized")
+
+
+def chunkify(bytez):
+    bytez += b'\x00' * (-len(bytez) % BYTES_PER_CHUNK)
+    return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]
+
+
+def pack(values, subtype):
+    return chunkify(b''.join([serialize_value(value, subtype) for value in values]))
+
+
+def is_power_of_two(x):
+    return x > 0 and x & (x - 1) == 0
+
+
+def merkleize(chunks):
+    tree = chunks[::]
+    while not is_power_of_two(len(tree)):
+        tree.append(ZERO_CHUNK)
+    tree = [ZERO_CHUNK] * len(tree) + tree
+    for i in range(len(tree) // 2 - 1, 0, -1):
+        tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
+    return tree[1]
+
+
+def mix_in_length(root, length):
+    return hash(root + length.to_bytes(32, 'little'))
+
+
+def infer_type(value):
+    if hasattr(value.__class__, 'fields'):
+        return value.__class__
+    elif isinstance(value, Vector):
+        return [infer_type(value[0]) if len(value) > 0 else 'uint64', len(value)]
+    elif isinstance(value, list):
+        return [infer_type(value[0])] if len(value) > 0 else ['uint64']
+    elif isinstance(value, (bytes, str)):
+        return 'bytes'
+    elif isinstance(value, int):
+        return 'uint64'
+    else:
+        raise Exception("Failed to infer type")
+
+
+def hash_tree_root(value, typ=None):
+    if typ is None:
+        typ = infer_type(value)
+    if is_basic(typ):
+        return merkleize(pack([value], typ))
+    elif isinstance(typ, list) and len(typ) == 1 and is_basic(typ[0]):
+        return mix_in_length(merkleize(pack(value, typ[0])), len(value))
+    elif isinstance(typ, list) and len(typ) == 1 and not is_basic(typ[0]):
+        return mix_in_length(merkleize([hash_tree_root(element, typ[0]) for element in value]), len(value))
+    elif isinstance(typ, list) and len(typ) == 2 and is_basic(typ[0]):
+        assert len(value) == typ[1]
+        return merkleize(pack(value, typ[0]))
+    elif typ == 'bytes':
+        return mix_in_length(merkleize(chunkify(coerce_to_bytes(value))), len(value))
+    elif isinstance(typ, str) and typ[:5] == 'bytes' and len(typ) > 5:
+        assert len(value) == int(typ[5:])
+        return merkleize(chunkify(coerce_to_bytes(value)))
+    elif isinstance(typ, list) and len(typ) == 2 and not is_basic(typ[0]):
+        return merkleize([hash_tree_root(element, typ[0]) for element in value])
+    elif hasattr(typ, 'fields'):
+        return merkleize([hash_tree_root(getattr(value, field), subtype) for field, subtype in typ.fields.items()])
+    else:
+        raise Exception("Type not recognized")
+
+
+def truncate(container):
+    field_keys = list(container.fields.keys())
+    truncated_fields = {
+        key: container.fields[key]
+        for key in field_keys[:-1]
+    }
+    truncated_class = SSZType(truncated_fields)
+    kwargs = {
+        field: getattr(container, field)
+        for field in field_keys[:-1]
+    }
+    return truncated_class(**kwargs)
+
+
+def signed_root(container):
+    return hash_tree_root(truncate(container))
+
+
+def serialize(ssz_object):
+    return getattr(ssz_object, 'serialize')()
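
SSZType is the same factory the generated spec.py uses for every container it pulls out of the markdown, so ad-hoc containers behave exactly like spec ones. A short sketch with a hypothetical two-field container (not one defined in the spec):

    from build.phase0.utils.minimal_ssz import SSZType, signed_root

    Example = SSZType({
        'slot': 'uint64',
        'signature': 'bytes96',
    })

    obj = Example(slot=5, signature=b'\x00' * 96)
    assert len(obj.serialize()) == 8 + 96   # fixed-size fields, so no length prefix
    assert len(obj.hash_tree_root()) == 32
    # signed_root(obj) hashes the container with its last field ('signature') dropped
    assert len(signed_root(obj)) == 32
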
diff --git a/utils/phase0/state_transition.py b/utils/phase0/state_transition.py
new file mode 100644
index 000000000..eefc3d409
--- /dev/null
+++ b/utils/phase0/state_transition.py
@@ -0,0 +1,100 @@
+from . import spec
+
+
+from typing import (  # noqa: F401
+    Any,
+    Callable,
+    List,
+    NewType,
+    Tuple,
+)
+
+from .spec import (
+    BeaconState,
+    BeaconBlock,
+)
+
+
+def process_transaction_type(state: BeaconState,
+                             transactions: List[Any],
+                             max_transactions: int,
+                             tx_fn: Callable[[BeaconState, Any], None]) -> None:
+    assert len(transactions) <= max_transactions
+    for transaction in transactions:
+        tx_fn(state, transaction)
+
+
+def process_transactions(state: BeaconState, block: BeaconBlock) -> None:
+    process_transaction_type(
+        state,
+        block.body.proposer_slashings,
+        spec.MAX_PROPOSER_SLASHINGS,
+        spec.process_proposer_slashing,
+    )
+    process_transaction_type(
+        state,
+        block.body.attester_slashings,
+        spec.MAX_ATTESTER_SLASHINGS,
+        spec.process_attester_slashing,
+    )
+    process_transaction_type(
+        state,
+        block.body.attestations,
+        spec.MAX_ATTESTATIONS,
+        spec.process_attestation,
+    )
+    process_transaction_type(
+        state,
+        block.body.deposits,
+        spec.MAX_DEPOSITS,
+        spec.process_deposit,
+    )
+    process_transaction_type(
+        state,
+        block.body.voluntary_exits,
+        spec.MAX_VOLUNTARY_EXITS,
+        spec.process_voluntary_exit,
+    )
+    assert len(block.body.transfers) == len(set(block.body.transfers))
+    process_transaction_type(
+        state,
+        block.body.transfers,
+        spec.MAX_TRANSFERS,
+        spec.process_transfer,
+    )
+
+
+def process_block(state: BeaconState,
+                  block: BeaconBlock,
+                  verify_state_root: bool=False) -> None:
+    spec.process_block_header(state, block)
+    spec.process_randao(state, block)
+    spec.process_eth1_data(state, block)
+
+    process_transactions(state, block)
+    if verify_state_root:
+        spec.verify_block_state_root(state, block)
+
+
+def process_epoch_transition(state: BeaconState) -> None:
+    spec.update_justification_and_finalization(state)
+    spec.process_crosslinks(state)
+    spec.maybe_reset_eth1_period(state)
+    spec.apply_rewards(state)
+    spec.process_ejections(state)
+    spec.update_registry_and_shuffling_data(state)
+    spec.process_slashings(state)
+    spec.process_exit_queue(state)
+    spec.finish_epoch_update(state)
+
+
+def state_transition(state: BeaconState,
+                     block: BeaconBlock,
+                     verify_state_root: bool=False) -> BeaconState:
+    while state.slot < block.slot:
+        spec.cache_state(state)
+        if (state.slot + 1) % spec.SLOTS_PER_EPOCH == 0:
+            process_epoch_transition(state)
+        spec.advance_slot(state)
+        if block.slot == state.slot:
+            process_block(state, block, verify_state_root)
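
state_transition is the loop the sanity tests drive end to end: empty slots are replayed one by one, the epoch transition runs whenever a boundary is crossed, and the block itself is applied last. A condensed sketch of the pattern used throughout test_sanity.py (assuming the built spec and the test helpers are importable):

    from copy import deepcopy

    import build.phase0.spec as spec
    from build.phase0.state_transition import state_transition
    from tests.phase0.helpers import build_empty_block_for_next_slot

    def skip_one_epoch(state):
        # mirrors test_empty_epoch_transition: cross an epoch boundary with a single empty block
        post = deepcopy(state)
        block = build_empty_block_for_next_slot(post)
        block.slot += spec.SLOTS_PER_EPOCH
        state_transition(post, block)
        assert post.slot == block.slot
        return post
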