Change configs to be loaded in time; reload the spec modules to make new config presets effective. Also fix more lint and consistency problems.

protolambda 2019-11-19 20:16:40 +01:00 committed by Danny Ryan
parent d6bfe5d35c
commit e73316c13f
No known key found for this signature in database
GPG Key ID: 2765A792E42CE07A
9 changed files with 65 additions and 176 deletions
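In short: presets now live in a small shared module (`eth2spec.config.apply_config`, added below), and each generated spec applies them at import time through a module-level `apply_constants_preset(globals())` call (the `CONFIG_LOADER` snippet in the spec-builder changes below). Loading a preset and then reloading the spec module is therefore enough to make it effective. A minimal sketch of that order of operations; the config path and preset name are illustrative:

```python
from importlib import reload

from eth2spec.config import apply_config
from eth2spec.phase0 import spec as spec_phase0

# 1. Load the chosen preset into apply_config's module-level `presets` dict.
#    Path and preset name are illustrative (the test fixture below passes '../../configs/').
apply_config.load_presets('../../configs/', 'minimal')

# 2. Reload the spec: the generated module runs `apply_constants_preset(globals())`
#    at import time, so the reload overwrites its constants with the loaded preset.
reload(spec_phase0)

print(spec_phase0.SLOTS_PER_EPOCH)  # now reflects the 'minimal' preset
```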

View File

@@ -9,9 +9,13 @@ from typing import (
Optional,
)
CONFIG_LOADER = '''
apply_constants_preset(globals())
'''
PHASE0_IMPORTS = '''from typing import (
Any, Dict, Set, Sequence, Tuple, Optional
PHASE0_IMPORTS = '''from eth2spec.config.apply_config import apply_constants_preset
from typing import (
Dict, Set, Sequence, Tuple, Optional
)
from dataclasses import (
@@ -33,8 +37,10 @@ from eth2spec.utils.bls import (
from eth2spec.utils.hash_function import hash
'''
PHASE1_IMPORTS = '''from typing import (
Any, Dict, Set, Sequence, MutableSequence, NewType, Tuple, Union,
PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0
from eth2spec.config.apply_config import apply_constants_preset
from typing import (
Dict, Set, Sequence, NewType, Tuple, Union,
)
from math import (
log2,
@@ -101,24 +107,7 @@ def compute_committee(indices: Sequence[ValidatorIndex], # type: ignore
if param_hash not in committee_cache:
committee_cache[param_hash] = _compute_committee(indices, seed, index, count)
return committee_cache[param_hash]
# Access to overwrite spec constants based on configuration
def apply_constants_preset(preset: Dict[str, Any]) -> None:
global_vars = globals()
for k, v in preset.items():
if k.startswith('DOMAIN_'):
global_vars[k] = DomainType(v) # domain types are defined as bytes in the configs
else:
global_vars[k] = v
# Deal with derived constants
global_vars['GENESIS_EPOCH'] = compute_epoch_at_slot(GENESIS_SLOT)
# Initialize SSZ types again, to account for changed lengths
init_SSZ_types()
'''
return committee_cache[param_hash]'''
def remove_for_phase1(functions: Dict[str, str]):
@@ -128,23 +117,10 @@ def remove_for_phase1(functions: Dict[str, str]):
functions[key] = "\n".join(lines)
def strip_comments(raw: str) -> str:
comment_line_regex = re.compile(r'^\s+# ')
lines = raw.split('\n')
out = []
for line in lines:
if not comment_line_regex.match(line):
if ' #' in line:
line = line[:line.index(' #')]
out.append(line)
return '\n'.join(out)
def objects_to_spec(functions: Dict[str, str],
custom_types: Dict[str, str],
constants: Dict[str, str],
ssz_objects: Dict[str, str],
inserts: Dict[str, str],
imports: Dict[str, str],
) -> str:
"""
@@ -169,27 +145,17 @@ def objects_to_spec(functions: Dict[str, str],
constants[k] += " # noqa: E501"
constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
ssz_objects_reinitialization_spec = (
'def init_SSZ_types() -> None:\n global_vars = globals()\n\n '
+ '\n\n '.join([strip_comments(re.sub(r'\n\n', r'\n', re.sub(r'(?!\n\n)\n', r'\n ', value[:-1])))
for value in ssz_objects.values()])
+ '\n\n'
+ '\n'.join(map(lambda x: ' global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
)
spec = (
imports
+ '\n\n' + new_type_definitions
+ '\n' + SUNDRY_CONSTANTS_FUNCTIONS
+ '\n\n' + constants_spec
+ '\n\n\n' + ssz_objects_instantiation_spec
+ '\n\n' + CONFIG_LOADER
+ '\n\n' + ssz_objects_instantiation_spec
+ '\n\n' + functions_spec
+ '\n' + SUNDRY_FUNCTIONS
+ '\n\n' + ssz_objects_reinitialization_spec
+ '\n'
)
# Handle @inserts
for key, value in inserts.items():
spec = re.sub('[ ]*# %s\\n' % key, value, spec)
return spec
@@ -242,32 +208,22 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]
and returns the newer versions of the objects in dependency order.
"""
for key, value in new_objects.items():
if key in old_objects:
# add proper spacing
old_objects[key] = old_objects[key] + "\n\n"
lines = value.split("\n")
value = "\n".join([lines[0] + " # noqa: F811"] + lines[1:])
old_objects[key] = old_objects.get(key, '') + value
old_objects[key] = value
dependency_order_ssz_objects(old_objects, custom_types)
return old_objects
# inserts are handled the same way as functions
combine_inserts = combine_functions
def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
"""
Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
"""
functions0, custom_types0, constants0, ssz_objects0, inserts0 = spec0
functions1, custom_types1, constants1, ssz_objects1, inserts1 = spec1
functions0, custom_types0, constants0, ssz_objects0 = spec0
functions1, custom_types1, constants1, ssz_objects1 = spec1
functions = combine_functions(functions0, functions1)
custom_types = combine_constants(custom_types0, custom_types1)
constants = combine_constants(constants0, constants1)
ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
inserts = combine_inserts(inserts0, inserts1)
return functions, custom_types, constants, ssz_objects, inserts
return functions, custom_types, constants, ssz_objects
def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str,
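Note the new assembly order in `objects_to_spec`: `CONFIG_LOADER` is emitted right after the constants and before the SSZ class definitions, so a freshly imported (or reloaded) spec builds its containers against the configured constants and the old `init_SSZ_types()` re-initialization block can go away. A schematic sketch of the resulting module layout, with purely illustrative names and values (assuming the pyspec's `eth2spec.utils.ssz.ssz_typing` helpers):

```python
from eth2spec.config.apply_config import apply_constants_preset
from eth2spec.utils.ssz.ssz_typing import Container, Bitlist

# constants_spec: defaults parsed out of the markdown
MAX_VALIDATORS_PER_COMMITTEE = 2048

# CONFIG_LOADER: overwrite the defaults with whatever preset was loaded
# beforehand via eth2spec.config.apply_config.load_presets(...)
apply_constants_preset(globals())

# ssz_objects_instantiation_spec: classes are defined *after* the preset is
# applied, so list lengths already use the configured values and no separate
# init_SSZ_types() pass is needed on reload.
class ExampleContainer(Container):  # illustrative container, not part of the spec
    bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE]
```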

View File

@@ -3,8 +3,6 @@ from typing import Dict, Tuple, NewType
FUNCTION_REGEX = r'^def [\w_]*'
BEGIN_INSERT_REGEX = r'# begin insert '
END_INSERT_REGEX = r'# end insert'
SpecObject = NewType('SpecObjects', Tuple[Dict[str, str], Dict[str, str], Dict[str, str], Dict[str, str]])
@@ -15,22 +13,18 @@ def get_spec(file_name: str) -> SpecObject:
functions = {function_name: function_code}
constants= {constant_name: constant_code}
ssz_objects= {object_name: object}
inserts= {insert_tag: code to be inserted}
Note: This function makes heavy use of the inherent ordering of dicts;
if your Python version does not preserve insertion order, it will not work.
"""
pulling_from = None # line number of start of latest object
current_name = None # most recent section title
insert_name = None # stores the label of the current insert object
functions = {}
constants = {}
ssz_objects = {}
inserts = {}
functions: Dict[str, str] = {}
constants: Dict[str, str] = {}
ssz_objects: Dict[str, str] = {}
function_matcher = re.compile(FUNCTION_REGEX)
inserts_matcher = re.compile(BEGIN_INSERT_REGEX)
is_ssz = False
custom_types = {}
custom_types: Dict[str, str] = {}
for linenum, line in enumerate(open(file_name).readlines()):
line = line.rstrip()
if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
@@ -40,15 +34,6 @@ def get_spec(file_name: str) -> SpecObject:
pulling_from = linenum + 1
elif line[:3] == '```':
pulling_from = None
elif inserts_matcher.match(line) is not None:
# Find @insert names
insert_name = re.search(r'@[\w]*', line).group(0)
elif insert_name is not None:
# In insert mode, either the next line is more code, or the end of the insert
if re.match(END_INSERT_REGEX, line) is not None:
insert_name = None
else:
inserts[insert_name] = inserts.get(insert_name, '') + line + '\n'
else:
# Handle function definitions & ssz_objects
if pulling_from is not None:
@@ -84,4 +69,4 @@ def get_spec(file_name: str) -> SpecObject:
constants[row[0]] = row[1].replace('**TBD**', '2**32')
elif row[1].startswith('uint') or row[1].startswith('Bytes'):
custom_types[row[0]] = row[1]
return functions, custom_types, constants, ssz_objects, inserts
return SpecObject((functions, custom_types, constants, ssz_objects))

View File

@@ -157,7 +157,7 @@ class AttestationCustodyBitWrapper(Container):
### New extended `PendingAttestation`
```python
class PendingAttestation(phase0.PendingAttestation):
class PendingAttestation(Container):
aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE]
data: AttestationData
inclusion_delay: Slot
@@ -168,7 +168,7 @@ class PendingAttestation(phase0.PendingAttestation):
### New extended `Validator`
```python
class Validator(phase0.Validator):
class Validator(Container):
pubkey: BLSPubkey
withdrawal_credentials: Hash # Commitment to pubkey for withdrawals
effective_balance: Gwei # Balance at stake
@@ -189,7 +189,7 @@ class Validator(phase0.Validator):
### New extended `BeaconBlockBody`
```python
class BeaconBlockBody(phase0.BeaconBlockBody):
class BeaconBlockBody(Container):
randao_reveal: BLSSignature
eth1_data: Eth1Data # Eth1 data vote
graffiti: Bytes32 # Arbitrary data
@@ -218,7 +218,7 @@ class BeaconBlockBody(phase0.BeaconBlockBody):
Note that the `body` has a new `BeaconBlockBody` definition.
```python
class BeaconBlock(phase0.BeaconBlock):
class BeaconBlock(Container):
slot: Slot
parent_root: Hash
state_root: Hash
@@ -231,7 +231,7 @@ class BeaconBlock(phase0.BeaconBlock):
Note that aside from the new additions, `Validator` and `PendingAttestation` have new definitions.
```python
class BeaconState(phase0.BeaconState):
class BeaconState(Container):
# Versioning
genesis_time: uint64
slot: Slot
@@ -270,7 +270,7 @@ class BeaconState(phase0.BeaconState):
custody_challenge_index: uint64
# Future derived secrets already exposed; contains the indices of the exposed validator
# at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
exposed_derived_secrets: Vector[List[ValidatorIndex, PLACEHOLDER],
exposed_derived_secrets: Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH],
EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
```
@@ -610,7 +610,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest
# Apply proposer reward and cost
beacon_proposer_index = get_beacon_proposer_index(state)
estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants])
proposer_reward = estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT
proposer_reward = estimated_attester_reward // PROPOSER_REWARD_QUOTIENT
increase_balance(state, beacon_proposer_index, proposer_reward)
states_slots_lengths = zip(
block_body.shard_transition.shard_states,
@@ -659,19 +659,19 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB
assert len(block_body.light_client_signature_bitfield) == len(committee)
total_reward = Gwei(0)
signer_keys = []
for i, bit in enumerate(block_body.light_client_signature_bitfield):
if bit:
for i, participant_bit in enumerate(block_body.light_client_signature_bitfield):
if participant_bit:
signer_keys.append(state.validators[committee[i]].pubkey)
increase_balance(state, committee[i], get_base_reward(state, committee[i]))
total_reward += get_base_reward(state, committee[i])
increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_COEFFICIENT)
increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_QUOTIENT)
assert bls_verify(
pubkey=bls_aggregate_pubkeys(signer_keys),
message_hash=get_block_root_at_slot(state, state.slot - 1),
signature=block_body.light_client_signature,
domain=DOMAIN_LIGHT_CLIENT,
domain=DOMAIN_LIGHT_CLIENT
)
```

View File

@@ -34,14 +34,12 @@
- [Helpers](#helpers)
- [`ceillog2`](#ceillog2)
- [`is_valid_merkle_branch_with_mixin`](#is_valid_merkle_branch_with_mixin)
- [`get_crosslink_chunk_count`](#get_crosslink_chunk_count)
- [`legendre_bit`](#legendre_bit)
- [`custody_subchunkify`](#custody_subchunkify)
- [`get_custody_chunk_bit`](#get_custody_chunk_bit)
- [`get_chunk_bits_root`](#get_chunk_bits_root)
- [`get_randao_epoch_for_custody_period`](#get_randao_epoch_for_custody_period)
- [`get_custody_period_for_validator`](#get_custody_period_for_validator)
- [`replace_empty_or_append`](#replace_empty_or_append)
- [Per-block processing](#per-block-processing)
- [Operations](#operations)
- [Custody key reveals](#custody-key-reveals)
@@ -168,45 +166,6 @@ class CustodyBitChallenge(Container):
signature: BLSSignature
```
#### `CustodyChunkChallengeRecord`
```python
class CustodyChunkChallengeRecord(Container):
challenge_index: uint64
challenger_index: ValidatorIndex
responder_index: ValidatorIndex
inclusion_epoch: Epoch
data_root: Root
depth: uint64
chunk_index: uint64
```
#### `CustodyBitChallengeRecord`
```python
class CustodyBitChallengeRecord(Container):
challenge_index: uint64
challenger_index: ValidatorIndex
responder_index: ValidatorIndex
inclusion_epoch: Epoch
data_root: Root
chunk_count: uint64
chunk_bits_merkle_root: Root
responder_key: BLSSignature
```
#### `CustodyResponse`
```python
class CustodyResponse(Container):
challenge_index: uint64
chunk_index: uint64
chunk: Vector[Bytes[PLACEHOLDER], BYTES_PER_CUSTODY_CHUNK]
data_branch: List[Bytes32, PLACEHOLDER]
chunk_bits_branch: List[Bytes32, PLACEHOLDER]
chunk_bits_leaf: Bytes32
```
### New Beacon Chain operations
#### `CustodyKeyReveal`
@@ -266,13 +225,6 @@ def is_valid_merkle_branch_with_mixin(leaf: Bytes32,
return value == root
```
### `get_crosslink_chunk_count`
```python
def get_custody_chunk_count(crosslink: Crosslink) -> int:
crosslink_length = min(MAX_EPOCHS_PER_CROSSLINK, crosslink.end_epoch - crosslink.start_epoch)
return crosslink_length * CHUNKS_PER_EPOCH
```
### `legendre_bit`
@@ -354,17 +306,6 @@ def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epo
return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD
```
### `replace_empty_or_append`
```python
def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int:
for i in range(len(list)):
if is_zero(list[i]):
list[i] = new_element
return i
list.append(new_element)
return len(list) - 1
```
## Per-block processing
@@ -526,7 +467,7 @@ def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge
record.chunk_index != challenge.chunk_index
)
# Verify depth
depth = ceillog2(get_custody_chunk_count(challenge.attestation.data.crosslink))
depth = 123 # TODO
assert challenge.chunk_index < 2**depth
# Add new chunk challenge record
new_record = CustodyChunkChallengeRecord(
@@ -611,24 +552,13 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) ->
domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign)
assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain)
# Verify the chunk count
chunk_count = get_custody_chunk_count(attestation.data.crosslink)
chunk_count = 123 # TODO
assert chunk_count == len(challenge.chunk_bits)
# Verify custody bit is incorrect
committee = get_beacon_committee(state, epoch, shard)
custody_bit = attestation.custody_bits[committee.index(challenge.responder_index)]
assert custody_bit != get_chunk_bits_root(challenge.chunk_bits)
# Add new bit challenge record
new_record = CustodyBitChallengeRecord(
challenge_index=state.custody_challenge_index,
challenger_index=challenge.challenger_index,
responder_index=challenge.responder_index,
inclusion_epoch=get_current_epoch(state),
data_root=attestation.data.crosslink.data_root,
chunk_count=chunk_count,
chunk_bits_merkle_root=hash_tree_root(challenge.chunk_bits),
responder_key=challenge.responder_key,
)
replace_empty_or_append(state.custody_bit_challenge_records, new_record)
# TODO: immediate processing of challenge?
state.custody_challenge_index += 1
# Postpone responder withdrawability
responder.withdrawable_epoch = FAR_FUTURE_EPOCH

View File

@@ -99,7 +99,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState:
next_light_committee=CompactCommittee(),
# Custody game
custody_challenge_index=0,
exposed_derived_secrets=Vector[List[ValidatorIndex, PLACEHOLDER],
exposed_derived_secrets=Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH],
EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]()
)
post.current_light_committee = get_light_client_committee(post, post.epoch)

View File

@@ -0,0 +1,19 @@
from preset_loader import loader
from typing import Dict, Any
presets: Dict[str, Any] = {}
# Access to overwrite spec constants based on configuration
def apply_constants_preset(spec_globals: Dict[str, Any]) -> None:
global presets
for k, v in presets.items():
if k.startswith('DOMAIN_'):
spec_globals[k] = spec_globals['DomainType'](v) # domain types are defined as bytes in the configs
else:
spec_globals[k] = v
def load_presets(configs_path, config_name):
global presets
presets = loader.load_presets(configs_path, config_name)
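A minimal usage sketch of the new module; the preset values are illustrative and assigned directly instead of going through `load_presets`, and the explicit call below stands in for the `apply_constants_preset(globals())` line that a generated spec runs itself:

```python
from eth2spec.config import apply_config
from eth2spec.phase0 import spec

# Illustrative preset; normally filled by apply_config.load_presets(configs_path, config_name).
apply_config.presets = {
    'SLOTS_PER_EPOCH': 8,                           # plain constants are assigned as-is
    'DOMAIN_BEACON_PROPOSER': b'\x00\x00\x00\x00',  # DOMAIN_* values get wrapped in the spec's DomainType
}

# The generated spec calls this with its own globals(); passing the module dict is equivalent here.
apply_config.apply_constants_preset(spec.__dict__)

print(spec.SLOTS_PER_EPOCH)         # -> 8
print(spec.DOMAIN_BEACON_PROPOSER)  # a DomainType value built from the raw bytes
```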

View File

@@ -1,5 +1,4 @@
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1
from eth2spec.config import apply_config
# We import pytest only when it's present, i.e. when we are running tests.
# The test-cases themselves can be generated without installing pytest.
@@ -33,7 +32,4 @@ def pytest_addoption(parser):
@fixture(autouse=True)
def config(request):
config_name = request.config.getoption("--config")
from preset_loader import loader
presets = loader.load_presets('../../configs/', config_name)
spec_phase0.apply_constants_preset(presets)
spec_phase1.apply_constants_preset(presets)
apply_config.load_presets('../../configs/', config_name)

View File

@@ -1,5 +1,5 @@
from eth2spec.phase0 import spec as spec_phase0
# from eth2spec.phase1 import spec as spec_phase1
from eth2spec.phase1 import spec as spec_phase1
from eth2spec.utils import bls
from .helpers.genesis import create_genesis_state
@@ -8,6 +8,11 @@ from .utils import vector_test, with_meta_tags
from typing import Any, Callable, Sequence
from importlib import reload
reload(spec_phase0)
reload(spec_phase1)
def with_custom_state(balances_fn: Callable[[Any], Sequence[int]],
threshold_fn: Callable[[Any], int]):
@@ -191,9 +196,7 @@ def with_phases(phases):
if 'phase0' in run_phases:
ret = run_with_spec_version(spec_phase0, *args, **kw)
if 'phase1' in run_phases:
# temporarily disable phase 1 tests
return
# ret = run_with_spec_version(spec_phase1, *args, **kw)
ret = run_with_spec_version(spec_phase1, *args, **kw)
return ret
return wrapper
return decorator
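With phase 1 re-enabled here, a `with_phases`-decorated test runs against both reloaded spec modules, receiving each module as its `spec` argument. A small illustrative test in that style; the assertion is only a placeholder:

```python
from eth2spec.test.context import with_phases

@with_phases(['phase0', 'phase1'])
def test_spec_is_configured(spec):
    # `spec` is the phase0 module on the first call and the phase1 module on the
    # second; both should reflect whatever preset was loaded before the reload.
    assert spec.SLOTS_PER_EPOCH > 0  # placeholder assertion
```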