change configs to be loaded in time; reload specs module to make new config presets effective. Also fix more lint and consistency problems.

protolambda 2019-11-19 20:16:40 +01:00 committed by Danny Ryan
parent d6bfe5d35c
commit e73316c13f
No known key found for this signature in database
GPG Key ID: 2765A792E42CE07A
9 changed files with 65 additions and 176 deletions

View File

@@ -9,9 +9,13 @@ from typing import (
     Optional,
 )
 
+CONFIG_LOADER = '''
+apply_constants_preset(globals())
+'''
+
-PHASE0_IMPORTS = '''from typing import (
-    Any, Dict, Set, Sequence, Tuple, Optional
+PHASE0_IMPORTS = '''from eth2spec.config.apply_config import apply_constants_preset
+from typing import (
+    Dict, Set, Sequence, Tuple, Optional
 )
 from dataclasses import (
@@ -33,8 +37,10 @@ from eth2spec.utils.bls import (
 from eth2spec.utils.hash_function import hash
 '''
 
-PHASE1_IMPORTS = '''from typing import (
-    Any, Dict, Set, Sequence, MutableSequence, NewType, Tuple, Union,
+PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0
+from eth2spec.config.apply_config import apply_constants_preset
+from typing import (
+    Dict, Set, Sequence, NewType, Tuple, Union,
 )
 from math import (
     log2,
@@ -101,24 +107,7 @@ def compute_committee(indices: Sequence[ValidatorIndex],  # type: ignore
     if param_hash not in committee_cache:
         committee_cache[param_hash] = _compute_committee(indices, seed, index, count)
-    return committee_cache[param_hash]
-
-
-# Access to overwrite spec constants based on configuration
-def apply_constants_preset(preset: Dict[str, Any]) -> None:
-    global_vars = globals()
-    for k, v in preset.items():
-        if k.startswith('DOMAIN_'):
-            global_vars[k] = DomainType(v)  # domain types are defined as bytes in the configs
-        else:
-            global_vars[k] = v
-
-    # Deal with derived constants
-    global_vars['GENESIS_EPOCH'] = compute_epoch_at_slot(GENESIS_SLOT)
-
-    # Initialize SSZ types again, to account for changed lengths
-    init_SSZ_types()
-'''
+    return committee_cache[param_hash]'''
 
 
 def remove_for_phase1(functions: Dict[str, str]):
@@ -128,23 +117,10 @@ def remove_for_phase1(functions: Dict[str, str]):
             functions[key] = "\n".join(lines)
 
 
-def strip_comments(raw: str) -> str:
-    comment_line_regex = re.compile(r'^\s+# ')
-    lines = raw.split('\n')
-    out = []
-    for line in lines:
-        if not comment_line_regex.match(line):
-            if ' #' in line:
-                line = line[:line.index(' #')]
-            out.append(line)
-    return '\n'.join(out)
-
-
 def objects_to_spec(functions: Dict[str, str],
                     custom_types: Dict[str, str],
                     constants: Dict[str, str],
                     ssz_objects: Dict[str, str],
-                    inserts: Dict[str, str],
                     imports: Dict[str, str],
                     ) -> str:
     """
@@ -169,27 +145,17 @@ def objects_to_spec(functions: Dict[str, str],
             constants[k] += "  # noqa: E501"
     constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
     ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
-    ssz_objects_reinitialization_spec = (
-        'def init_SSZ_types() -> None:\n    global_vars = globals()\n\n    '
-        + '\n\n    '.join([strip_comments(re.sub(r'\n\n', r'\n', re.sub(r'(?!\n\n)\n', r'\n    ', value[:-1])))
-                           for value in ssz_objects.values()])
-        + '\n\n'
-        + '\n'.join(map(lambda x: '    global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
-    )
     spec = (
         imports
         + '\n\n' + new_type_definitions
         + '\n' + SUNDRY_CONSTANTS_FUNCTIONS
         + '\n\n' + constants_spec
-        + '\n\n\n' + ssz_objects_instantiation_spec
+        + '\n\n' + CONFIG_LOADER
+        + '\n\n' + ssz_objects_instantiation_spec
         + '\n\n' + functions_spec
         + '\n' + SUNDRY_FUNCTIONS
-        + '\n\n' + ssz_objects_reinitialization_spec
         + '\n'
     )
-    # Handle @inserts
-    for key, value in inserts.items():
-        spec = re.sub('[ ]*# %s\\n' % key, value, spec)
     return spec
@@ -242,32 +208,22 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]
     and returns the newer versions of the objects in dependency order.
     """
     for key, value in new_objects.items():
-        if key in old_objects:
-            # add proper spacing
-            old_objects[key] = old_objects[key] + "\n\n"
-        lines = value.split("\n")
-        value = "\n".join([lines[0] + "  # noqa: F811"] + lines[1:])
-        old_objects[key] = old_objects.get(key, '') + value
+        old_objects[key] = value
     dependency_order_ssz_objects(old_objects, custom_types)
     return old_objects
 
 
-# inserts are handeled the same way as functions
-combine_inserts = combine_functions
-
-
 def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
     """
     Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
     """
-    functions0, custom_types0, constants0, ssz_objects0, inserts0 = spec0
-    functions1, custom_types1, constants1, ssz_objects1, inserts1 = spec1
+    functions0, custom_types0, constants0, ssz_objects0 = spec0
+    functions1, custom_types1, constants1, ssz_objects1 = spec1
     functions = combine_functions(functions0, functions1)
     custom_types = combine_constants(custom_types0, custom_types1)
     constants = combine_constants(constants0, constants1)
     ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
-    inserts = combine_inserts(inserts0, inserts1)
-    return functions, custom_types, constants, ssz_objects, inserts
+    return functions, custom_types, constants, ssz_objects
 
 
 def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str,
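The net effect of the build-script changes above: `objects_to_spec` now splices `CONFIG_LOADER` between the constants and the SSZ class definitions, replacing the old `init_SSZ_types()` re-initialization pass and the `@insert` machinery. A minimal sketch of what a generated spec module looks like under the new layout (abridged; the constant and its value are illustrative, not taken from this diff):

```python
# Abridged shape of a generated spec module (illustrative values).
from eth2spec.config.apply_config import apply_constants_preset  # via PHASE0_IMPORTS

# ... constants_spec ...
SECONDS_PER_SLOT = 12  # illustrative constant

# CONFIG_LOADER: overwrite the constants above with the currently loaded preset.
apply_constants_preset(globals())

# ... ssz_objects_instantiation_spec follows the loader, so container classes are
# defined with preset values already applied and no re-initialization is needed.
```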

View File

@@ -3,8 +3,6 @@ from typing import Dict, Tuple, NewType
 
 FUNCTION_REGEX = r'^def [\w_]*'
-BEGIN_INSERT_REGEX = r'# begin insert '
-END_INSERT_REGEX = r'# end insert'
 
 SpecObject = NewType('SpecObjects', Tuple[Dict[str, str], Dict[str, str], Dict[str, str], Dict[str, str]])
@@ -15,22 +13,18 @@ def get_spec(file_name: str) -> SpecObject:
         functions = {function_name: function_code}
         constants= {constant_name: constant_code}
         ssz_objects= {object_name: object}
-        inserts= {insert_tag: code to be inserted}
 
     Note: This function makes heavy use of the inherent ordering of dicts,
     if this is not supported by your python version, it will not work.
     """
     pulling_from = None  # line number of start of latest object
     current_name = None  # most recent section title
-    insert_name = None  # stores the label of the current insert object
-    functions = {}
-    constants = {}
-    ssz_objects = {}
-    inserts = {}
+    functions: Dict[str, str] = {}
+    constants: Dict[str, str] = {}
+    ssz_objects: Dict[str, str] = {}
     function_matcher = re.compile(FUNCTION_REGEX)
-    inserts_matcher = re.compile(BEGIN_INSERT_REGEX)
     is_ssz = False
-    custom_types = {}
+    custom_types: Dict[str, str] = {}
     for linenum, line in enumerate(open(file_name).readlines()):
         line = line.rstrip()
         if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
@@ -40,15 +34,6 @@ def get_spec(file_name: str) -> SpecObject:
             pulling_from = linenum + 1
         elif line[:3] == '```':
             pulling_from = None
-        elif inserts_matcher.match(line) is not None:
-            # Find @insert names
-            insert_name = re.search(r'@[\w]*', line).group(0)
-        elif insert_name is not None:
-            # In insert mode, either the next line is more code, or the end of the insert
-            if re.match(END_INSERT_REGEX, line) is not None:
-                insert_name = None
-            else:
-                inserts[insert_name] = inserts.get(insert_name, '') + line + '\n'
         else:
             # Handle function definitions & ssz_objects
             if pulling_from is not None:
@@ -84,4 +69,4 @@ def get_spec(file_name: str) -> SpecObject:
                 constants[row[0]] = row[1].replace('**TBD**', '2**32')
             elif row[1].startswith('uint') or row[1].startswith('Bytes'):
                 custom_types[row[0]] = row[1]
-    return functions, custom_types, constants, ssz_objects, inserts
+    return SpecObject((functions, custom_types, constants, ssz_objects))

View File

@@ -157,7 +157,7 @@ class AttestationCustodyBitWrapper(Container):
 ### New extended `PendingAttestation`
 
 ```python
-class PendingAttestation(phase0.PendingAttestation):
+class PendingAttestation(Container):
     aggregation_bits: Bitlist[MAX_VALIDATORS_PER_COMMITTEE]
     data: AttestationData
     inclusion_delay: Slot
@@ -168,7 +168,7 @@ class PendingAttestation(phase0.PendingAttestation):
 ### New extended `Validator`
 
 ```python
-class Validator(phase0.Validator):
+class Validator(Container):
     pubkey: BLSPubkey
     withdrawal_credentials: Hash  # Commitment to pubkey for withdrawals
     effective_balance: Gwei  # Balance at stake
@@ -189,7 +189,7 @@ class Validator(phase0.Validator):
 ### New extended `BeaconBlockBody`
 
 ```python
-class BeaconBlockBody(phase0.BeaconBlockBody):
+class BeaconBlockBody(Container):
     randao_reveal: BLSSignature
     eth1_data: Eth1Data  # Eth1 data vote
     graffiti: Bytes32  # Arbitrary data
@@ -218,7 +218,7 @@ class BeaconBlockBody(phase0.BeaconBlockBody):
 Note that the `body` has a new `BeaconBlockBody` definition.
 
 ```python
-class BeaconBlock(phase0.BeaconBlock):
+class BeaconBlock(Container):
     slot: Slot
     parent_root: Hash
     state_root: Hash
@@ -231,7 +231,7 @@ class BeaconBlock(phase0.BeaconBlock):
 Note that aside from the new additions, `Validator` and `PendingAttestation` have new definitions.
 
 ```python
-class BeaconState(phase0.BeaconState):
+class BeaconState(Container):
     # Versioning
     genesis_time: uint64
     slot: Slot
@@ -270,7 +270,7 @@ class BeaconState(phase0.BeaconState):
     custody_challenge_index: uint64
     # Future derived secrets already exposed; contains the indices of the exposed validator
     # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
-    exposed_derived_secrets: Vector[List[ValidatorIndex, PLACEHOLDER],
+    exposed_derived_secrets: Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH],
                                     EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
 ```
@@ -610,7 +610,7 @@ def process_attestations(state: BeaconState, block_body: BeaconBlockBody, attest
     # Apply proposer reward and cost
     beacon_proposer_index = get_beacon_proposer_index(state)
     estimated_attester_reward = sum([get_base_reward(state, attester) for attester in all_participants])
-    proposer_reward = estimated_attester_reward // PROPOSER_REWARD_COEFFICIENT
+    proposer_reward = estimated_attester_reward // PROPOSER_REWARD_QUOTIENT
     increase_balance(state, beacon_proposer_index, proposer_reward)
     states_slots_lengths = zip(
         block_body.shard_transition.shard_states,
@@ -659,19 +659,19 @@ def process_light_client_signatures(state: BeaconState, block_body: BeaconBlockB
     assert len(block_body.light_client_signature_bitfield) == len(committee)
 
     total_reward = Gwei(0)
     signer_keys = []
-    for i, bit in enumerate(block_body.light_client_signature_bitfield):
-        if bit:
+    for i, participant_bit in enumerate(block_body.light_client_signature_bitfield):
+        if participant_bit:
             signer_keys.append(state.validators[committee[i]].pubkey)
             increase_balance(state, committee[i], get_base_reward(state, committee[i]))
             total_reward += get_base_reward(state, committee[i])
 
-    increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_COEFFICIENT)
+    increase_balance(state, get_beacon_proposer_index(state), total_reward // PROPOSER_REWARD_QUOTIENT)
 
     assert bls_verify(
         pubkey=bls_aggregate_pubkeys(signer_keys),
         message_hash=get_block_root_at_slot(state, state.slot - 1),
         signature=block_body.light_client_signature,
-        domain=DOMAIN_LIGHT_CLIENT,
+        domain=DOMAIN_LIGHT_CLIENT
     )
 ```

View File

@@ -34,14 +34,12 @@
 - [Helpers](#helpers)
   - [`ceillog2`](#ceillog2)
   - [`is_valid_merkle_branch_with_mixin`](#is_valid_merkle_branch_with_mixin)
-  - [`get_crosslink_chunk_count`](#get_crosslink_chunk_count)
   - [`legendre_bit`](#legendre_bit)
   - [`custody_subchunkify`](#custody_subchunkify)
   - [`get_custody_chunk_bit`](#get_custody_chunk_bit)
   - [`get_chunk_bits_root`](#get_chunk_bits_root)
   - [`get_randao_epoch_for_custody_period`](#get_randao_epoch_for_custody_period)
   - [`get_custody_period_for_validator`](#get_custody_period_for_validator)
-  - [`replace_empty_or_append`](#replace_empty_or_append)
 - [Per-block processing](#per-block-processing)
   - [Operations](#operations)
     - [Custody key reveals](#custody-key-reveals)
@@ -168,45 +166,6 @@ class CustodyBitChallenge(Container):
     signature: BLSSignature
 ```
 
-#### `CustodyChunkChallengeRecord`
-
-```python
-class CustodyChunkChallengeRecord(Container):
-    challenge_index: uint64
-    challenger_index: ValidatorIndex
-    responder_index: ValidatorIndex
-    inclusion_epoch: Epoch
-    data_root: Root
-    depth: uint64
-    chunk_index: uint64
-```
-
-#### `CustodyBitChallengeRecord`
-
-```python
-class CustodyBitChallengeRecord(Container):
-    challenge_index: uint64
-    challenger_index: ValidatorIndex
-    responder_index: ValidatorIndex
-    inclusion_epoch: Epoch
-    data_root: Root
-    chunk_count: uint64
-    chunk_bits_merkle_root: Root
-    responder_key: BLSSignature
-```
-
-#### `CustodyResponse`
-
-```python
-class CustodyResponse(Container):
-    challenge_index: uint64
-    chunk_index: uint64
-    chunk: Vector[Bytes[PLACEHOLDER], BYTES_PER_CUSTODY_CHUNK]
-    data_branch: List[Bytes32, PLACEHOLDER]
-    chunk_bits_branch: List[Bytes32, PLACEHOLDER]
-    chunk_bits_leaf: Bytes32
-```
-
 ### New Beacon Chain operations
 
 #### `CustodyKeyReveal`
@@ -266,13 +225,6 @@ def is_valid_merkle_branch_with_mixin(leaf: Bytes32,
     return value == root
 ```
 
-### `get_crosslink_chunk_count`
-
-```python
-def get_custody_chunk_count(crosslink: Crosslink) -> int:
-    crosslink_length = min(MAX_EPOCHS_PER_CROSSLINK, crosslink.end_epoch - crosslink.start_epoch)
-    return crosslink_length * CHUNKS_PER_EPOCH
-```
-
 ### `legendre_bit`
@@ -354,17 +306,6 @@ def get_custody_period_for_validator(validator_index: ValidatorIndex, epoch: Epo
     return (epoch + validator_index % EPOCHS_PER_CUSTODY_PERIOD) // EPOCHS_PER_CUSTODY_PERIOD
 ```
 
-### `replace_empty_or_append`
-
-```python
-def replace_empty_or_append(list: MutableSequence[Any], new_element: Any) -> int:
-    for i in range(len(list)):
-        if is_zero(list[i]):
-            list[i] = new_element
-            return i
-    list.append(new_element)
-    return len(list) - 1
-```
-
 ## Per-block processing
@@ -526,7 +467,7 @@ def process_chunk_challenge(state: BeaconState, challenge: CustodyChunkChallenge
         record.chunk_index != challenge.chunk_index
     )
     # Verify depth
-    depth = ceillog2(get_custody_chunk_count(challenge.attestation.data.crosslink))
+    depth = 123  # TODO
     assert challenge.chunk_index < 2**depth
     # Add new chunk challenge record
     new_record = CustodyChunkChallengeRecord(
@@ -611,24 +552,13 @@ def process_bit_challenge(state: BeaconState, challenge: CustodyBitChallenge) ->
     domain = get_domain(state, DOMAIN_RANDAO, epoch_to_sign)
     assert bls_verify(responder.pubkey, hash_tree_root(epoch_to_sign), challenge.responder_key, domain)
     # Verify the chunk count
-    chunk_count = get_custody_chunk_count(attestation.data.crosslink)
+    chunk_count = 123  # TODO
     assert chunk_count == len(challenge.chunk_bits)
     # Verify custody bit is incorrect
     committee = get_beacon_committee(state, epoch, shard)
     custody_bit = attestation.custody_bits[committee.index(challenge.responder_index)]
     assert custody_bit != get_chunk_bits_root(challenge.chunk_bits)
-    # Add new bit challenge record
-    new_record = CustodyBitChallengeRecord(
-        challenge_index=state.custody_challenge_index,
-        challenger_index=challenge.challenger_index,
-        responder_index=challenge.responder_index,
-        inclusion_epoch=get_current_epoch(state),
-        data_root=attestation.data.crosslink.data_root,
-        chunk_count=chunk_count,
-        chunk_bits_merkle_root=hash_tree_root(challenge.chunk_bits),
-        responder_key=challenge.responder_key,
-    )
-    replace_empty_or_append(state.custody_bit_challenge_records, new_record)
+    # TODO: immediate processing of challenge?
     state.custody_challenge_index += 1
     # Postpone responder withdrawability
     responder.withdrawable_epoch = FAR_FUTURE_EPOCH

View File

@@ -99,7 +99,7 @@ def upgrade_to_phase1(pre: phase0.BeaconState) -> BeaconState:
         next_light_committee=CompactCommittee(),
         # Custody game
         custody_challenge_index=0,
-        exposed_derived_secrets=Vector[List[ValidatorIndex, PLACEHOLDER],
+        exposed_derived_secrets=Vector[List[ValidatorIndex, MAX_EARLY_DERIVED_SECRET_REVEALS * SLOTS_PER_EPOCH],
                                        EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]()
     )
     post.current_light_committee = get_light_client_committee(post, post.epoch)

View File

@@ -0,0 +1,19 @@
+from preset_loader import loader
+from typing import Dict, Any
+
+presets: Dict[str, Any] = {}
+
+
+# Access to overwrite spec constants based on configuration
+def apply_constants_preset(spec_globals: Dict[str, Any]) -> None:
+    global presets
+    for k, v in presets.items():
+        if k.startswith('DOMAIN_'):
+            spec_globals[k] = spec_globals['DomainType'](v)  # domain types are defined as bytes in the configs
+        else:
+            spec_globals[k] = v
+
+
+def load_presets(configs_path, config_name):
+    global presets
+    presets = loader.load_presets(configs_path, config_name)
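This new module keeps the loaded preset in a module-level `presets` dict, so the values survive spec-module reloads, and it looks up `DomainType` in the target module's own globals instead of importing spec types into the config package. A minimal usage sketch (the preset name `'minimal'` is an assumed example; the relative configs path is the one used by the test fixture later in this commit):

```python
from eth2spec.config import apply_config

# Fill apply_config.presets from the YAML configs; 'minimal' is an assumed example name.
apply_config.load_presets('../../configs/', 'minimal')

# A spec module then applies the preset to itself by calling, at import time:
#     apply_constants_preset(globals())
# which overwrites matching constants and converts DOMAIN_* values with the
# module's own DomainType class.
```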

View File

@@ -1,5 +1,4 @@
-from eth2spec.phase0 import spec as spec_phase0
-from eth2spec.phase1 import spec as spec_phase1
+from eth2spec.config import apply_config
 
 # We import pytest only when it's present, i.e. when we are running tests.
 # The test-cases themselves can be generated without installing pytest.
@@ -33,7 +32,4 @@ def pytest_addoption(parser):
 @fixture(autouse=True)
 def config(request):
     config_name = request.config.getoption("--config")
-    from preset_loader import loader
-    presets = loader.load_presets('../../configs/', config_name)
-    spec_phase0.apply_constants_preset(presets)
-    spec_phase1.apply_constants_preset(presets)
+    apply_config.load_presets('../../configs/', config_name)

View File

@@ -1,5 +1,5 @@
 from eth2spec.phase0 import spec as spec_phase0
-# from eth2spec.phase1 import spec as spec_phase1
+from eth2spec.phase1 import spec as spec_phase1
 from eth2spec.utils import bls
 
 from .helpers.genesis import create_genesis_state
@@ -8,6 +8,11 @@ from .utils import vector_test, with_meta_tags
 from typing import Any, Callable, Sequence
+from importlib import reload
+
+reload(spec_phase0)
+reload(spec_phase1)
+
 
 def with_custom_state(balances_fn: Callable[[Any], Sequence[int]],
                       threshold_fn: Callable[[Any], int]):
@@ -191,9 +196,7 @@ def with_phases(phases):
             if 'phase0' in run_phases:
                 ret = run_with_spec_version(spec_phase0, *args, **kw)
             if 'phase1' in run_phases:
-                # temporarily disable phase 1 tests
-                return
-                # ret = run_with_spec_version(spec_phase1, *args, **kw)
+                ret = run_with_spec_version(spec_phase1, *args, **kw)
             return ret
         return wrapper
     return decorator
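Putting the last two files together: the pytest fixture only records the chosen preset via `apply_config.load_presets`, while the `reload(...)` calls above re-execute the spec modules so their embedded `apply_constants_preset(globals())` runs against whatever preset is currently loaded. A rough sketch of the mechanism (not of pytest's exact import order; the preset name is illustrative):

```python
from importlib import reload

from eth2spec.config import apply_config
from eth2spec.phase0 import spec as spec_phase0
from eth2spec.phase1 import spec as spec_phase1

# 1. Record the preset in the config module ('minimal' is illustrative).
apply_config.load_presets('../../configs/', 'minimal')

# 2. Re-run each spec module body; the generated CONFIG_LOADER line
#    (apply_constants_preset(globals())) now sees the loaded preset.
reload(spec_phase0)
reload(spec_phase1)
```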