Merge pull request #1166 from ethereum/mypy
Add mypy type hinting check
commit 145e54b373
@@ -9,7 +9,9 @@ build/
output/
eth2.0-spec-tests/
.pytest_cache
+.mypy_cache
# Dynamically built from Markdown spec
test_libs/pyspec/eth2spec/phase0/spec.py
Makefile
@@ -59,7 +59,10 @@ open_cov:

lint: $(PY_SPEC_ALL_TARGETS)
	cd $(PY_SPEC_DIR); . venv/bin/activate; \
-	flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec;
+	flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec; \
+	cd ./eth2spec; \
+	mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase0; \
+	mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase1

install_deposit_contract_test: $(PY_SPEC_ALL_TARGETS)
	cd $(DEPOSIT_CONTRACT_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements-testing.txt
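For context on the new lint step: the added mypy flags require every function in the checked `phase0` and `phase1` packages to carry complete annotations. A minimal, hypothetical illustration (not spec code) of what the two `--disallow-*` flags accept and reject:

```python
# Hypothetical module checked with:
#   mypy --disallow-untyped-defs --disallow-incomplete-defs example.py

def total_unannotated(balances):             # rejected: no annotations (--disallow-untyped-defs)
    return sum(balances)

def total_partial(balances: list):           # rejected: missing return type (--disallow-incomplete-defs)
    return sum(balances)

def total_annotated(balances: list) -> int:  # accepted: fully annotated
    return sum(balances)
```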
@@ -13,9 +13,10 @@ from typing import (

PHASE0_IMPORTS = '''from typing import (
    Any,
    Callable,
    Dict,
    List,
    NewType,
    Set,
    Tuple,
)
@@ -25,7 +26,8 @@ from eth2spec.utils.ssz.ssz_impl import (
)
from eth2spec.utils.ssz.ssz_typing import (
    # unused: uint8, uint16, uint32, uint128, uint256,
-    uint64, Container, Vector, BytesN
+    uint64, Container, Vector,
+    Bytes4, Bytes32, Bytes48, Bytes96,
)
from eth2spec.utils.bls import (
    bls_aggregate_pubkeys,
@@ -38,9 +40,11 @@ from eth2spec.utils.hash_function import hash
'''
PHASE1_IMPORTS = '''from typing import (
    Any,
    Callable,
    Dict,
    List,
    NewType,
    Optional,
    Set,
    Tuple,
)
@@ -52,7 +56,8 @@ from eth2spec.utils.ssz.ssz_impl import (
)
from eth2spec.utils.ssz.ssz_typing import (
    # unused: uint8, uint16, uint32, uint128, uint256,
-    uint64, Container, Vector, BytesN
+    uint64, Container, Vector,
+    Bytes4, Bytes32, Bytes48, Bytes96,
)
from eth2spec.utils.bls import (
    bls_aggregate_pubkeys,
@@ -62,13 +67,6 @@ from eth2spec.utils.bls import (

from eth2spec.utils.hash_function import hash
'''
-NEW_TYPES = {
-    'Slot': 'int',
-    'Epoch': 'int',
-    'Shard': 'int',
-    'ValidatorIndex': 'int',
-    'Gwei': 'int',
-}
-BYTE_TYPES = [4, 32, 48, 96]
SUNDRY_FUNCTIONS = '''
def get_ssz_type_by_name(name: str) -> Container:
@@ -77,36 +75,33 @@ def get_ssz_type_by_name(name: str) -> Container:

# Monkey patch validator compute committee code
_compute_committee = compute_committee
-committee_cache = {}
+committee_cache: Dict[Tuple[Hash, Hash, int, int], List[ValidatorIndex]] = {}


-def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:
+def compute_committee(indices: List[ValidatorIndex],  # type: ignore
+                      seed: Hash,
+                      index: int,
+                      count: int) -> List[ValidatorIndex]:
    param_hash = (hash_tree_root(indices), seed, index, count)

-    if param_hash in committee_cache:
-        return committee_cache[param_hash]
-    else:
-        ret = _compute_committee(indices, seed, index, count)
-        committee_cache[param_hash] = ret
-        return ret
+    if param_hash not in committee_cache:
+        committee_cache[param_hash] = _compute_committee(indices, seed, index, count)
+    return committee_cache[param_hash]


# Monkey patch hash cache
_hash = hash
-hash_cache = {}
+hash_cache: Dict[bytes, Hash] = {}


-def hash(x):
-    if x in hash_cache:
-        return hash_cache[x]
-    else:
-        ret = _hash(x)
-        hash_cache[x] = ret
-        return ret
+def hash(x: bytes) -> Hash:
+    if x not in hash_cache:
+        hash_cache[x] = Hash(_hash(x))
+    return hash_cache[x]


# Access to overwrite spec constants based on configuration
-def apply_constants_preset(preset: Dict[str, Any]):
+def apply_constants_preset(preset: Dict[str, Any]) -> None:
    global_vars = globals()
    for k, v in preset.items():
        global_vars[k] = v
@@ -120,32 +115,39 @@ def apply_constants_preset(preset: Dict[str, Any]):


def objects_to_spec(functions: Dict[str, str],
+                    custom_types: Dict[str, str],
                    constants: Dict[str, str],
                    ssz_objects: Dict[str, str],
                    inserts: Dict[str, str],
                    imports: Dict[str, str],
-                    new_types: Dict[str, str],
-                    byte_types: List[int],
                    ) -> str:
    """
    Given all the objects that constitute a spec, combine them into a single pyfile.
    """
-    new_type_definitions = '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])
-    new_type_definitions += '\n' + '\n'.join(['Hash = Bytes32', 'BLSPubkey = Bytes48', 'BLSSignature = Bytes96'])
-    new_type_definitions += \
-        '\n' + '\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])
+    new_type_definitions = (
+        '\n\n'.join(
+            [
+                f"class {key}({value}):\n"
+                f"    def __init__(self, _x: {value}) -> None:\n"
+                f"        ...\n"
+                if value.startswith("uint")
+                else f"class {key}({value}):\n    pass\n"
+                for key, value in custom_types.items()
+            ]
+        )
+    )
    functions_spec = '\n\n'.join(functions.values())
    constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
    ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
    ssz_objects_reinitialization_spec = (
-        'def init_SSZ_types():\n    global_vars = globals()\n\n    '
+        'def init_SSZ_types() -> None:\n    global_vars = globals()\n\n    '
        + '\n\n    '.join([re.sub(r'(?!\n\n)\n', r'\n    ', value[:-1]) for value in ssz_objects.values()])
        + '\n\n'
        + '\n'.join(map(lambda x: '    global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
    )
    spec = (
        imports
-        + '\n' + new_type_definitions
+        + '\n\n' + new_type_definitions
        + '\n\n' + constants_spec
        + '\n\n\n' + ssz_objects_instantiation_spec
        + '\n\n' + functions_spec
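A sketch of what the rewritten `new_type_definitions` expression produces. The sample `custom_types` mapping below is hypothetical input mirroring two rows of the spec's Custom types table; the string manipulation itself is copied from the change above, so the snippet runs standalone:

```python
custom_types = {'Slot': 'uint64', 'Hash': 'Bytes32'}  # sample entries only

new_type_definitions = '\n\n'.join(
    [
        f"class {key}({value}):\n"
        f"    def __init__(self, _x: {value}) -> None:\n"
        f"        ...\n"
        if value.startswith("uint")
        else f"class {key}({value}):\n    pass\n"
        for key, value in custom_types.items()
    ]
)
print(new_type_definitions)
# class Slot(uint64):
#     def __init__(self, _x: uint64) -> None:
#         ...
#
#
# class Hash(Bytes32):
#     pass
```

The generated classes subclass the real `uint64` and `Bytes32` defined in `ssz_typing.py` (see the changes to that file further down), which is what lets mypy treat `Slot`, `Epoch`, `Hash`, and friends as distinct types.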
@@ -171,7 +173,7 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st
    return old_constants


-def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
+def dependency_order_ssz_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
    """
    Determines which SSZ Object is depenedent on which other and orders them appropriately
    """
@@ -180,14 +182,14 @@ def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
        dependencies = re.findall(r'(: [A-Z][\w[]*)', value)
        dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|Hash|BLSPubkey|BLSSignature|uint\d+|Bytes\d+|bytes', '', x), dependencies)
        for dep in dependencies:
-            if dep in NEW_TYPES or len(dep) == 0:
+            if dep in custom_types or len(dep) == 0:
                continue
            key_list = list(objects.keys())
            for item in [dep, key] + key_list[key_list.index(dep)+1:]:
                objects[item] = objects.pop(item)


-def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
+def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
    """
    Takes in old spec and new spec ssz objects, combines them,
    and returns the newer versions of the objects in dependency order.
@@ -199,7 +201,7 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]
        # remove leading variable name
        value = re.sub(r'^class [\w]*\(Container\):\n', '', value)
        old_objects[key] = old_objects.get(key, '') + value
-    dependency_order_ssz_objects(old_objects)
+    dependency_order_ssz_objects(old_objects, custom_types)
    return old_objects
@@ -211,18 +213,19 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
    """
    Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
    """
-    functions0, constants0, ssz_objects0, inserts0 = spec0
-    functions1, constants1, ssz_objects1, inserts1 = spec1
+    functions0, custom_types0, constants0, ssz_objects0, inserts0 = spec0
+    functions1, custom_types1, constants1, ssz_objects1, inserts1 = spec1
    functions = combine_functions(functions0, functions1)
+    custom_types = combine_constants(custom_types0, custom_types1)
    constants = combine_constants(constants0, constants1)
-    ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1)
+    ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
    inserts = combine_inserts(inserts0, inserts1)
-    return functions, constants, ssz_objects, inserts
+    return functions, custom_types, constants, ssz_objects, inserts


def build_phase0_spec(sourcefile: str, outfile: str=None) -> Optional[str]:
-    functions, constants, ssz_objects, inserts = get_spec(sourcefile)
-    spec = objects_to_spec(functions, constants, ssz_objects, inserts, PHASE0_IMPORTS, NEW_TYPES, BYTE_TYPES)
+    functions, custom_types, constants, ssz_objects, inserts = get_spec(sourcefile)
+    spec = objects_to_spec(functions, custom_types, constants, ssz_objects, inserts, PHASE0_IMPORTS)
    if outfile is not None:
        with open(outfile, 'w') as out:
            out.write(spec)
@@ -239,7 +242,7 @@ def build_phase1_spec(phase0_sourcefile: str,
    spec_objects = phase0_spec
    for value in [phase1_custody, phase1_shard_data]:
        spec_objects = combine_spec_objects(spec_objects, value)
-    spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS, NEW_TYPES, BYTE_TYPES)
+    spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS)
    if outfile is not None:
        with open(outfile, 'w') as out:
            out.write(spec)
@@ -29,6 +29,7 @@ def get_spec(file_name: str) -> SpecObject:
    inserts = {}
    function_matcher = re.compile(FUNCTION_REGEX)
    inserts_matcher = re.compile(BEGIN_INSERT_REGEX)
+    custom_types = {}
    for linenum, line in enumerate(open(file_name).readlines()):
        line = line.rstrip()
        if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
@@ -64,7 +65,7 @@ def get_spec(file_name: str) -> SpecObject:
                ssz_objects[current_name] = ssz_objects.get(current_name, '') + line + '\n'
            else:
                functions[current_name] = functions.get(current_name, '') + line + '\n'
-        # Handle constant table entries
+        # Handle constant and custom types table entries
        elif pulling_from is None and len(line) > 0 and line[0] == '|':
            row = line[1:].split('|')
            if len(row) >= 2:
@@ -72,12 +73,15 @@ def get_spec(file_name: str) -> SpecObject:
                    row[i] = row[i].strip().strip('`')
                if '`' in row[i]:
                    row[i] = row[i][:row[i].find('`')]
-                eligible = True
-                if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
-                    eligible = False
-                for c in row[0]:
-                    if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
-                        eligible = False
-                if eligible:
-                    constants[row[0]] = row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')
-    return functions, constants, ssz_objects, inserts
+                if row[1].startswith('uint') or row[1].startswith('Bytes'):
+                    custom_types[row[0]] = row[1]
+                else:
+                    eligible = True
+                    if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
+                        eligible = False
+                    for c in row[0]:
+                        if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
+                            eligible = False
+                    if eligible:
+                        constants[row[0]] = row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')
+    return functions, custom_types, constants, ssz_objects, inserts
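A simplified, hedged sketch of the routing that the new branch performs on a Markdown table row (eligibility checks omitted; the two sample rows mirror tables that appear later in this diff):

```python
custom_types = {}
constants = {}

# Rows as they look after the surrounding code strips whitespace and backticks.
for row in (['Slot', 'uint64'], ['FAR_FUTURE_EPOCH', 'Epoch(2**64 - 1)']):
    if row[1].startswith('uint') or row[1].startswith('Bytes'):
        custom_types[row[0]] = row[1]
    else:
        constants[row[0]] = row[1]

print(custom_types)  # {'Slot': 'uint64'}
print(constants)     # {'FAR_FUTURE_EPOCH': 'Epoch(2**64 - 1)'}
```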
@@ -10,10 +10,10 @@
    - [Introduction](#introduction)
    - [Notation](#notation)
    - [Terminology](#terminology)
+    - [Custom types](#custom-types)
    - [Constants](#constants)
    - [Configuration](#configuration)
        - [Misc](#misc)
        - [Deposit contract](#deposit-contract)
        - [Gwei values](#gwei-values)
        - [Initial values](#initial-values)
        - [Time parameters](#time-parameters)
@@ -21,7 +21,7 @@
        - [Rewards and penalties](#rewards-and-penalties)
        - [Max operations per block](#max-operations-per-block)
        - [Signature domains](#signature-domains)
-    - [Custom types](#custom-types)
+    - [Custom types](#custom-types-1)
    - [Containers](#containers)
        - [Misc dependencies](#misc-dependencies)
            - [`Fork`](#fork)
@@ -151,14 +151,30 @@ Code snippets appearing in `this style` are to be interpreted as Python code.
* **Withdrawal period**—the number of slots between a [validator](#dfn-validator) exit and the [validator](#dfn-validator) balance being withdrawable.
* **Genesis time**—the Unix time of the genesis beacon chain block at slot 0.

+## Custom types
+
+We define the following Python custom types for type hinting and readability:
+
+| Name | SSZ equivalent | Description |
+| - | - | - |
+| `Slot` | `uint64` | a slot number |
+| `Epoch` | `uint64` | an epoch number |
+| `Shard` | `uint64` | a shard number |
+| `ValidatorIndex` | `uint64` | a validator registry index |
+| `Gwei` | `uint64` | an amount in Gwei |
+| `Version` | `Bytes4` | a fork version number |
+| `Hash` | `Bytes32` | a hashed result |
+| `BLSPubkey` | `Bytes48` | a BLS12-381 public key |
+| `BLSSignature` | `Bytes96` | a BLS12-381 signature |
+
## Constants

The following values are (non-configurable) constants used throughout the specification.

| Name | Value |
| - | - |
-| `FAR_FUTURE_EPOCH` | `2**64 - 1` |
-| `ZERO_HASH` | `b'\x00' * 32` |
+| `FAR_FUTURE_EPOCH` | `Epoch(2**64 - 1)` |
+| `ZERO_HASH` | `Hash(b'\x00' * 32)` |
| `BASE_REWARDS_PER_EPOCH` | `5` |
| `DEPOSIT_CONTRACT_TREE_DEPTH` | `2**5` (= 32) |
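In the built pyspec these names become small classes over their SSZ equivalents (see the generator change above), so mypy can tell a `Slot` from an `Epoch` while the runtime values stay plain integers or byte strings. Because ordinary arithmetic on them returns the base type, results are re-wrapped explicitly, which is why casts like `Epoch(...)`, `Slot(...)`, `Shard(...)` and `Gwei(...)` appear throughout the function changes below. A minimal sketch with hypothetical stand-ins for the generated classes:

```python
class uint64(int):  # stand-in for the pyspec's uint64
    pass

class Slot(uint64):
    pass

class Epoch(uint64):
    pass

def slot_to_epoch(slot: Slot) -> Epoch:
    # slot // 64 is a plain int again, so it is wrapped back into Epoch
    return Epoch(slot // 64)  # 64 stands in for SLOTS_PER_EPOCH

print(slot_to_epoch(Slot(128)))  # 2
```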
@@ -183,19 +199,19 @@ These configurations are updated for releases, but may be out of sync during `de

### Gwei values

-| Name | Value | Unit |
+| Name | Value |
| - | - | :-: |
-| `MIN_DEPOSIT_AMOUNT` | `2**0 * 10**9` (= 1,000,000,000) | Gwei |
-| `MAX_EFFECTIVE_BALANCE` | `2**5 * 10**9` (= 32,000,000,000) | Gwei |
-| `EJECTION_BALANCE` | `2**4 * 10**9` (= 16,000,000,000) | Gwei |
-| `EFFECTIVE_BALANCE_INCREMENT` | `2**0 * 10**9` (= 1,000,000,000) | Gwei |
+| `MIN_DEPOSIT_AMOUNT` | `Gwei(2**0 * 10**9)` (= 1,000,000,000) |
+| `MAX_EFFECTIVE_BALANCE` | `Gwei(2**5 * 10**9)` (= 32,000,000,000) |
+| `EJECTION_BALANCE` | `Gwei(2**4 * 10**9)` (= 16,000,000,000) |
+| `EFFECTIVE_BALANCE_INCREMENT` | `Gwei(2**0 * 10**9)` (= 1,000,000,000) |

### Initial values

| Name | Value |
| - | - |
-| `GENESIS_SLOT` | `0` |
-| `GENESIS_EPOCH` | `0` |
+| `GENESIS_SLOT` | `Slot(0)` |
+| `GENESIS_EPOCH` | `Epoch(0)` |
| `BLS_WITHDRAWAL_PREFIX` | `0` |

### Time parameters
@@ -283,8 +299,8 @@ The following types are [SimpleSerialize (SSZ)](../simple-serialize.md) containe

```python
class Fork(Container):
-    previous_version: Bytes4
-    current_version: Bytes4
+    previous_version: Version
+    current_version: Version
    epoch: Epoch  # Epoch of latest fork
```
@@ -571,7 +587,7 @@ def slot_to_epoch(slot: Slot) -> Epoch:
    """
    Return the epoch number of the given ``slot``.
    """
-    return slot // SLOTS_PER_EPOCH
+    return Epoch(slot // SLOTS_PER_EPOCH)
```

### `get_previous_epoch`
@@ -583,7 +599,7 @@ def get_previous_epoch(state: BeaconState) -> Epoch:
    Return the current epoch if it's genesis epoch.
    """
    current_epoch = get_current_epoch(state)
-    return GENESIS_EPOCH if current_epoch == GENESIS_EPOCH else current_epoch - 1
+    return GENESIS_EPOCH if current_epoch == GENESIS_EPOCH else Epoch(current_epoch - 1)
```

### `get_current_epoch`
@@ -603,7 +619,7 @@ def get_epoch_start_slot(epoch: Epoch) -> Slot:
    """
    Return the starting slot of the given ``epoch``.
    """
-    return epoch * SLOTS_PER_EPOCH
+    return Slot(epoch * SLOTS_PER_EPOCH)
```

### `is_active_validator`
@@ -633,7 +649,7 @@ def get_active_validator_indices(state: BeaconState, epoch: Epoch) -> List[Valid
    """
    Get active validator indices at ``epoch``.
    """
-    return [i for i, v in enumerate(state.validators) if is_active_validator(v, epoch)]
+    return [ValidatorIndex(i) for i, v in enumerate(state.validators) if is_active_validator(v, epoch)]
```

### `increase_balance`
@@ -688,11 +704,11 @@ def get_shard_delta(state: BeaconState, epoch: Epoch) -> int:
```python
def get_epoch_start_shard(state: BeaconState, epoch: Epoch) -> Shard:
    assert epoch <= get_current_epoch(state) + 1
-    check_epoch = get_current_epoch(state) + 1
-    shard = (state.start_shard + get_shard_delta(state, get_current_epoch(state))) % SHARD_COUNT
+    check_epoch = Epoch(get_current_epoch(state) + 1)
+    shard = Shard((state.start_shard + get_shard_delta(state, get_current_epoch(state))) % SHARD_COUNT)
    while check_epoch > epoch:
-        check_epoch -= 1
-        shard = (shard + SHARD_COUNT - get_shard_delta(state, check_epoch)) % SHARD_COUNT
+        check_epoch -= Epoch(1)
+        shard = Shard((shard + SHARD_COUNT - get_shard_delta(state, check_epoch)) % SHARD_COUNT)
    return shard
```
@@ -702,7 +718,7 @@ def get_epoch_start_shard(state: BeaconState, epoch: Epoch) -> Shard:
def get_attestation_data_slot(state: BeaconState, data: AttestationData) -> Slot:
    committee_count = get_epoch_committee_count(state, data.target_epoch)
    offset = (data.crosslink.shard + SHARD_COUNT - get_epoch_start_shard(state, data.target_epoch)) % SHARD_COUNT
-    return get_epoch_start_slot(data.target_epoch) + offset // (committee_count // SLOTS_PER_EPOCH)
+    return Slot(get_epoch_start_slot(data.target_epoch) + offset // (committee_count // SLOTS_PER_EPOCH))
```

### `get_block_root_at_slot`
@@ -762,7 +778,7 @@ def generate_seed(state: BeaconState,
    Generate a seed for the given ``epoch``.
    """
    return hash(
-        get_randao_mix(state, epoch + RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) +
+        get_randao_mix(state, Epoch(epoch + RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD)) +
        get_active_index_root(state, epoch) +
        int_to_bytes(epoch, length=32)
    )
@@ -778,7 +794,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:
    epoch = get_current_epoch(state)
    committees_per_slot = get_epoch_committee_count(state, epoch) // SLOTS_PER_EPOCH
    offset = committees_per_slot * (state.slot % SLOTS_PER_EPOCH)
-    shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+    shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
    first_committee = get_crosslink_committee(state, epoch, shard)
    MAX_RANDOM_BYTE = 2**8 - 1
    seed = generate_seed(state, epoch)
@@ -788,7 +804,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:
        random_byte = hash(seed + int_to_bytes(i // 32, length=8))[i % 32]
        effective_balance = state.validators[candidate_index].effective_balance
        if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE * random_byte:
-            return candidate_index
+            return ValidatorIndex(candidate_index)
        i += 1
```
@@ -821,16 +837,19 @@ def get_shuffled_index(index: ValidatorIndex, index_count: int, seed: Hash) -> V

    # Swap or not (https://link.springer.com/content/pdf/10.1007%2F978-3-642-32009-5_1.pdf)
    # See the 'generalized domain' algorithm on page 3
-    for round in range(SHUFFLE_ROUND_COUNT):
-        pivot = bytes_to_int(hash(seed + int_to_bytes(round, length=1))[0:8]) % index_count
-        flip = (pivot + index_count - index) % index_count
+    for current_round in range(SHUFFLE_ROUND_COUNT):
+        pivot = bytes_to_int(hash(seed + int_to_bytes(current_round, length=1))[0:8]) % index_count
+        flip = ValidatorIndex((pivot + index_count - index) % index_count)
        position = max(index, flip)
-        source = hash(seed + int_to_bytes(round, length=1) + int_to_bytes(position // 256, length=4))
+        source = hash(
+            seed + int_to_bytes(current_round, length=1) +
+            int_to_bytes(position // 256, length=4)
+        )
        byte = source[(position % 256) // 8]
        bit = (byte >> (position % 8)) % 2
        index = flip if bit else index

-    return index
+    return ValidatorIndex(index)
```

### `compute_committee`
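The loop variable rename (`round` to `current_round`) also stops shadowing Python's built-in `round()` inside the shuffle loop; a tiny illustration of the shadowing it avoids:

```python
for round in range(3):
    pass
print(callable(round))  # False: the name now refers to the loop's last int, not the builtin
```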
@@ -839,7 +858,7 @@ def get_shuffled_index(index: ValidatorIndex, index_count: int, seed: Hash) -> V
def compute_committee(indices: List[ValidatorIndex], seed: Hash, index: int, count: int) -> List[ValidatorIndex]:
    start = (len(indices) * index) // count
    end = (len(indices) * (index + 1)) // count
-    return [indices[get_shuffled_index(i, len(indices), seed)] for i in range(start, end)]
+    return [indices[get_shuffled_index(ValidatorIndex(i), len(indices), seed)] for i in range(start, end)]
```

### `get_crosslink_committee`
@@ -889,7 +908,7 @@ def get_total_balance(state: BeaconState, indices: List[ValidatorIndex]) -> Gwei
    """
    Return the combined effective balance of the ``indices``. (1 Gwei minimum to avoid divisions by zero.)
    """
-    return max(sum([state.validators[index].effective_balance for index in indices]), 1)
+    return Gwei(max(sum([state.validators[index].effective_balance for index in indices]), 1))
```

### `get_domain`
@@ -897,7 +916,7 @@ def get_total_balance(state: BeaconState, indices: List[ValidatorIndex]) -> Gwei
```python
def get_domain(state: BeaconState,
               domain_type: int,
-               message_epoch: int=None) -> int:
+               message_epoch: Epoch=None) -> int:
    """
    Return the signature domain (fork version concatenated with domain type) of a message.
    """
@@ -1025,7 +1044,7 @@ def get_delayed_activation_exit_epoch(epoch: Epoch) -> Epoch:
    """
    Return the epoch at which an activation or exit triggered in ``epoch`` takes effect.
    """
-    return epoch + 1 + ACTIVATION_EXIT_DELAY
+    return Epoch(epoch + 1 + ACTIVATION_EXIT_DELAY)
```

### `get_churn_limit`
@@ -1074,11 +1093,11 @@ def initiate_validator_exit(state: BeaconState, index: ValidatorIndex) -> None:
    exit_queue_epoch = max(exit_epochs + [get_delayed_activation_exit_epoch(get_current_epoch(state))])
    exit_queue_churn = len([v for v in state.validators if v.exit_epoch == exit_queue_epoch])
    if exit_queue_churn >= get_churn_limit(state):
-        exit_queue_epoch += 1
+        exit_queue_epoch += Epoch(1)

    # Set validator exit epoch and withdrawable epoch
    validator.exit_epoch = exit_queue_epoch
-    validator.withdrawable_epoch = validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY
+    validator.withdrawable_epoch = Epoch(validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY)
```

#### `slash_validator`
@@ -1093,15 +1112,15 @@ def slash_validator(state: BeaconState,
    current_epoch = get_current_epoch(state)
    initiate_validator_exit(state, slashed_index)
    state.validators[slashed_index].slashed = True
-    state.validators[slashed_index].withdrawable_epoch = current_epoch + SLASHED_EXIT_LENGTH
+    state.validators[slashed_index].withdrawable_epoch = Epoch(current_epoch + SLASHED_EXIT_LENGTH)
    slashed_balance = state.validators[slashed_index].effective_balance
    state.slashed_balances[current_epoch % SLASHED_EXIT_LENGTH] += slashed_balance

    proposer_index = get_beacon_proposer_index(state)
    if whistleblower_index is None:
        whistleblower_index = proposer_index
-    whistleblowing_reward = slashed_balance // WHISTLEBLOWING_REWARD_QUOTIENT
-    proposer_reward = whistleblowing_reward // PROPOSER_REWARD_QUOTIENT
+    whistleblowing_reward = Gwei(slashed_balance // WHISTLEBLOWING_REWARD_QUOTIENT)
+    proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
    increase_balance(state, proposer_index, proposer_reward)
    increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
    decrease_balance(state, slashed_index, whistleblowing_reward)
@@ -1204,7 +1223,7 @@ def process_slots(state: BeaconState, slot: Slot) -> None:
        # Process epoch on the first slot of the next epoch
        if (state.slot + 1) % SLOTS_PER_EPOCH == 0:
            process_epoch(state)
-        state.slot += 1
+        state.slot += Slot(1)
```

```python
@@ -1271,7 +1290,7 @@ def get_matching_head_attestations(state: BeaconState, epoch: Epoch) -> List[Pen
```python
def get_unslashed_attesting_indices(state: BeaconState,
                                    attestations: List[PendingAttestation]) -> List[ValidatorIndex]:
-    output = set()
+    output = set()  # type: Set[ValidatorIndex]
    for a in attestations:
        output = output.union(get_attesting_indices(state, a.data, a.aggregation_bitfield))
    return sorted(filter(lambda index: not state.validators[index].slashed, list(output)))
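The trailing `# type:` comment is mypy's comment-style annotation; it gives the initially empty set an element type so the `union` and `sorted` calls type-check without changing the runtime behaviour of the spec function. A standalone illustration with a plain `int` element type:

```python
from typing import Set

output = set()  # type: Set[int]
output = output.union({2, 1})
print(sorted(output))  # [1, 2]
```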
@@ -1357,7 +1376,7 @@ def process_crosslinks(state: BeaconState) -> None:
    state.previous_crosslinks = [c for c in state.current_crosslinks]
    for epoch in (get_previous_epoch(state), get_current_epoch(state)):
        for offset in range(get_epoch_committee_count(state, epoch)):
-            shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+            shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
            crosslink_committee = get_crosslink_committee(state, epoch, shard)
            winning_crosslink, attesting_indices = get_winning_crosslink_and_attesting_indices(state, epoch, shard)
            if 3 * get_total_balance(state, attesting_indices) >= 2 * get_total_balance(state, crosslink_committee):
@@ -1370,17 +1389,17 @@ def process_crosslinks(state: BeaconState) -> None:
def get_base_reward(state: BeaconState, index: ValidatorIndex) -> Gwei:
    total_balance = get_total_active_balance(state)
    effective_balance = state.validators[index].effective_balance
-    return effective_balance * BASE_REWARD_FACTOR // integer_squareroot(total_balance) // BASE_REWARDS_PER_EPOCH
+    return Gwei(effective_balance * BASE_REWARD_FACTOR // integer_squareroot(total_balance) // BASE_REWARDS_PER_EPOCH)
```

```python
def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
    previous_epoch = get_previous_epoch(state)
    total_balance = get_total_active_balance(state)
-    rewards = [0 for _ in range(len(state.validators))]
-    penalties = [0 for _ in range(len(state.validators))]
+    rewards = [Gwei(0) for _ in range(len(state.validators))]
+    penalties = [Gwei(0) for _ in range(len(state.validators))]
    eligible_validator_indices = [
-        index for index, v in enumerate(state.validators)
+        ValidatorIndex(index) for index, v in enumerate(state.validators)
        if is_active_validator(v, previous_epoch) or (v.slashed and previous_epoch + 1 < v.withdrawable_epoch)
    ]
@@ -1399,23 +1418,25 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:

    # Proposer and inclusion delay micro-rewards
    for index in get_unslashed_attesting_indices(state, matching_source_attestations):
+        index = ValidatorIndex(index)
        attestation = min([
            a for a in matching_source_attestations
            if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
        ], key=lambda a: a.inclusion_delay)
-        proposer_reward = get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT
+        proposer_reward = Gwei(get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT)
        rewards[attestation.proposer_index] += proposer_reward
        max_attester_reward = get_base_reward(state, index) - proposer_reward
-        rewards[index] += max_attester_reward * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay
+        rewards[index] += Gwei(max_attester_reward * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay)

    # Inactivity penalty
    finality_delay = previous_epoch - state.finalized_epoch
    if finality_delay > MIN_EPOCHS_TO_INACTIVITY_PENALTY:
        matching_target_attesting_indices = get_unslashed_attesting_indices(state, matching_target_attestations)
        for index in eligible_validator_indices:
-            penalties[index] += BASE_REWARDS_PER_EPOCH * get_base_reward(state, index)
+            index = ValidatorIndex(index)
+            penalties[index] += Gwei(BASE_REWARDS_PER_EPOCH * get_base_reward(state, index))
            if index not in matching_target_attesting_indices:
-                penalties[index] += (
+                penalties[index] += Gwei(
                    state.validators[index].effective_balance * finality_delay // INACTIVITY_PENALTY_QUOTIENT
                )
@@ -1424,11 +1445,11 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:

```python
def get_crosslink_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
-    rewards = [0 for index in range(len(state.validators))]
-    penalties = [0 for index in range(len(state.validators))]
+    rewards = [Gwei(0) for index in range(len(state.validators))]
+    penalties = [Gwei(0) for index in range(len(state.validators))]
    epoch = get_previous_epoch(state)
    for offset in range(get_epoch_committee_count(state, epoch)):
-        shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+        shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
        crosslink_committee = get_crosslink_committee(state, epoch, shard)
        winning_crosslink, attesting_indices = get_winning_crosslink_and_attesting_indices(state, epoch, shard)
        attesting_balance = get_total_balance(state, attesting_indices)
@@ -1449,9 +1470,9 @@ def process_rewards_and_penalties(state: BeaconState) -> None:

    rewards1, penalties1 = get_attestation_deltas(state)
    rewards2, penalties2 = get_crosslink_deltas(state)
-    for i in range(len(state.validators)):
-        increase_balance(state, i, rewards1[i] + rewards2[i])
-        decrease_balance(state, i, penalties1[i] + penalties2[i])
+    for index in range(len(state.validators)):
+        increase_balance(state, ValidatorIndex(index), rewards1[index] + rewards2[index])
+        decrease_balance(state, ValidatorIndex(index), penalties1[index] + penalties2[index])
```

#### Registry updates
@@ -1467,7 +1488,7 @@ def process_registry_updates(state: BeaconState) -> None:
            validator.activation_eligibility_epoch = get_current_epoch(state)

        if is_active_validator(validator, get_current_epoch(state)) and validator.effective_balance <= EJECTION_BALANCE:
-            initiate_validator_exit(state, index)
+            initiate_validator_exit(state, ValidatorIndex(index))

    # Queue validators eligible for activation and not dequeued for activation prior to finalized epoch
    activation_queue = sorted([
@@ -1500,7 +1521,7 @@ def process_slashings(state: BeaconState) -> None:
                validator.effective_balance * min(total_penalties * 3, total_balance) // total_balance,
                validator.effective_balance // MIN_SLASHING_PENALTY_QUOTIENT
            )
-            decrease_balance(state, index, penalty)
+            decrease_balance(state, ValidatorIndex(index), penalty)
```

#### Final updates
@@ -1519,11 +1540,11 @@ def process_final_updates(state: BeaconState) -> None:
        if balance < validator.effective_balance or validator.effective_balance + 3 * HALF_INCREMENT < balance:
            validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
    # Update start shard
-    state.start_shard = (state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT
+    state.start_shard = Shard((state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT)
    # Set active index root
    index_root_position = (next_epoch + ACTIVATION_EXIT_DELAY) % ACTIVE_INDEX_ROOTS_LENGTH
    state.active_index_roots[index_root_position] = hash_tree_root(
-        get_active_validator_indices(state, next_epoch + ACTIVATION_EXIT_DELAY)
+        get_active_validator_indices(state, Epoch(next_epoch + ACTIVATION_EXIT_DELAY))
    )
    # Set total slashed balances
    state.slashed_balances[next_epoch % SLASHED_EXIT_LENGTH] = (
@@ -1610,15 +1631,15 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None:
    assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index)
    # Verify that there are no duplicate transfers
    assert len(body.transfers) == len(set(body.transfers))

-    for operations, max_operations, function in (
+    all_operations = [
        (body.proposer_slashings, MAX_PROPOSER_SLASHINGS, process_proposer_slashing),
        (body.attester_slashings, MAX_ATTESTER_SLASHINGS, process_attester_slashing),
        (body.attestations, MAX_ATTESTATIONS, process_attestation),
        (body.deposits, MAX_DEPOSITS, process_deposit),
        (body.voluntary_exits, MAX_VOLUNTARY_EXITS, process_voluntary_exit),
        (body.transfers, MAX_TRANSFERS, process_transfer),
-    ):
+    ]  # type: List[Tuple[List[Container], int, Callable]]
+    for operations, max_operations, function in all_operations:
        assert len(operations) <= max_operations
        for operation in operations:
            function(state, operation)
@@ -1754,7 +1775,7 @@ def process_deposit(state: BeaconState, deposit: Deposit) -> None:
        state.balances.append(amount)
    else:
        # Increase balance by deposit amount
-        index = validator_pubkeys.index(pubkey)
+        index = ValidatorIndex(validator_pubkeys.index(pubkey))
        increase_balance(state, index, amount)
```
@@ -133,9 +133,9 @@ class CustodyBitChallenge(Container):
    responder_index: ValidatorIndex
    attestation: Attestation
    challenger_index: ValidatorIndex
-    responder_key: Bytes96
+    responder_key: BLSSignature
    chunk_bits: bytes
-    signature: Bytes96
+    signature: BLSSignature
```

#### `CustodyChunkChallengeRecord`
@@ -162,7 +162,7 @@ class CustodyBitChallengeRecord(Container):
    data_root: Bytes32
    chunk_count: uint64
    chunk_bits_merkle_root: Bytes32
-    responder_key: Bytes96
+    responder_key: BLSSignature
```

#### `CustodyResponse`
@@ -184,9 +184,9 @@ class CustodyResponse(Container):
```python
class CustodyKeyReveal(Container):
    # Index of the validator whose key is being revealed
-    revealer_index: uint64
+    revealer_index: ValidatorIndex
    # Reveal (masked signature)
-    reveal: Bytes96
+    reveal: BLSSignature
```

#### `EarlyDerivedSecretReveal`
@@ -196,13 +196,13 @@ Represents an early (punishable) reveal of one of the derived secrets, where der
```python
class EarlyDerivedSecretReveal(Container):
    # Index of the validator whose key is being revealed
-    revealed_index: uint64
+    revealed_index: ValidatorIndex
    # RANDAO epoch of the key that is being revealed
-    epoch: uint64
+    epoch: Epoch
    # Reveal (masked signature)
-    reveal: Bytes96
+    reveal: BLSSignature
    # Index of the validator who revealed (whistleblower)
-    masker_index: uint64
+    masker_index: ValidatorIndex
    # Mask used to hide the actual reveal signature (prevent reveal from being stolen)
    mask: Bytes32
```
@@ -232,7 +232,7 @@ class BeaconState(Container):

    # Future derived secrets already exposed; contains the indices of the exposed validator
    # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
-    exposed_derived_secrets: Vector[List[uint64], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
+    exposed_derived_secrets: Vector[List[ValidatorIndex], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
```

#### `BeaconBlockBody`
@@ -251,7 +251,7 @@ class BeaconBlockBody(Container):
### `ceillog2`

```python
-def ceillog2(x):
+def ceillog2(x: int) -> int:
    return x.bit_length()
```
|
@ -267,9 +267,9 @@ def get_custody_chunk_count(crosslink: Crosslink) -> int:
|
|||
### `get_custody_chunk_bit`
|
||||
|
||||
```python
|
||||
def get_custody_chunk_bit(key: Bytes96, chunk: bytes) -> bool:
|
||||
def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
|
||||
# TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
|
||||
return get_bitfield_bit(hash(key + chunk), 0)
|
||||
return bool(get_bitfield_bit(hash(key + chunk), 0))
|
||||
```
|
||||
|
||||
### `get_chunk_bits_root`
|
||||
|
@@ -288,7 +288,7 @@ def get_chunk_bits_root(chunk_bitfield: bytes) -> Bytes32:
```python
def get_randao_epoch_for_custody_period(period: int, validator_index: ValidatorIndex) -> Epoch:
    next_period_start = (period + 1) * EPOCHS_PER_CUSTODY_PERIOD - validator_index % EPOCHS_PER_CUSTODY_PERIOD
-    return next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING
+    return Epoch(next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING)
```

### `get_validators_custody_reveal_period`
@@ -372,7 +372,11 @@ def process_custody_key_reveal(state: BeaconState,

    # Reward Block Preposer
    proposer_index = get_beacon_proposer_index(state)
-    increase_balance(state, proposer_index, get_base_reward(state, reveal.revealer_index) // MINOR_REWARD_QUOTIENT)
+    increase_balance(
+        state,
+        proposer_index,
+        Gwei(get_base_reward(state, reveal.revealer_index) // MINOR_REWARD_QUOTIENT)
+    )
```

#### Early derived secret reveals
@@ -433,7 +437,7 @@ def process_early_derived_secret_reveal(state: BeaconState,
        // len(get_active_validator_indices(state, get_current_epoch(state)))
        // PROPOSER_REWARD_QUOTIENT
    )
-    penalty = (
+    penalty = Gwei(
        max_proposer_slot_reward
        * EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE
        * (len(state.exposed_derived_secrets[derived_secret_location]) + 1)
@@ -442,8 +446,8 @@ def process_early_derived_secret_reveal(state: BeaconState,
    # Apply penalty
    proposer_index = get_beacon_proposer_index(state)
    whistleblower_index = reveal.masker_index
-    whistleblowing_reward = penalty // WHISTLEBLOWING_REWARD_QUOTIENT
-    proposer_reward = whistleblowing_reward // PROPOSER_REWARD_QUOTIENT
+    whistleblowing_reward = Gwei(penalty // WHISTLEBLOWING_REWARD_QUOTIENT)
+    proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
    increase_balance(state, proposer_index, proposer_reward)
    increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
    decrease_balance(state, reveal.revealed_index, penalty)
@@ -512,7 +516,7 @@ def process_bit_challenge(state: BeaconState,
        pubkey=challenger.pubkey,
        message_hash=signing_root(challenge),
        signature=challenge.signature,
-        domain=get_domain(state, get_current_epoch(state), DOMAIN_CUSTODY_BIT_CHALLENGE),
+        domain=get_domain(state, DOMAIN_CUSTODY_BIT_CHALLENGE, get_current_epoch(state)),
    )
    assert is_slashable_validator(challenger, get_current_epoch(state))
@@ -535,8 +539,8 @@ def process_bit_challenge(state: BeaconState,
    # Verify the responder is a valid custody key
    epoch_to_sign = get_randao_epoch_for_custody_period(
        get_validators_custody_reveal_period(
-            state=state,
-            index=challenge.responder_index,
-            epoch=slot_to_epoch(attestation.data.slot)),
+            state,
+            challenge.responder_index,
+        ),
        challenge.responder_index
    )
@@ -610,7 +614,7 @@ def process_chunk_challenge_response(state: BeaconState,
    # Verify the chunk matches the crosslink data root
    assert verify_merkle_branch(
        leaf=hash_tree_root(response.chunk),
-        branch=response.data_branch,
+        proof=response.data_branch,
        depth=challenge.depth,
        index=response.chunk_index,
        root=challenge.data_root,
@@ -620,7 +624,7 @@ def process_chunk_challenge_response(state: BeaconState,
    records[records.index(challenge)] = CustodyChunkChallengeRecord()
    # Reward the proposer
    proposer_index = get_beacon_proposer_index(state)
-    increase_balance(state, proposer_index, get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT)
+    increase_balance(state, proposer_index, Gwei(get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT))
```

```python
@@ -635,7 +639,7 @@ def process_bit_challenge_response(state: BeaconState,
    # Verify the chunk matches the crosslink data root
    assert verify_merkle_branch(
        leaf=hash_tree_root(response.chunk),
-        branch=response.data_branch,
+        proof=response.data_branch,
        depth=ceillog2(challenge.chunk_count),
        index=response.chunk_index,
        root=challenge.data_root,
@@ -643,7 +647,7 @@ def process_bit_challenge_response(state: BeaconState,
    # Verify the chunk bit leaf matches the challenge data
    assert verify_merkle_branch(
        leaf=response.chunk_bits_leaf,
-        branch=response.chunk_bits_branch,
+        proof=response.chunk_bits_branch,
        depth=ceillog2(challenge.chunk_count) >> 8,
        index=response.chunk_index // 256,
        root=challenge.chunk_bits_merkle_root
@@ -671,8 +675,8 @@ Run `process_reveal_deadlines(state)` immediately after `process_registry_update
def process_reveal_deadlines(state: BeaconState) -> None:
    for index, validator in enumerate(state.validators):
        deadline = validator.next_custody_reveal_period + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD)
-        if get_validators_custody_reveal_period(state, index) > deadline:
-            slash_validator(state, index)
+        if get_validators_custody_reveal_period(state, ValidatorIndex(index)) > deadline:
+            slash_validator(state, ValidatorIndex(index))
```

Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadlines(state)`:
@@ -682,17 +686,17 @@ Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadl
    process_challenge_deadlines(state)
# end insert @process_challenge_deadlines
def process_challenge_deadlines(state: BeaconState) -> None:
-    for challenge in state.custody_chunk_challenge_records:
-        if get_current_epoch(state) > challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
-            slash_validator(state, challenge.responder_index, challenge.challenger_index)
-            records = state.custody_chunk_challenge_records
-            records[records.index(challenge)] = CustodyChunkChallengeRecord()
+    for custody_chunk_challenge in state.custody_chunk_challenge_records:
+        if get_current_epoch(state) > custody_chunk_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
+            slash_validator(state, custody_chunk_challenge.responder_index, custody_chunk_challenge.challenger_index)
+            records = state.custody_chunk_challenge
+            records[records.index(custody_chunk_challenge)] = CustodyChunkChallengeRecord()

-    for challenge in state.custody_bit_challenge_records:
-        if get_current_epoch(state) > challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
-            slash_validator(state, challenge.responder_index, challenge.challenger_index)
+    for custody_bit_challenge in state.custody_bit_challenge_records:
+        if get_current_epoch(state) > custody_bit_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
+            slash_validator(state, custody_bit_challenge.responder_index, custody_bit_challenge.challenger_index)
            records = state.custody_bit_challenge_records
-            records[records.index(challenge)] = CustodyBitChallengeRecord()
+            records[records.index(custody_bit_challenge)] = CustodyBitChallengeRecord()
```

Append this to `process_final_updates(state)`:
@@ -713,5 +717,5 @@ def after_process_final_updates(state: BeaconState) -> None:
    for index, validator in enumerate(state.validators):
        if index not in validator_indices_in_records:
            if validator.exit_epoch != FAR_FUTURE_EPOCH and validator.withdrawable_epoch == FAR_FUTURE_EPOCH:
-                validator.withdrawable_epoch = validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY
+                validator.withdrawable_epoch = Epoch(validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY)
```
@@ -84,39 +84,39 @@ class ShardBlockBody(Container):
```python
class ShardAttestation(Container):
    class data(Container):
-        slot: uint64
-        shard: uint64
+        slot: Slot
+        shard: Shard
        shard_block_root: Bytes32
    aggregation_bitfield: bytes
-    aggregate_signature: Bytes96
+    aggregate_signature: BLSSignature
```

### `ShardBlock`

```python
class ShardBlock(Container):
-    slot: uint64
-    shard: uint64
+    slot: Slot
+    shard: Shard
    beacon_chain_root: Bytes32
    parent_root: Bytes32
    data: ShardBlockBody
    state_root: Bytes32
    attestations: List[ShardAttestation]
-    signature: Bytes96
+    signature: BLSSignature
```

### `ShardBlockHeader`

```python
class ShardBlockHeader(Container):
-    slot: uint64
-    shard: uint64
+    slot: Slot
+    shard: Shard
    beacon_chain_root: Bytes32
    parent_root: Bytes32
    body_root: Bytes32
    state_root: Bytes32
    attestations: List[ShardAttestation]
-    signature: Bytes96
+    signature: BLSSignature
```

## Helper functions
@@ -143,8 +143,8 @@ def get_period_committee(state: BeaconState,
### `get_switchover_epoch`

```python
-def get_switchover_epoch(state: BeaconState, epoch: Epoch, index: ValidatorIndex):
-    earlier_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2
+def get_switchover_epoch(state: BeaconState, epoch: Epoch, index: ValidatorIndex) -> int:
+    earlier_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2)
    return (bytes_to_int(hash(generate_seed(state, earlier_start_epoch) + int_to_bytes(index, length=3)[0:8]))
            % PERSISTENT_COMMITTEE_PERIOD)
```
@@ -159,19 +159,19 @@ def get_persistent_committee(state: BeaconState,
    Return the persistent committee for the given ``shard`` at the given ``slot``.
    """
    epoch = slot_to_epoch(slot)
-    earlier_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2
-    later_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD
+    earlier_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2)
+    later_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD)

    committee_count = max(
-        len(get_active_validator_indices(state.validators, earlier_start_epoch)) //
+        len(get_active_validator_indices(state, earlier_start_epoch)) //
        (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
-        len(get_active_validator_indices(state.validators, later_start_epoch)) //
+        len(get_active_validator_indices(state, later_start_epoch)) //
        (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
    ) + 1

    index = slot % committee_count
-    earlier_committee = get_period_committee(state, shard, earlier_start_epoch, index, committee_count)
-    later_committee = get_period_committee(state, shard, later_start_epoch, index, committee_count)
+    earlier_committee = get_period_committee(state, earlier_start_epoch, shard, index, committee_count)
+    later_committee = get_period_committee(state, later_start_epoch, shard, index, committee_count)

    # Take not-yet-cycled-out validators from earlier committee and already-cycled-in validators from
    # later committee; return a sorted list of the union of the two, deduplicated
@@ -186,7 +186,7 @@ def get_persistent_committee(state: BeaconState,
```python
def get_shard_proposer_index(state: BeaconState,
                             shard: Shard,
-                             slot: Slot) -> ValidatorIndex:
+                             slot: Slot) -> Optional[ValidatorIndex]:
    # Randomly shift persistent committee
    persistent_committee = get_persistent_committee(state, shard, slot)
    seed = hash(state.current_shuffling_seed + int_to_bytes(shard, length=8) + int_to_bytes(slot, length=8))
@@ -236,7 +236,7 @@ def verify_shard_attestation_signature(state: BeaconState,
        pubkey=bls_aggregate_pubkeys(pubkeys),
        message_hash=data.shard_block_root,
        signature=attestation.aggregate_signature,
-        domain=get_domain(state, slot_to_epoch(data.slot), DOMAIN_SHARD_ATTESTER)
+        domain=get_domain(state, DOMAIN_SHARD_ATTESTER, slot_to_epoch(data.slot))
    )
```
|
@ -333,7 +333,7 @@ def is_valid_shard_block(beacon_blocks: List[BeaconBlock],
|
|||
pubkey=beacon_state.validators[proposer_index].pubkey,
|
||||
message_hash=signing_root(block),
|
||||
signature=candidate.signature,
|
||||
domain=get_domain(beacon_state, slot_to_epoch(candidate.slot), DOMAIN_SHARD_PROPOSER),
|
||||
domain=get_domain(beacon_state, DOMAIN_SHARD_PROPOSER, slot_to_epoch(candidate.slot)),
|
||||
)
|
||||
|
||||
return True
|
||||
|
|
|
@@ -167,7 +167,7 @@ If a client wants to update its `finalized_header` it asks the network for a `Bl
```python
{
    'header': BeaconBlockHeader,
-    'shard_aggregate_signature': 'bytes96',
+    'shard_aggregate_signature': BLSSignature,
    'shard_bitfield': 'bytes',
    'shard_parent_block': ShardBlock,
}
@@ -47,8 +47,13 @@ class uint32(uint):
        return super().__new__(cls, value)


-# We simply default to uint64. But do give it a name, for readability
-uint64 = NewType('uint64', int)
+class uint64(uint):
+    byte_len = 8
+
+    def __new__(cls, value, *args, **kwargs):
+        if value.bit_length() > 64:
+            raise ValueError("value out of bounds for uint64")
+        return super().__new__(cls, value)


class uint128(uint):
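With `uint64` now a real class rather than a `NewType` alias, out-of-range values are rejected at construction time, matching the other fixed-width integers in this module. A quick usage sketch (run inside the pyspec environment):

```python
from eth2spec.utils.ssz.ssz_typing import uint64

print(int(uint64(5)))   # 5 — still behaves as a plain int at runtime
try:
    uint64(2**64)       # 65-bit value
except ValueError as exc:
    print(exc)          # value out of bounds for uint64
```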
@@ -250,7 +255,7 @@ class Vector(metaclass=VectorMeta):
        # cannot check non-type objects, or parametrized types
        if isinstance(cls.elem_type, type) and not hasattr(cls.elem_type, '__args__'):
            for i, item in enumerate(self.items):
-                if not issubclass(type(item), cls.elem_type):
+                if not issubclass(cls.elem_type, type(item)):
                    raise TypeError("Typed vector cannot hold differently typed value"
                                    " at index %d. Got type: %s, expected type: %s" % (i, type(item), cls.elem_type))
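The reversed `issubclass` check changes which values a typed `Vector` tolerates: it now asks whether the declared element type derives from the value's type, so a base-class value such as a plain `bytes` object no longer trips the `TypeError` for a vector of `Bytes32` (which subclasses `bytes`). A hedged illustration of just the predicate, with a stand-in `Bytes32`:

```python
class Bytes32(bytes):  # stand-in for the SSZ Bytes32 defined below
    pass

item = b'\x00' * 32                      # a plain bytes value
print(issubclass(type(item), Bytes32))   # False — old check would raise TypeError
print(issubclass(Bytes32, type(item)))   # True  — new check accepts the value
```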
|
@ -393,11 +398,27 @@ class BytesN(bytes, metaclass=BytesNMeta):
|
|||
return hash_tree_root(self, self.__class__)
|
||||
|
||||
|
||||
class Bytes4(BytesN):
|
||||
length = 4
|
||||
|
||||
|
||||
class Bytes32(BytesN):
|
||||
length = 32
|
||||
|
||||
|
||||
class Bytes48(BytesN):
|
||||
length = 48
|
||||
|
||||
|
||||
class Bytes96(BytesN):
|
||||
length = 96
|
||||
|
||||
|
||||
# SSZ Defaults
|
||||
# -----------------------------
|
||||
def get_zero_value(typ):
|
||||
if is_uint_type(typ):
|
||||
return 0
|
||||
return uint64(0)
|
||||
elif is_list_type(typ):
|
||||
return []
|
||||
elif is_bool_type(typ):
|
||||
|
|
|
@@ -2,4 +2,5 @@
pytest>=3.6,<3.7
../config_helpers
flake8==3.7.7
+mypy==0.701
pytest-cov