Mirror of https://github.com/status-im/eth2.0-specs.git, synced 2025-02-22 07:18:10 +00:00

Commit 3418c3506f: Merge dev and resolve conflicts (please let the CI tests pass with the new dev)

.gitignore (vendored), 2 changes:
@@ -9,7 +9,9 @@ build/
 output/
 
 eth2.0-spec-tests/
 
 .pytest_cache
+.mypy_cache
+
 # Dynamically built from Markdown spec
 test_libs/pyspec/eth2spec/phase0/spec.py
Makefile, 5 changes:
@@ -59,7 +59,10 @@ open_cov:
 
 lint: $(PY_SPEC_ALL_TARGETS)
 	cd $(PY_SPEC_DIR); . venv/bin/activate; \
-	flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec;
+	flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec; \
+	cd ./eth2spec; \
+	mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase0; \
+	mypy --follow-imports=silent --warn-unused-ignores --ignore-missing-imports --check-untyped-defs --disallow-incomplete-defs --disallow-untyped-defs -p phase1
 
 install_deposit_contract_test: $(PY_SPEC_ALL_TARGETS)
 	cd $(DEPOSIT_CONTRACT_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install -r requirements-testing.txt
@@ -13,9 +13,10 @@ from typing import (
 
 PHASE0_IMPORTS = '''from typing import (
     Any,
+    Callable,
     Dict,
     List,
-    NewType,
+    Set,
     Tuple,
 )
 
@@ -32,7 +33,8 @@ from eth2spec.utils.ssz.ssz_impl import (
 )
 from eth2spec.utils.ssz.ssz_typing import (
     # unused: uint8, uint16, uint32, uint128, uint256,
-    uint64, Container, Vector, BytesN
+    uint64, Container, Vector,
+    Bytes4, Bytes32, Bytes48, Bytes96,
 )
 from eth2spec.utils.bls import (
     bls_aggregate_pubkeys,
@@ -45,9 +47,11 @@ from eth2spec.utils.hash_function import hash
 '''
 PHASE1_IMPORTS = '''from typing import (
     Any,
+    Callable,
     Dict,
     List,
-    NewType,
+    Optional,
+    Set,
     Tuple,
 )
 
@@ -66,7 +70,8 @@ from eth2spec.utils.ssz.ssz_impl import (
 )
 from eth2spec.utils.ssz.ssz_typing import (
     # unused: uint8, uint16, uint32, uint128, uint256,
-    uint64, Container, Vector, BytesN
+    uint64, Container, Vector,
+    Bytes4, Bytes32, Bytes48, Bytes96,
 )
 from eth2spec.utils.bls import (
     bls_aggregate_pubkeys,
@@ -76,13 +81,6 @@ from eth2spec.utils.bls import (
 
 from eth2spec.utils.hash_function import hash
 '''
-NEW_TYPES = {
-    'Slot': 'int',
-    'Epoch': 'int',
-    'Shard': 'int',
-    'ValidatorIndex': 'int',
-    'Gwei': 'int',
-}
 BYTE_TYPES = [4, 32, 48, 96]
 SUNDRY_FUNCTIONS = '''
 def get_ssz_type_by_name(name: str) -> Container:
@@ -91,36 +89,33 @@ def get_ssz_type_by_name(name: str) -> Container:
 
 # Monkey patch validator compute committee code
 _compute_committee = compute_committee
-committee_cache = {}
+committee_cache: Dict[Tuple[Hash, Hash, int, int], List[ValidatorIndex]] = {}
 
 
-def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:
+def compute_committee(indices: List[ValidatorIndex],  # type: ignore
+                      seed: Hash,
+                      index: int,
+                      count: int) -> List[ValidatorIndex]:
     param_hash = (hash_tree_root(indices), seed, index, count)
 
-    if param_hash in committee_cache:
-        return committee_cache[param_hash]
-    else:
-        ret = _compute_committee(indices, seed, index, count)
-        committee_cache[param_hash] = ret
-        return ret
+    if param_hash not in committee_cache:
+        committee_cache[param_hash] = _compute_committee(indices, seed, index, count)
+    return committee_cache[param_hash]
 
 
 # Monkey patch hash cache
 _hash = hash
-hash_cache = {}
+hash_cache: Dict[bytes, Hash] = {}
 
 
-def hash(x):
-    if x in hash_cache:
-        return hash_cache[x]
-    else:
-        ret = _hash(x)
-        hash_cache[x] = ret
-        return ret
+def hash(x: bytes) -> Hash:
+    if x not in hash_cache:
+        hash_cache[x] = Hash(_hash(x))
+    return hash_cache[x]
 
 
 # Access to overwrite spec constants based on configuration
-def apply_constants_preset(preset: Dict[str, Any]):
+def apply_constants_preset(preset: Dict[str, Any]) -> None:
     global_vars = globals()
     for k, v in preset.items():
         global_vars[k] = v
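Both caches rewritten above follow the same pattern: key the result on the full input and fall back to the original function on a miss. A standalone sketch of that pattern (not part of the commit; hashlib's SHA-256 stands in for the spec's hash function):

```python
# Illustrative sketch of the memoization pattern above (not from the diff).
import hashlib
from typing import Dict


def _hash(x: bytes) -> bytes:
    # Stand-in for the spec's hash function.
    return hashlib.sha256(x).digest()


hash_cache: Dict[bytes, bytes] = {}


def cached_hash(x: bytes) -> bytes:
    # Compute once per distinct input, then serve repeats from the cache.
    if x not in hash_cache:
        hash_cache[x] = _hash(x)
    return hash_cache[x]


assert cached_hash(b"abc") == cached_hash(b"abc")
```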
@@ -134,32 +129,39 @@ def apply_constants_preset(preset: Dict[str, Any]):
 
 
 def objects_to_spec(functions: Dict[str, str],
+                    custom_types: Dict[str, str],
                     constants: Dict[str, str],
                     ssz_objects: Dict[str, str],
                     inserts: Dict[str, str],
                     imports: Dict[str, str],
-                    new_types: Dict[str, str],
-                    byte_types: List[int],
                     ) -> str:
     """
     Given all the objects that constitute a spec, combine them into a single pyfile.
     """
-    new_type_definitions = '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])
-    new_type_definitions += '\n' + '\n'.join(['Hash = Bytes32', 'BLSPubkey = Bytes48', 'BLSSignature = Bytes96'])
-    new_type_definitions += \
-        '\n' + '\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])
+    new_type_definitions = (
+        '\n\n'.join(
+            [
+                f"class {key}({value}):\n"
+                f"    def __init__(self, _x: {value}) -> None:\n"
+                f"        ...\n"
+                if value.startswith("uint")
+                else f"class {key}({value}):\n    pass\n"
+                for key, value in custom_types.items()
+            ]
+        )
+    )
     functions_spec = '\n\n'.join(functions.values())
     constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
     ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
     ssz_objects_reinitialization_spec = (
-        'def init_SSZ_types():\n    global_vars = globals()\n\n    '
+        'def init_SSZ_types() -> None:\n    global_vars = globals()\n\n    '
         + '\n\n    '.join([re.sub(r'(?!\n\n)\n', r'\n    ', value[:-1]) for value in ssz_objects.values()])
         + '\n\n'
         + '\n'.join(map(lambda x: '    global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
     )
     spec = (
         imports
-        + '\n' + new_type_definitions
+        + '\n\n' + new_type_definitions
        + '\n\n' + constants_spec
        + '\n\n\n' + ssz_objects_instantiation_spec
        + '\n\n' + functions_spec
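To make the nested f-string comprehension above easier to read, here is a standalone run with a two-entry sample dict (illustrative only, not part of the commit; the generated class bodies only become meaningful once the SSZ base types are in scope in the built spec file):

```python
# Standalone sample of the comprehension above (not from the diff).
custom_types = {'Slot': 'uint64', 'Version': 'Bytes4'}

new_type_definitions = '\n\n'.join(
    [
        f"class {key}({value}):\n"
        f"    def __init__(self, _x: {value}) -> None:\n"
        f"        ...\n"
        if value.startswith("uint")
        else f"class {key}({value}):\n    pass\n"
        for key, value in custom_types.items()
    ]
)
print(new_type_definitions)
# class Slot(uint64):
#     def __init__(self, _x: uint64) -> None:
#         ...
#
#
# class Version(Bytes4):
#     pass
```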
@@ -185,7 +187,7 @@ def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, st
     return old_constants
 
 
-def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
+def dependency_order_ssz_objects(objects: Dict[str, str], custom_types: Dict[str, str]) -> None:
     """
     Determines which SSZ Object is depenedent on which other and orders them appropriately
     """
@@ -194,14 +196,14 @@ def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
         dependencies = re.findall(r'(: [A-Z][\w[]*)', value)
         dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|Hash|BLSPubkey|BLSSignature|uint\d+|Bytes\d+|bytes', '', x), dependencies)
         for dep in dependencies:
-            if dep in NEW_TYPES or len(dep) == 0:
+            if dep in custom_types or len(dep) == 0:
                 continue
             key_list = list(objects.keys())
             for item in [dep, key] + key_list[key_list.index(dep)+1:]:
                 objects[item] = objects.pop(item)
 
 
-def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
+def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str], custom_types) -> Dict[str, str]:
     """
     Takes in old spec and new spec ssz objects, combines them,
     and returns the newer versions of the objects in dependency order.
@@ -213,7 +215,7 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]
         # remove leading variable name
         value = re.sub(r'^class [\w]*\(Container\):\n', '', value)
         old_objects[key] = old_objects.get(key, '') + value
-    dependency_order_ssz_objects(old_objects)
+    dependency_order_ssz_objects(old_objects, custom_types)
     return old_objects
 
 
@@ -225,20 +227,21 @@ def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
     """
     Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
     """
-    functions0, constants0, ssz_objects0, inserts0 = spec0
-    functions1, constants1, ssz_objects1, inserts1 = spec1
+    functions0, custom_types0, constants0, ssz_objects0, inserts0 = spec0
+    functions1, custom_types1, constants1, ssz_objects1, inserts1 = spec1
     functions = combine_functions(functions0, functions1)
+    custom_types = combine_constants(custom_types0, custom_types1)
     constants = combine_constants(constants0, constants1)
-    ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1)
+    ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1, custom_types)
     inserts = combine_inserts(inserts0, inserts1)
-    return functions, constants, ssz_objects, inserts
+    return functions, custom_types, constants, ssz_objects, inserts
 
 
 def build_phase0_spec(phase0_sourcefile: str, fork_choice_sourcefile: str, outfile: str=None) -> Optional[str]:
     phase0_spec = get_spec(phase0_sourcefile)
     fork_choice_spec = get_spec(fork_choice_sourcefile)
     spec_objects = combine_spec_objects(phase0_spec, fork_choice_spec)
-    spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS, NEW_TYPES, BYTE_TYPES)
+    spec = objects_to_spec(*spec_objects, PHASE0_IMPORTS)
     if outfile is not None:
         with open(outfile, 'w') as out:
             out.write(spec)
@@ -257,7 +260,7 @@ def build_phase1_spec(phase0_sourcefile: str,
     spec_objects = phase0_spec
     for value in [phase1_custody, phase1_shard_data, fork_choice_spec]:
         spec_objects = combine_spec_objects(spec_objects, value)
-    spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS, NEW_TYPES, BYTE_TYPES)
+    spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS)
     if outfile is not None:
         with open(outfile, 'w') as out:
             out.write(spec)
@@ -30,6 +30,7 @@ def get_spec(file_name: str) -> SpecObject:
     function_matcher = re.compile(FUNCTION_REGEX)
     inserts_matcher = re.compile(BEGIN_INSERT_REGEX)
     is_ssz = False
+    custom_types = {}
     for linenum, line in enumerate(open(file_name).readlines()):
         line = line.rstrip()
         if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
@@ -65,7 +66,7 @@ def get_spec(file_name: str) -> SpecObject:
                 ssz_objects[current_name] = ssz_objects.get(current_name, '') + line + '\n'
             else:
                 functions[current_name] = functions.get(current_name, '') + line + '\n'
-        # Handle constant table entries
+        # Handle constant and custom types table entries
        elif pulling_from is None and len(line) > 0 and line[0] == '|':
            row = line[1:].split('|')
            if len(row) >= 2:
@@ -73,12 +74,15 @@ def get_spec(file_name: str) -> SpecObject:
                     row[i] = row[i].strip().strip('`')
                     if '`' in row[i]:
                         row[i] = row[i][:row[i].find('`')]
-                eligible = True
-                if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
-                    eligible = False
-                for c in row[0]:
-                    if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
-                        eligible = False
-                if eligible:
-                    constants[row[0]] = row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')
-    return functions, constants, ssz_objects, inserts
+                if row[1].startswith('uint') or row[1].startswith('Bytes'):
+                    custom_types[row[0]] = row[1]
+                else:
+                    eligible = True
+                    if row[0][0] not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_':
+                        eligible = False
+                    for c in row[0]:
+                        if c not in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789':
+                            eligible = False
+                    if eligible:
+                        constants[row[0]] = row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')
+    return functions, custom_types, constants, ssz_objects, inserts
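For context on the parsing change above, this is roughly how a Markdown table row gets split and routed into `custom_types` rather than `constants` (standalone sketch, not from the commit; the constant-eligibility check is simplified away and the input row is hypothetical):

```python
# Illustrative sketch of the row classification above (not from the diff).
line = "| `Slot` | `uint64` | a slot number |"
row = line[1:].split('|')
for i in range(2):
    row[i] = row[i].strip().strip('`')
    if '`' in row[i]:
        row[i] = row[i][:row[i].find('`')]

custom_types = {}
constants = {}
if row[1].startswith('uint') or row[1].startswith('Bytes'):
    custom_types[row[0]] = row[1]   # rows of the custom-types table land here
else:
    constants[row[0]] = row[1]      # constant rows (eligibility check omitted)

assert custom_types == {'Slot': 'uint64'}
```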
@@ -10,10 +10,10 @@
     - [Introduction](#introduction)
     - [Notation](#notation)
     - [Terminology](#terminology)
+    - [Custom types](#custom-types)
     - [Constants](#constants)
     - [Configuration](#configuration)
         - [Misc](#misc)
-        - [Deposit contract](#deposit-contract)
         - [Gwei values](#gwei-values)
         - [Initial values](#initial-values)
         - [Time parameters](#time-parameters)
@@ -21,7 +21,7 @@
         - [Rewards and penalties](#rewards-and-penalties)
         - [Max operations per block](#max-operations-per-block)
         - [Signature domains](#signature-domains)
-    - [Custom types](#custom-types)
+    - [Custom types](#custom-types-1)
     - [Containers](#containers)
         - [Misc dependencies](#misc-dependencies)
             - [`Fork`](#fork)
@@ -151,14 +151,30 @@ Code snippets appearing in `this style` are to be interpreted as Python code.
 * **Withdrawal period**—the number of slots between a [validator](#dfn-validator) exit and the [validator](#dfn-validator) balance being withdrawable.
 * **Genesis time**—the Unix time of the genesis beacon chain block at slot 0.
 
+## Custom types
+
+We define the following Python custom types for type hinting and readability:
+
+| Name | SSZ equivalent | Description |
+| - | - | - |
+| `Slot` | `uint64` | a slot number |
+| `Epoch` | `uint64` | an epoch number |
+| `Shard` | `uint64` | a shard number |
+| `ValidatorIndex` | `uint64` | a validator registry index |
+| `Gwei` | `uint64` | an amount in Gwei |
+| `Version` | `Bytes4` | a fork version number |
+| `Hash` | `Bytes32` | a hashed result |
+| `BLSPubkey` | `Bytes48` | a BLS12-381 public key |
+| `BLSSignature` | `Bytes96` | a BLS12-381 signature |
+
 ## Constants
 
 The following values are (non-configurable) constants used throughout the specification.
 
 | Name | Value |
 | - | - |
-| `FAR_FUTURE_EPOCH` | `2**64 - 1` |
-| `ZERO_HASH` | `b'\x00' * 32` |
+| `FAR_FUTURE_EPOCH` | `Epoch(2**64 - 1)` |
+| `ZERO_HASH` | `Hash(b'\x00' * 32)` |
 | `BASE_REWARDS_PER_EPOCH` | `5` |
 | `DEPOSIT_CONTRACT_TREE_DEPTH` | `2**5` (= 32) |
 
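For orientation (not part of the spec text): the build script change earlier in this commit turns each row of the new table into a subclass of its SSZ equivalent, so constants such as `FAR_FUTURE_EPOCH` now carry a type. A minimal sketch of the idea, with plain `int` and `bytes` standing in for the pyspec SSZ base types:

```python
# Minimal sketch (not part of the spec); int/bytes stand in for SSZ uint64/Bytes32.
uint64 = int


class Epoch(uint64):
    pass


class Hash(bytes):
    pass


FAR_FUTURE_EPOCH = Epoch(2**64 - 1)
ZERO_HASH = Hash(b'\x00' * 32)

# The wrappers still behave like their base types in arithmetic and comparisons.
assert isinstance(FAR_FUTURE_EPOCH, int) and FAR_FUTURE_EPOCH == 2**64 - 1
```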
@@ -183,19 +199,19 @@ These configurations are updated for releases, but may be out of sync during `de
 
 ### Gwei values
 
-| Name | Value | Unit |
+| Name | Value |
 | - | - | :-: |
-| `MIN_DEPOSIT_AMOUNT` | `2**0 * 10**9` (= 1,000,000,000) | Gwei |
-| `MAX_EFFECTIVE_BALANCE` | `2**5 * 10**9` (= 32,000,000,000) | Gwei |
-| `EJECTION_BALANCE` | `2**4 * 10**9` (= 16,000,000,000) | Gwei |
-| `EFFECTIVE_BALANCE_INCREMENT` | `2**0 * 10**9` (= 1,000,000,000) | Gwei |
+| `MIN_DEPOSIT_AMOUNT` | `Gwei(2**0 * 10**9)` (= 1,000,000,000) |
+| `MAX_EFFECTIVE_BALANCE` | `Gwei(2**5 * 10**9)` (= 32,000,000,000) |
+| `EJECTION_BALANCE` | `Gwei(2**4 * 10**9)` (= 16,000,000,000) |
+| `EFFECTIVE_BALANCE_INCREMENT` | `Gwei(2**0 * 10**9)` (= 1,000,000,000) |
 
 ### Initial values
 
 | Name | Value |
 | - | - |
-| `GENESIS_SLOT` | `0` |
-| `GENESIS_EPOCH` | `0` |
+| `GENESIS_SLOT` | `Slot(0)` |
+| `GENESIS_EPOCH` | `Epoch(0)` |
 | `BLS_WITHDRAWAL_PREFIX` | `0` |
 
 ### Time parameters
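The dropped unit column becomes redundant once the values are wrapped in `Gwei(...)`. A plain-Python sketch of the same constants (not part of the spec; `Gwei = int` is a stand-in for the generated type):

```python
# Plain-Python sketch of the Gwei values table (not part of the spec).
Gwei = int  # stand-in for the generated Gwei type

MIN_DEPOSIT_AMOUNT = Gwei(2**0 * 10**9)           # 1,000,000,000 Gwei = 1 ETH
MAX_EFFECTIVE_BALANCE = Gwei(2**5 * 10**9)        # 32,000,000,000 Gwei = 32 ETH
EJECTION_BALANCE = Gwei(2**4 * 10**9)             # 16,000,000,000 Gwei = 16 ETH
EFFECTIVE_BALANCE_INCREMENT = Gwei(2**0 * 10**9)

assert MAX_EFFECTIVE_BALANCE == 32 * EFFECTIVE_BALANCE_INCREMENT
```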
@@ -283,8 +299,8 @@ The following types are [SimpleSerialize (SSZ)](../simple-serialize.md) containe
 
 ```python
 class Fork(Container):
-    previous_version: Bytes4
-    current_version: Bytes4
+    previous_version: Version
+    current_version: Version
     epoch: Epoch  # Epoch of latest fork
 ```
 
@@ -571,7 +587,7 @@ def slot_to_epoch(slot: Slot) -> Epoch:
     """
     Return the epoch number of the given ``slot``.
     """
-    return slot // SLOTS_PER_EPOCH
+    return Epoch(slot // SLOTS_PER_EPOCH)
 ```
 
 ### `get_previous_epoch`
@@ -583,7 +599,7 @@ def get_previous_epoch(state: BeaconState) -> Epoch:
     Return the current epoch if it's genesis epoch.
     """
     current_epoch = get_current_epoch(state)
-    return GENESIS_EPOCH if current_epoch == GENESIS_EPOCH else current_epoch - 1
+    return GENESIS_EPOCH if current_epoch == GENESIS_EPOCH else Epoch(current_epoch - 1)
 ```
 
 ### `get_current_epoch`
@@ -603,7 +619,7 @@ def get_epoch_start_slot(epoch: Epoch) -> Slot:
     """
     Return the starting slot of the given ``epoch``.
     """
-    return epoch * SLOTS_PER_EPOCH
+    return Slot(epoch * SLOTS_PER_EPOCH)
 ```
 
 ### `is_active_validator`
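The pattern in these hunks repeats throughout the file: because the custom types subclass `uint64`, arithmetic on them falls back to the base integer type, so a function annotated to return `Epoch` or `Slot` has to re-wrap its result explicitly. A toy sketch (not part of the spec; the type definitions and constant value are stand-ins):

```python
# Toy sketch of the re-wrapping pattern (not from the spec).
uint64 = int  # stand-in for the SSZ uint64


class Slot(uint64):
    pass


class Epoch(uint64):
    pass


SLOTS_PER_EPOCH = 64  # illustrative value


def slot_to_epoch(slot: Slot) -> Epoch:
    # A bare `slot // SLOTS_PER_EPOCH` would be a plain int, not an Epoch.
    return Epoch(slot // SLOTS_PER_EPOCH)


assert slot_to_epoch(Slot(130)) == Epoch(2)
```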
@@ -633,7 +649,7 @@ def get_active_validator_indices(state: BeaconState, epoch: Epoch) -> List[Valid
     """
     Get active validator indices at ``epoch``.
     """
-    return [i for i, v in enumerate(state.validators) if is_active_validator(v, epoch)]
+    return [ValidatorIndex(i) for i, v in enumerate(state.validators) if is_active_validator(v, epoch)]
 ```
 
 ### `increase_balance`
@@ -688,11 +704,11 @@ def get_shard_delta(state: BeaconState, epoch: Epoch) -> int:
 ```python
 def get_epoch_start_shard(state: BeaconState, epoch: Epoch) -> Shard:
     assert epoch <= get_current_epoch(state) + 1
-    check_epoch = get_current_epoch(state) + 1
-    shard = (state.start_shard + get_shard_delta(state, get_current_epoch(state))) % SHARD_COUNT
+    check_epoch = Epoch(get_current_epoch(state) + 1)
+    shard = Shard((state.start_shard + get_shard_delta(state, get_current_epoch(state))) % SHARD_COUNT)
     while check_epoch > epoch:
-        check_epoch -= 1
-        shard = (shard + SHARD_COUNT - get_shard_delta(state, check_epoch)) % SHARD_COUNT
+        check_epoch -= Epoch(1)
+        shard = Shard((shard + SHARD_COUNT - get_shard_delta(state, check_epoch)) % SHARD_COUNT)
     return shard
 ```
 
@@ -702,7 +718,7 @@ def get_epoch_start_shard(state: BeaconState, epoch: Epoch) -> Shard:
 def get_attestation_data_slot(state: BeaconState, data: AttestationData) -> Slot:
     committee_count = get_epoch_committee_count(state, data.target_epoch)
     offset = (data.crosslink.shard + SHARD_COUNT - get_epoch_start_shard(state, data.target_epoch)) % SHARD_COUNT
-    return get_epoch_start_slot(data.target_epoch) + offset // (committee_count // SLOTS_PER_EPOCH)
+    return Slot(get_epoch_start_slot(data.target_epoch) + offset // (committee_count // SLOTS_PER_EPOCH))
 ```
 
 ### `get_block_root_at_slot`
@@ -762,7 +778,7 @@ def generate_seed(state: BeaconState,
     Generate a seed for the given ``epoch``.
     """
     return hash(
-        get_randao_mix(state, epoch + RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) +
+        get_randao_mix(state, Epoch(epoch + RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD)) +
         get_active_index_root(state, epoch) +
         int_to_bytes(epoch, length=32)
     )
@@ -778,7 +794,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:
     epoch = get_current_epoch(state)
     committees_per_slot = get_epoch_committee_count(state, epoch) // SLOTS_PER_EPOCH
     offset = committees_per_slot * (state.slot % SLOTS_PER_EPOCH)
-    shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+    shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
     first_committee = get_crosslink_committee(state, epoch, shard)
     MAX_RANDOM_BYTE = 2**8 - 1
     seed = generate_seed(state, epoch)
@@ -788,7 +804,7 @@ def get_beacon_proposer_index(state: BeaconState) -> ValidatorIndex:
         random_byte = hash(seed + int_to_bytes(i // 32, length=8))[i % 32]
         effective_balance = state.validators[candidate_index].effective_balance
         if effective_balance * MAX_RANDOM_BYTE >= MAX_EFFECTIVE_BALANCE * random_byte:
-            return candidate_index
+            return ValidatorIndex(candidate_index)
         i += 1
 ```
 
@@ -821,16 +837,19 @@ def get_shuffled_index(index: ValidatorIndex, index_count: int, seed: Hash) -> V
 
     # Swap or not (https://link.springer.com/content/pdf/10.1007%2F978-3-642-32009-5_1.pdf)
     # See the 'generalized domain' algorithm on page 3
-    for round in range(SHUFFLE_ROUND_COUNT):
-        pivot = bytes_to_int(hash(seed + int_to_bytes(round, length=1))[0:8]) % index_count
-        flip = (pivot + index_count - index) % index_count
+    for current_round in range(SHUFFLE_ROUND_COUNT):
+        pivot = bytes_to_int(hash(seed + int_to_bytes(current_round, length=1))[0:8]) % index_count
+        flip = ValidatorIndex((pivot + index_count - index) % index_count)
         position = max(index, flip)
-        source = hash(seed + int_to_bytes(round, length=1) + int_to_bytes(position // 256, length=4))
+        source = hash(
+            seed + int_to_bytes(current_round, length=1) +
+            int_to_bytes(position // 256, length=4)
+        )
         byte = source[(position % 256) // 8]
         bit = (byte >> (position % 8)) % 2
         index = flip if bit else index
 
-    return index
+    return ValidatorIndex(index)
 ```
 
 ### `compute_committee`
@@ -839,7 +858,7 @@ def get_shuffled_index(index: ValidatorIndex, index_count: int, seed: Hash) -> V
 ```python
 def compute_committee(indices: List[ValidatorIndex], seed: Hash, index: int, count: int) -> List[ValidatorIndex]:
     start = (len(indices) * index) // count
     end = (len(indices) * (index + 1)) // count
-    return [indices[get_shuffled_index(i, len(indices), seed)] for i in range(start, end)]
+    return [indices[get_shuffled_index(ValidatorIndex(i), len(indices), seed)] for i in range(start, end)]
 ```
 
 ### `get_crosslink_committee`
@@ -889,7 +908,7 @@ def get_total_balance(state: BeaconState, indices: List[ValidatorIndex]) -> Gwei
     """
     Return the combined effective balance of the ``indices``. (1 Gwei minimum to avoid divisions by zero.)
     """
-    return max(sum([state.validators[index].effective_balance for index in indices]), 1)
+    return Gwei(max(sum([state.validators[index].effective_balance for index in indices]), 1))
 ```
 
 ### `get_domain`
@@ -897,7 +916,7 @@ def get_total_balance(state: BeaconState, indices: List[ValidatorIndex]) -> Gwei
 ```python
 def get_domain(state: BeaconState,
                domain_type: int,
-               message_epoch: int=None) -> int:
+               message_epoch: Epoch=None) -> int:
     """
     Return the signature domain (fork version concatenated with domain type) of a message.
     """
@@ -1025,7 +1044,7 @@ def get_delayed_activation_exit_epoch(epoch: Epoch) -> Epoch:
     """
     Return the epoch at which an activation or exit triggered in ``epoch`` takes effect.
     """
-    return epoch + 1 + ACTIVATION_EXIT_DELAY
+    return Epoch(epoch + 1 + ACTIVATION_EXIT_DELAY)
 ```
 
 ### `get_churn_limit`
@@ -1074,11 +1093,11 @@ def initiate_validator_exit(state: BeaconState, index: ValidatorIndex) -> None:
     exit_queue_epoch = max(exit_epochs + [get_delayed_activation_exit_epoch(get_current_epoch(state))])
     exit_queue_churn = len([v for v in state.validators if v.exit_epoch == exit_queue_epoch])
     if exit_queue_churn >= get_churn_limit(state):
-        exit_queue_epoch += 1
+        exit_queue_epoch += Epoch(1)
 
     # Set validator exit epoch and withdrawable epoch
     validator.exit_epoch = exit_queue_epoch
-    validator.withdrawable_epoch = validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY
+    validator.withdrawable_epoch = Epoch(validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY)
 ```
 
 #### `slash_validator`
@@ -1093,15 +1112,15 @@ def slash_validator(state: BeaconState,
     current_epoch = get_current_epoch(state)
     initiate_validator_exit(state, slashed_index)
     state.validators[slashed_index].slashed = True
-    state.validators[slashed_index].withdrawable_epoch = current_epoch + SLASHED_EXIT_LENGTH
+    state.validators[slashed_index].withdrawable_epoch = Epoch(current_epoch + SLASHED_EXIT_LENGTH)
     slashed_balance = state.validators[slashed_index].effective_balance
     state.slashed_balances[current_epoch % SLASHED_EXIT_LENGTH] += slashed_balance
 
     proposer_index = get_beacon_proposer_index(state)
     if whistleblower_index is None:
         whistleblower_index = proposer_index
-    whistleblowing_reward = slashed_balance // WHISTLEBLOWING_REWARD_QUOTIENT
-    proposer_reward = whistleblowing_reward // PROPOSER_REWARD_QUOTIENT
+    whistleblowing_reward = Gwei(slashed_balance // WHISTLEBLOWING_REWARD_QUOTIENT)
+    proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
     increase_balance(state, proposer_index, proposer_reward)
     increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
     decrease_balance(state, slashed_index, whistleblowing_reward)
@@ -1204,7 +1223,7 @@ def process_slots(state: BeaconState, slot: Slot) -> None:
         # Process epoch on the first slot of the next epoch
         if (state.slot + 1) % SLOTS_PER_EPOCH == 0:
             process_epoch(state)
-        state.slot += 1
+        state.slot += Slot(1)
 ```
 
 ```python
@@ -1271,7 +1290,7 @@ def get_matching_head_attestations(state: BeaconState, epoch: Epoch) -> List[Pen
 ```python
 def get_unslashed_attesting_indices(state: BeaconState,
                                     attestations: List[PendingAttestation]) -> List[ValidatorIndex]:
-    output = set()
+    output = set()  # type: Set[ValidatorIndex]
     for a in attestations:
         output = output.union(get_attesting_indices(state, a.data, a.aggregation_bitfield))
     return sorted(filter(lambda index: not state.validators[index].slashed, list(output)))
@@ -1357,7 +1376,7 @@ def process_crosslinks(state: BeaconState) -> None:
     state.previous_crosslinks = [c for c in state.current_crosslinks]
     for epoch in (get_previous_epoch(state), get_current_epoch(state)):
         for offset in range(get_epoch_committee_count(state, epoch)):
-            shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+            shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
             crosslink_committee = get_crosslink_committee(state, epoch, shard)
             winning_crosslink, attesting_indices = get_winning_crosslink_and_attesting_indices(state, epoch, shard)
             if 3 * get_total_balance(state, attesting_indices) >= 2 * get_total_balance(state, crosslink_committee):
@@ -1370,17 +1389,17 @@ def process_crosslinks(state: BeaconState) -> None:
 def get_base_reward(state: BeaconState, index: ValidatorIndex) -> Gwei:
     total_balance = get_total_active_balance(state)
     effective_balance = state.validators[index].effective_balance
-    return effective_balance * BASE_REWARD_FACTOR // integer_squareroot(total_balance) // BASE_REWARDS_PER_EPOCH
+    return Gwei(effective_balance * BASE_REWARD_FACTOR // integer_squareroot(total_balance) // BASE_REWARDS_PER_EPOCH)
 ```
 
 ```python
 def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
     previous_epoch = get_previous_epoch(state)
     total_balance = get_total_active_balance(state)
-    rewards = [0 for _ in range(len(state.validators))]
-    penalties = [0 for _ in range(len(state.validators))]
+    rewards = [Gwei(0) for _ in range(len(state.validators))]
+    penalties = [Gwei(0) for _ in range(len(state.validators))]
     eligible_validator_indices = [
-        index for index, v in enumerate(state.validators)
+        ValidatorIndex(index) for index, v in enumerate(state.validators)
         if is_active_validator(v, previous_epoch) or (v.slashed and previous_epoch + 1 < v.withdrawable_epoch)
     ]
 
@@ -1399,23 +1418,25 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
 
     # Proposer and inclusion delay micro-rewards
     for index in get_unslashed_attesting_indices(state, matching_source_attestations):
+        index = ValidatorIndex(index)
         attestation = min([
             a for a in matching_source_attestations
             if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
         ], key=lambda a: a.inclusion_delay)
-        proposer_reward = get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT
+        proposer_reward = Gwei(get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT)
         rewards[attestation.proposer_index] += proposer_reward
         max_attester_reward = get_base_reward(state, index) - proposer_reward
-        rewards[index] += max_attester_reward * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay
+        rewards[index] += Gwei(max_attester_reward * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay)
 
     # Inactivity penalty
     finality_delay = previous_epoch - state.finalized_epoch
     if finality_delay > MIN_EPOCHS_TO_INACTIVITY_PENALTY:
         matching_target_attesting_indices = get_unslashed_attesting_indices(state, matching_target_attestations)
         for index in eligible_validator_indices:
-            penalties[index] += BASE_REWARDS_PER_EPOCH * get_base_reward(state, index)
+            index = ValidatorIndex(index)
+            penalties[index] += Gwei(BASE_REWARDS_PER_EPOCH * get_base_reward(state, index))
             if index not in matching_target_attesting_indices:
-                penalties[index] += (
+                penalties[index] += Gwei(
                     state.validators[index].effective_balance * finality_delay // INACTIVITY_PENALTY_QUOTIENT
                 )
 
@@ -1424,11 +1445,11 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
 
 ```python
 def get_crosslink_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
-    rewards = [0 for index in range(len(state.validators))]
-    penalties = [0 for index in range(len(state.validators))]
+    rewards = [Gwei(0) for index in range(len(state.validators))]
+    penalties = [Gwei(0) for index in range(len(state.validators))]
     epoch = get_previous_epoch(state)
     for offset in range(get_epoch_committee_count(state, epoch)):
-        shard = (get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT
+        shard = Shard((get_epoch_start_shard(state, epoch) + offset) % SHARD_COUNT)
         crosslink_committee = get_crosslink_committee(state, epoch, shard)
         winning_crosslink, attesting_indices = get_winning_crosslink_and_attesting_indices(state, epoch, shard)
         attesting_balance = get_total_balance(state, attesting_indices)
@@ -1449,9 +1470,9 @@ def process_rewards_and_penalties(state: BeaconState) -> None:
 
     rewards1, penalties1 = get_attestation_deltas(state)
     rewards2, penalties2 = get_crosslink_deltas(state)
-    for i in range(len(state.validators)):
-        increase_balance(state, i, rewards1[i] + rewards2[i])
-        decrease_balance(state, i, penalties1[i] + penalties2[i])
+    for index in range(len(state.validators)):
+        increase_balance(state, ValidatorIndex(index), rewards1[index] + rewards2[index])
+        decrease_balance(state, ValidatorIndex(index), penalties1[index] + penalties2[index])
 ```
 
 #### Registry updates
@@ -1467,7 +1488,7 @@ def process_registry_updates(state: BeaconState) -> None:
             validator.activation_eligibility_epoch = get_current_epoch(state)
 
         if is_active_validator(validator, get_current_epoch(state)) and validator.effective_balance <= EJECTION_BALANCE:
-            initiate_validator_exit(state, index)
+            initiate_validator_exit(state, ValidatorIndex(index))
 
     # Queue validators eligible for activation and not dequeued for activation prior to finalized epoch
     activation_queue = sorted([
@@ -1500,7 +1521,7 @@ def process_slashings(state: BeaconState) -> None:
                 validator.effective_balance * min(total_penalties * 3, total_balance) // total_balance,
                 validator.effective_balance // MIN_SLASHING_PENALTY_QUOTIENT
             )
-            decrease_balance(state, index, penalty)
+            decrease_balance(state, ValidatorIndex(index), penalty)
 ```
 
 #### Final updates
@@ -1519,11 +1540,11 @@ def process_final_updates(state: BeaconState) -> None:
         if balance < validator.effective_balance or validator.effective_balance + 3 * HALF_INCREMENT < balance:
             validator.effective_balance = min(balance - balance % EFFECTIVE_BALANCE_INCREMENT, MAX_EFFECTIVE_BALANCE)
     # Update start shard
-    state.start_shard = (state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT
+    state.start_shard = Shard((state.start_shard + get_shard_delta(state, current_epoch)) % SHARD_COUNT)
     # Set active index root
     index_root_position = (next_epoch + ACTIVATION_EXIT_DELAY) % ACTIVE_INDEX_ROOTS_LENGTH
     state.active_index_roots[index_root_position] = hash_tree_root(
-        get_active_validator_indices(state, next_epoch + ACTIVATION_EXIT_DELAY)
+        get_active_validator_indices(state, Epoch(next_epoch + ACTIVATION_EXIT_DELAY))
     )
     # Set total slashed balances
     state.slashed_balances[next_epoch % SLASHED_EXIT_LENGTH] = (
@@ -1610,15 +1631,15 @@ def process_operations(state: BeaconState, body: BeaconBlockBody) -> None:
     assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index)
     # Verify that there are no duplicate transfers
     assert len(body.transfers) == len(set(body.transfers))
-    for operations, max_operations, function in (
+    all_operations = [
         (body.proposer_slashings, MAX_PROPOSER_SLASHINGS, process_proposer_slashing),
         (body.attester_slashings, MAX_ATTESTER_SLASHINGS, process_attester_slashing),
         (body.attestations, MAX_ATTESTATIONS, process_attestation),
         (body.deposits, MAX_DEPOSITS, process_deposit),
         (body.voluntary_exits, MAX_VOLUNTARY_EXITS, process_voluntary_exit),
         (body.transfers, MAX_TRANSFERS, process_transfer),
-    ):
+    ]  # type: List[Tuple[List[Container], int, Callable]]
+    for operations, max_operations, function in all_operations:
         assert len(operations) <= max_operations
         for operation in operations:
             function(state, operation)
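The reshuffle above exists mainly so the operations table can carry a `# type:` annotation for mypy. A stripped-down sketch of the same pattern outside the spec (hypothetical names and types, not from the diff):

```python
# Stripped-down sketch of the annotated operations table (not from the diff).
from typing import Callable, List, Tuple


def process_exit(state: dict, operation: str) -> None:
    state.setdefault("processed", []).append(operation)


state: dict = {}
exits = ["exit-0", "exit-1"]
MAX_EXITS = 16

all_operations = [
    (exits, MAX_EXITS, process_exit),
]  # type: List[Tuple[List[str], int, Callable]]

for operations, max_operations, function in all_operations:
    assert len(operations) <= max_operations
    for operation in operations:
        function(state, operation)

assert state["processed"] == ["exit-0", "exit-1"]
```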
@@ -1754,7 +1775,7 @@ def process_deposit(state: BeaconState, deposit: Deposit) -> None:
         state.balances.append(amount)
     else:
         # Increase balance by deposit amount
-        index = validator_pubkeys.index(pubkey)
+        index = ValidatorIndex(validator_pubkeys.index(pubkey))
         increase_balance(state, index, amount)
 ```
 
@@ -103,10 +103,10 @@ def get_ancestor(store: Store, root: Bytes32, slot: Slot) -> Bytes32:
 def get_attesting_balance_from_store(store: Store, root: Bytes32) -> Gwei:
     state = store.states[store.justified_root]
     active_indices = get_active_validator_indices(state.validator_registry, slot_to_epoch(state.slot))
-    return sum(
+    return Gwei(sum(
         state.validator_registry[i].effective_balance for i in active_indices
         if get_ancestor(store, store.latest_targets[i].root, store.blocks[root].slot) == root
-    )
+    ))
 ```
 
 #### `get_head`
@@ -133,9 +133,9 @@ class CustodyBitChallenge(Container):
     responder_index: ValidatorIndex
     attestation: Attestation
     challenger_index: ValidatorIndex
-    responder_key: Bytes96
+    responder_key: BLSSignature
     chunk_bits: bytes
-    signature: Bytes96
+    signature: BLSSignature
 ```
 
 #### `CustodyChunkChallengeRecord`
@@ -162,7 +162,7 @@ class CustodyBitChallengeRecord(Container):
     data_root: Bytes32
     chunk_count: uint64
     chunk_bits_merkle_root: Bytes32
-    responder_key: Bytes96
+    responder_key: BLSSignature
 ```
 
 #### `CustodyResponse`
@@ -184,9 +184,9 @@ class CustodyResponse(Container):
 ```python
 class CustodyKeyReveal(Container):
     # Index of the validator whose key is being revealed
-    revealer_index: uint64
+    revealer_index: ValidatorIndex
     # Reveal (masked signature)
-    reveal: Bytes96
+    reveal: BLSSignature
 ```
 
 #### `EarlyDerivedSecretReveal`
@@ -196,13 +196,13 @@ Represents an early (punishable) reveal of one of the derived secrets, where der
 ```python
 class EarlyDerivedSecretReveal(Container):
     # Index of the validator whose key is being revealed
-    revealed_index: uint64
+    revealed_index: ValidatorIndex
     # RANDAO epoch of the key that is being revealed
-    epoch: uint64
+    epoch: Epoch
     # Reveal (masked signature)
-    reveal: Bytes96
+    reveal: BLSSignature
     # Index of the validator who revealed (whistleblower)
-    masker_index: uint64
+    masker_index: ValidatorIndex
     # Mask used to hide the actual reveal signature (prevent reveal from being stolen)
     mask: Bytes32
 ```
@@ -232,7 +232,7 @@ class BeaconState(Container):
 
     # Future derived secrets already exposed; contains the indices of the exposed validator
     # at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
-    exposed_derived_secrets: Vector[List[uint64], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
+    exposed_derived_secrets: Vector[List[ValidatorIndex], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
 ```
 
 #### `BeaconBlockBody`
@ -251,7 +251,7 @@ class BeaconBlockBody(Container):
|
|||||||
### `ceillog2`
|
### `ceillog2`
|
||||||
|
|
||||||
```python
|
```python
|
||||||
def ceillog2(x):
|
def ceillog2(x: int) -> int:
|
||||||
return x.bit_length()
|
return x.bit_length()
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -267,9 +267,9 @@ def get_custody_chunk_count(crosslink: Crosslink) -> int:
|
|||||||
### `get_custody_chunk_bit`
|
### `get_custody_chunk_bit`
|
||||||
|
|
||||||
```python
|
```python
|
||||||
def get_custody_chunk_bit(key: Bytes96, chunk: bytes) -> bool:
|
def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
|
||||||
# TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
|
# TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
|
||||||
return get_bitfield_bit(hash(key + chunk), 0)
|
return bool(get_bitfield_bit(hash(key + chunk), 0))
|
||||||
```
|
```
|
||||||
|
|
||||||
### `get_chunk_bits_root`
|
### `get_chunk_bits_root`
|
||||||
@@ -288,7 +288,7 @@ def get_chunk_bits_root(chunk_bitfield: bytes) -> Bytes32:
 ```python
 def get_randao_epoch_for_custody_period(period: int, validator_index: ValidatorIndex) -> Epoch:
     next_period_start = (period + 1) * EPOCHS_PER_CUSTODY_PERIOD - validator_index % EPOCHS_PER_CUSTODY_PERIOD
-    return next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING
+    return Epoch(next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING)
 ```
 
 ### `get_validators_custody_reveal_period`
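The arithmetic staggers custody periods per validator and then pads forward to the RANDAO epoch whose reveal serves as the custody key. A self-contained sketch with small illustrative constants (the real values come from the phase 1 configuration):

```python
# Illustrative constants only; not the spec values.
EPOCHS_PER_CUSTODY_PERIOD = 8
CUSTODY_PERIOD_TO_RANDAO_PADDING = 2

def get_randao_epoch_for_custody_period(period: int, validator_index: int) -> int:
    next_period_start = (period + 1) * EPOCHS_PER_CUSTODY_PERIOD - validator_index % EPOCHS_PER_CUSTODY_PERIOD
    return next_period_start + CUSTODY_PERIOD_TO_RANDAO_PADDING

# Validator 3, period 0: its next period starts at epoch 8 - 3 = 5,
# so the relevant RANDAO reveal is drawn from epoch 5 + 2 = 7.
assert get_randao_epoch_for_custody_period(0, 3) == 7
```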
@@ -372,7 +372,11 @@ def process_custody_key_reveal(state: BeaconState,
 
     # Reward Block Proposer
     proposer_index = get_beacon_proposer_index(state)
-    increase_balance(state, proposer_index, get_base_reward(state, reveal.revealer_index) // MINOR_REWARD_QUOTIENT)
+    increase_balance(
+        state,
+        proposer_index,
+        Gwei(get_base_reward(state, reveal.revealer_index) // MINOR_REWARD_QUOTIENT)
+    )
 ```
 
 #### Early derived secret reveals
@@ -433,7 +437,7 @@ def process_early_derived_secret_reveal(state: BeaconState,
             // len(get_active_validator_indices(state, get_current_epoch(state)))
             // PROPOSER_REWARD_QUOTIENT
         )
-        penalty = (
+        penalty = Gwei(
            max_proposer_slot_reward
            * EARLY_DERIVED_SECRET_REVEAL_SLOT_REWARD_MULTIPLE
            * (len(state.exposed_derived_secrets[derived_secret_location]) + 1)
@@ -442,8 +446,8 @@ def process_early_derived_secret_reveal(state: BeaconState,
         # Apply penalty
         proposer_index = get_beacon_proposer_index(state)
         whistleblower_index = reveal.masker_index
-        whistleblowing_reward = penalty // WHISTLEBLOWING_REWARD_QUOTIENT
-        proposer_reward = whistleblowing_reward // PROPOSER_REWARD_QUOTIENT
+        whistleblowing_reward = Gwei(penalty // WHISTLEBLOWING_REWARD_QUOTIENT)
+        proposer_reward = Gwei(whistleblowing_reward // PROPOSER_REWARD_QUOTIENT)
         increase_balance(state, proposer_index, proposer_reward)
         increase_balance(state, whistleblower_index, whistleblowing_reward - proposer_reward)
         decrease_balance(state, reveal.revealed_index, penalty)
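The penalty split is a straight chain of integer divisions: the whistleblowing reward is carved out of the penalty, the proposer takes a further cut of that reward, and the masker keeps the remainder. Worked with a hypothetical penalty and the phase 0 quotient values (2**9 and 2**3):

```python
WHISTLEBLOWING_REWARD_QUOTIENT = 2**9   # 512
PROPOSER_REWARD_QUOTIENT = 2**3         # 8

penalty = 1_024_000  # Gwei, hypothetical
whistleblowing_reward = penalty // WHISTLEBLOWING_REWARD_QUOTIENT    # 2_000 Gwei
proposer_reward = whistleblowing_reward // PROPOSER_REWARD_QUOTIENT  # 250 Gwei

# The proposer gets 250 Gwei, the masker (whistleblower) gets the rest of the
# whistleblowing reward, and the revealed validator loses the full penalty.
assert whistleblowing_reward - proposer_reward == 1_750
```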
@@ -512,7 +516,7 @@ def process_bit_challenge(state: BeaconState,
         pubkey=challenger.pubkey,
         message_hash=signing_root(challenge),
         signature=challenge.signature,
-        domain=get_domain(state, get_current_epoch(state), DOMAIN_CUSTODY_BIT_CHALLENGE),
+        domain=get_domain(state, DOMAIN_CUSTODY_BIT_CHALLENGE, get_current_epoch(state)),
     )
     assert is_slashable_validator(challenger, get_current_epoch(state))
 
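The reordered arguments here (and at the shard call sites further down) follow the updated `get_domain` helper, which now takes the domain type before the optional message epoch. Roughly, as a non-normative sketch assuming the phase 0 spec context (`BeaconState`, `get_current_epoch`, `bls_domain`):

```python
def get_domain(state: BeaconState, domain_type: int, message_epoch: Epoch = None) -> int:
    # Use the current epoch when no explicit message epoch is given, then pick
    # the fork version that was active at that epoch.
    epoch = get_current_epoch(state) if message_epoch is None else message_epoch
    fork_version = state.fork.previous_version if epoch < state.fork.epoch else state.fork.current_version
    return bls_domain(domain_type, fork_version)
```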
@@ -535,8 +539,8 @@ def process_bit_challenge(state: BeaconState,
     # Verify the responder is a valid custody key
     epoch_to_sign = get_randao_epoch_for_custody_period(
         get_validators_custody_reveal_period(
-            state=state,
-            index=challenge.responder_index,
+            state,
+            challenge.responder_index,
             epoch=slot_to_epoch(attestation.data.slot)),
         challenge.responder_index
     )
@@ -610,7 +614,7 @@ def process_chunk_challenge_response(state: BeaconState,
     # Verify the chunk matches the crosslink data root
     assert verify_merkle_branch(
         leaf=hash_tree_root(response.chunk),
-        branch=response.data_branch,
+        proof=response.data_branch,
         depth=challenge.depth,
         index=response.chunk_index,
         root=challenge.data_root,
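The keyword rename (`branch` → `proof`) tracks the phase 0 Merkle-branch helper. For reference, a standalone sketch of that check (using `sha256` directly in place of the spec's `hash`), plus a two-leaf toy tree to exercise it:

```python
from hashlib import sha256
from typing import List

def sha(x: bytes) -> bytes:
    return sha256(x).digest()

def verify_merkle_branch(leaf: bytes, proof: List[bytes], depth: int, index: int, root: bytes) -> bool:
    # Fold the proof from the leaf up to the root; the bits of `index` decide
    # whether the sibling hash goes on the left or the right at each level.
    value = leaf
    for i in range(depth):
        if (index >> i) % 2:
            value = sha(proof[i] + value)
        else:
            value = sha(value + proof[i])
    return value == root

leaf0, leaf1 = b'\x00' * 32, b'\x11' * 32
root = sha(leaf0 + leaf1)
assert verify_merkle_branch(leaf0, [leaf1], depth=1, index=0, root=root)
assert verify_merkle_branch(leaf1, [leaf0], depth=1, index=1, root=root)
```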
@@ -620,7 +624,7 @@ def process_chunk_challenge_response(state: BeaconState,
     records[records.index(challenge)] = CustodyChunkChallengeRecord()
     # Reward the proposer
     proposer_index = get_beacon_proposer_index(state)
-    increase_balance(state, proposer_index, get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT)
+    increase_balance(state, proposer_index, Gwei(get_base_reward(state, proposer_index) // MINOR_REWARD_QUOTIENT))
 ```
 
 ```python
@@ -635,7 +639,7 @@ def process_bit_challenge_response(state: BeaconState,
     # Verify the chunk matches the crosslink data root
     assert verify_merkle_branch(
         leaf=hash_tree_root(response.chunk),
-        branch=response.data_branch,
+        proof=response.data_branch,
         depth=ceillog2(challenge.chunk_count),
         index=response.chunk_index,
         root=challenge.data_root,
@@ -643,7 +647,7 @@ def process_bit_challenge_response(state: BeaconState,
     # Verify the chunk bit leaf matches the challenge data
     assert verify_merkle_branch(
         leaf=response.chunk_bits_leaf,
-        branch=response.chunk_bits_branch,
+        proof=response.chunk_bits_branch,
         depth=ceillog2(challenge.chunk_count) >> 8,
         index=response.chunk_index // 256,
         root=challenge.chunk_bits_merkle_root
@@ -671,8 +675,8 @@ Run `process_reveal_deadlines(state)` immediately after `process_registry_update
 def process_reveal_deadlines(state: BeaconState) -> None:
     for index, validator in enumerate(state.validators):
         deadline = validator.next_custody_reveal_period + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD)
-        if get_validators_custody_reveal_period(state, index) > deadline:
-            slash_validator(state, index)
+        if get_validators_custody_reveal_period(state, ValidatorIndex(index)) > deadline:
+            slash_validator(state, ValidatorIndex(index))
 ```
 
 Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadlines(state)`:
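The deadline is expressed in custody periods: a validator gets `CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD` extra periods beyond its `next_custody_reveal_period` counter before it becomes slashable. A small sketch with illustrative (non-spec) constants:

```python
# Illustrative constants only; not the spec values.
EPOCHS_PER_CUSTODY_PERIOD = 8
CUSTODY_RESPONSE_DEADLINE = 16

next_custody_reveal_period = 10  # hypothetical per-validator counter
deadline = next_custody_reveal_period + (CUSTODY_RESPONSE_DEADLINE // EPOCHS_PER_CUSTODY_PERIOD)
assert deadline == 12
# With these numbers the validator is slashed once its current reveal period
# exceeds 12, i.e. once it has fallen more than two periods behind.
```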
@@ -682,17 +686,17 @@ Run `process_challenge_deadlines(state)` immediately after `process_reveal_deadl
     process_challenge_deadlines(state)
 # end insert @process_challenge_deadlines
 def process_challenge_deadlines(state: BeaconState) -> None:
-    for challenge in state.custody_chunk_challenge_records:
-        if get_current_epoch(state) > challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
-            slash_validator(state, challenge.responder_index, challenge.challenger_index)
+    for custody_chunk_challenge in state.custody_chunk_challenge_records:
+        if get_current_epoch(state) > custody_chunk_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
+            slash_validator(state, custody_chunk_challenge.responder_index, custody_chunk_challenge.challenger_index)
             records = state.custody_chunk_challenge_records
-            records[records.index(challenge)] = CustodyChunkChallengeRecord()
+            records[records.index(custody_chunk_challenge)] = CustodyChunkChallengeRecord()
 
-    for challenge in state.custody_bit_challenge_records:
-        if get_current_epoch(state) > challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
-            slash_validator(state, challenge.responder_index, challenge.challenger_index)
+    for custody_bit_challenge in state.custody_bit_challenge_records:
+        if get_current_epoch(state) > custody_bit_challenge.inclusion_epoch + CUSTODY_RESPONSE_DEADLINE:
+            slash_validator(state, custody_bit_challenge.responder_index, custody_bit_challenge.challenger_index)
             records = state.custody_bit_challenge_records
-            records[records.index(challenge)] = CustodyBitChallengeRecord()
+            records[records.index(custody_bit_challenge)] = CustodyBitChallengeRecord()
 ```
 
 Append this to `process_final_updates(state)`:
@@ -713,5 +717,5 @@ def after_process_final_updates(state: BeaconState) -> None:
     for index, validator in enumerate(state.validators):
         if index not in validator_indices_in_records:
             if validator.exit_epoch != FAR_FUTURE_EPOCH and validator.withdrawable_epoch == FAR_FUTURE_EPOCH:
-                validator.withdrawable_epoch = validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY
+                validator.withdrawable_epoch = Epoch(validator.exit_epoch + MIN_VALIDATOR_WITHDRAWABILITY_DELAY)
 ```
@@ -83,39 +83,39 @@ class ShardBlockBody(Container):
 ```python
 class ShardAttestation(Container):
     class data(Container):
-        slot: uint64
-        shard: uint64
+        slot: Slot
+        shard: Shard
         shard_block_root: Bytes32
     aggregation_bitfield: bytes
-    aggregate_signature: Bytes96
+    aggregate_signature: BLSSignature
 ```
 
 ### `ShardBlock`
 
 ```python
 class ShardBlock(Container):
-    slot: uint64
-    shard: uint64
+    slot: Slot
+    shard: Shard
     beacon_chain_root: Bytes32
     parent_root: Bytes32
     data: ShardBlockBody
     state_root: Bytes32
     attestations: List[ShardAttestation]
-    signature: Bytes96
+    signature: BLSSignature
 ```
 
 ### `ShardBlockHeader`
 
 ```python
 class ShardBlockHeader(Container):
-    slot: uint64
-    shard: uint64
+    slot: Slot
+    shard: Shard
     beacon_chain_root: Bytes32
     parent_root: Bytes32
     body_root: Bytes32
     state_root: Bytes32
     attestations: List[ShardAttestation]
-    signature: Bytes96
+    signature: BLSSignature
 ```
 
 ## Helper functions
@@ -142,8 +142,8 @@ def get_period_committee(state: BeaconState,
 ### `get_switchover_epoch`
 
 ```python
-def get_switchover_epoch(state: BeaconState, epoch: Epoch, index: ValidatorIndex):
-    earlier_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2
+def get_switchover_epoch(state: BeaconState, epoch: Epoch, index: ValidatorIndex) -> int:
+    earlier_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2)
     return (bytes_to_int(hash(generate_seed(state, earlier_start_epoch) + int_to_bytes(index, length=3)[0:8]))
             % PERSISTENT_COMMITTEE_PERIOD)
 ```
@@ -158,19 +158,19 @@ def get_persistent_committee(state: BeaconState,
     Return the persistent committee for the given ``shard`` at the given ``slot``.
     """
     epoch = slot_to_epoch(slot)
-    earlier_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2
-    later_start_epoch = epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD
+    earlier_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD * 2)
+    later_start_epoch = Epoch(epoch - (epoch % PERSISTENT_COMMITTEE_PERIOD) - PERSISTENT_COMMITTEE_PERIOD)
 
     committee_count = max(
-        len(get_active_validator_indices(state.validators, earlier_start_epoch)) //
+        len(get_active_validator_indices(state, earlier_start_epoch)) //
         (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
-        len(get_active_validator_indices(state.validators, later_start_epoch)) //
+        len(get_active_validator_indices(state, later_start_epoch)) //
         (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
     ) + 1
 
     index = slot % committee_count
-    earlier_committee = get_period_committee(state, shard, earlier_start_epoch, index, committee_count)
-    later_committee = get_period_committee(state, shard, later_start_epoch, index, committee_count)
+    earlier_committee = get_period_committee(state, earlier_start_epoch, shard, index, committee_count)
+    later_committee = get_period_committee(state, later_start_epoch, shard, index, committee_count)
 
     # Take not-yet-cycled-out validators from earlier committee and already-cycled-in validators from
     # later committee; return a sorted list of the union of the two, deduplicated
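The committee count is derived from the larger of the two period snapshots of the active validator set, with a `+ 1` so the per-slot modulo is always well defined. A worked example using the phase 0 values of `SHARD_COUNT` and `TARGET_COMMITTEE_SIZE` and hypothetical validator counts:

```python
SHARD_COUNT = 2**10            # 1024
TARGET_COMMITTEE_SIZE = 2**7   # 128

n_earlier, n_later = 300_000, 310_000  # hypothetical active validator counts
committee_count = max(
    n_earlier // (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
    n_later // (SHARD_COUNT * TARGET_COMMITTEE_SIZE),
) + 1
assert committee_count == 3

slot = 12_345
index = slot % committee_count  # which period committee serves this slot
assert index == 0
```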
@@ -185,7 +185,7 @@ def get_persistent_committee(state: BeaconState,
 ```python
 def get_shard_proposer_index(state: BeaconState,
                              shard: Shard,
-                             slot: Slot) -> ValidatorIndex:
+                             slot: Slot) -> Optional[ValidatorIndex]:
     # Randomly shift persistent committee
     persistent_committee = get_persistent_committee(state, shard, slot)
     seed = hash(state.current_shuffling_seed + int_to_bytes(shard, length=8) + int_to_bytes(slot, length=8))
@@ -235,7 +235,7 @@ def verify_shard_attestation_signature(state: BeaconState,
         pubkey=bls_aggregate_pubkeys(pubkeys),
         message_hash=data.shard_block_root,
         signature=attestation.aggregate_signature,
-        domain=get_domain(state, slot_to_epoch(data.slot), DOMAIN_SHARD_ATTESTER)
+        domain=get_domain(state, DOMAIN_SHARD_ATTESTER, slot_to_epoch(data.slot))
     )
 ```
 
@@ -332,7 +332,7 @@ def is_valid_shard_block(beacon_blocks: List[BeaconBlock],
         pubkey=beacon_state.validators[proposer_index].pubkey,
         message_hash=signing_root(block),
         signature=candidate.signature,
-        domain=get_domain(beacon_state, slot_to_epoch(candidate.slot), DOMAIN_SHARD_PROPOSER),
+        domain=get_domain(beacon_state, DOMAIN_SHARD_PROPOSER, slot_to_epoch(candidate.slot)),
     )
 
     return True
@@ -167,7 +167,7 @@ If a client wants to update its `finalized_header` it asks the network for a `Bl
 ```python
 {
     'header': BeaconBlockHeader,
-    'shard_aggregate_signature': 'bytes96',
+    'shard_aggregate_signature': BLSSignature,
     'shard_bitfield': 'bytes',
     'shard_parent_block': ShardBlock,
 }
@@ -53,8 +53,13 @@ class uint32(uint):
         return super().__new__(cls, value)
 
 
-# We simply default to uint64. But do give it a name, for readability
-uint64 = NewType('uint64', int)
+class uint64(uint):
+    byte_len = 8
+
+    def __new__(cls, value, *args, **kwargs):
+        if value.bit_length() > 64:
+            raise ValueError("value out of bounds for uint64")
+        return super().__new__(cls, value)
 
 
 class uint128(uint):
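Replacing the `NewType` alias with a real `uint` subclass gives `uint64` a runtime bounds check instead of being a plain `int` at runtime. A usage sketch, assuming the updated `ssz_typing` module is importable and that `uint` derives from `int`:

```python
from eth2spec.utils.ssz.ssz_typing import uint64

x = uint64(2**64 - 1)   # largest representable value, accepted
assert int(x) == 2**64 - 1

try:
    uint64(2**64)        # one past the top of the range
except ValueError:
    pass                 # "value out of bounds for uint64"
```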
@@ -256,7 +261,7 @@ class Vector(metaclass=VectorMeta):
         # cannot check non-type objects, or parametrized types
         if isinstance(cls.elem_type, type) and not hasattr(cls.elem_type, '__args__'):
             for i, item in enumerate(self.items):
-                if not issubclass(type(item), cls.elem_type):
+                if not issubclass(cls.elem_type, type(item)):
                     raise TypeError("Typed vector cannot hold differently typed value"
                                     " at index %d. Got type: %s, expected type: %s" % (i, type(item), cls.elem_type))
 
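Reversing the `issubclass` direction appears to relax the element check so that a `Vector` declared over the new class-based `uint64` still accepts plain `int` values (e.g. literals), since `uint64` derives from `int` while the old check rejected exactly that case. The difference in isolation:

```python
class uint64(int):
    """Stand-in for the ssz_typing class; only the inheritance matters here."""

item = 3  # a plain Python int literal, as commonly used to fill typed vectors

# Old check: is the item's type a subclass of the declared element type? -> False
assert not issubclass(type(item), uint64)

# New check: does the declared element type derive from the item's type? -> True
assert issubclass(uint64, type(item))
```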
@@ -399,11 +404,27 @@ class BytesN(bytes, metaclass=BytesNMeta):
         return hash_tree_root(self, self.__class__)
 
 
+class Bytes4(BytesN):
+    length = 4
+
+
+class Bytes32(BytesN):
+    length = 32
+
+
+class Bytes48(BytesN):
+    length = 48
+
+
+class Bytes96(BytesN):
+    length = 96
+
+
 # SSZ Defaults
 # -----------------------------
 def get_zero_value(typ):
     if is_uint_type(typ):
-        return 0
+        return uint64(0)
     elif is_list_type(typ):
         return []
     elif is_bool_type(typ):
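The fixed-length `BytesN` subclasses give the spec concrete byte types to import for roots, pubkeys, and signatures. A usage sketch, assuming the subclasses accept a correctly sized `bytes` value:

```python
from eth2spec.utils.ssz.ssz_typing import Bytes32, Bytes48, Bytes96

root = Bytes32(b'\x00' * 32)       # e.g. a hash tree root placeholder
pubkey = Bytes48(b'\x11' * 48)     # e.g. a BLS pubkey placeholder
signature = Bytes96(b'\x22' * 96)  # e.g. a BLS signature placeholder

assert len(root) == 32 and len(pubkey) == 48 and len(signature) == 96
```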
@@ -2,4 +2,5 @@
 pytest>=3.6,<3.7
 ../config_helpers
 flake8==3.7.7
+mypy==0.701
 pytest-cov