Implements new SSZ types

Commit: d761b6f041
@@ -60,13 +60,13 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Install pyspec requirements
command: make install_test && make install_lint
command: make install_test
- save_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
venv_path: ./test_libs/pyspec/venv
test:
@@ -77,7 +77,7 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Run py-tests

@@ -92,7 +92,7 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Run linter

Makefile
@@ -45,12 +45,9 @@ citest: $(PY_SPEC_ALL_TARGETS)
cd $(PY_SPEC_DIR); mkdir -p test-reports/eth2spec; . venv/bin/activate; \
python -m pytest --junitxml=test-reports/eth2spec/test_results_phase0.xml eth2spec

install_lint:
cd $(PY_SPEC_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install flake8==3.5.0

lint: $(PY_SPEC_ALL_TARGETS)
cd $(PY_SPEC_DIR); . venv/bin/activate; \
flake8 --max-line-length=120 ./eth2spec
flake8 --ignore=E252,W504,W503 --max-line-length=120 ./eth2spec;

# "make pyspec" to create the pyspec for all phases.
pyspec: $(PY_SPEC_ALL_TARGETS)
@@ -18,17 +18,20 @@ PHASE0_IMPORTS = '''from typing import (
Tuple,
)

from eth2spec.utils.minimal_ssz import (
SSZType,
from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root,
signing_root,
)

from eth2spec.utils.ssz.ssz_typing import (
# unused: uint8, uint16, uint32, uint128, uint256,
uint64, Container, Vector, BytesN
)
from eth2spec.utils.bls import (
bls_aggregate_pubkeys,
bls_verify,
bls_verify_multiple,
)
# Note: 'int' type defaults to being interpreted as a uint64 by SSZ implementation.

from eth2spec.utils.hash_function import hash
'''
@@ -40,15 +43,16 @@ PHASE1_IMPORTS = '''from typing import (
Tuple,
)

from eth2spec.utils.minimal_ssz import (
SSZType,
from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root,
signing_root,
type_of,
empty,
serialize,
is_empty,
)
from eth2spec.utils.ssz.ssz_typing import (
# unused: uint8, uint16, uint32, uint128, uint256,
uint64, Container, Vector, BytesN
)

from eth2spec.utils.bls import (
bls_aggregate_pubkeys,
bls_verify,
@@ -63,13 +67,13 @@ NEW_TYPES = {
'Shard': 'int',
'ValidatorIndex': 'int',
'Gwei': 'int',
'Bytes32': 'bytes',
'BLSPubkey': 'bytes',
'BLSSignature': 'bytes',
'Store': 'None',
'Hash': 'bytes'
}
BYTE_TYPES = [4, 32, 48, 96]
SUNDRY_FUNCTIONS = '''
def get_ssz_type_by_name(name: str) -> Container:
return globals()[name]


# Monkey patch validator compute committee code
_compute_committee = compute_committee
committee_cache = {}
@@ -124,26 +128,22 @@ def objects_to_spec(functions: Dict[str, str],
"""
new_type_definitions = \
'\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in NEW_TYPES.items()])
new_type_definitions += '\n' + '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in BYTE_TYPES])
functions_spec = '\n\n'.join(functions.values())
constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
ssz_objects_instantiation_spec = '\n'.join(map(
lambda x: '%s = SSZType(%s)' % (x, ssz_objects[x][:-1]),
ssz_objects
))
ssz_objects_reinitialization_spec = '\n'.join(map(
lambda x: ' global_vars[\'%s\'] = SSZType(%s })' % (x, re.sub('( ){4}', ' '*8, ssz_objects[x][:-2])),
ssz_objects
))
ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
ssz_objects_reinitialization_spec = (
'def init_SSZ_types():\n global_vars = globals()\n'
+ ssz_objects_reinitialization_spec
'def init_SSZ_types():\n global_vars = globals()\n\n '
+ '\n\n '.join([re.sub(r'(?!\n\n)\n', r'\n ', value[:-1]) for value in ssz_objects.values()])
+ '\n\n'
+ '\n'.join(map(lambda x: ' global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
)
spec = (
imports
+ '\n' + new_type_definitions
+ '\n\n' + constants_spec
+ '\n\n' + ssz_objects_instantiation_spec
+ '\n\n\n' + functions_spec
+ '\n\n\n' + ssz_objects_instantiation_spec
+ '\n\n' + functions_spec
+ '\n' + SUNDRY_FUNCTIONS
+ '\n\n' + ssz_objects_reinitialization_spec
+ '\n'
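To make the new assembly path easier to picture, here is a rough, hedged sketch of what `objects_to_spec` now emits for a single object (using the `Fork` container from the phase 0 spec diff further down). The exact whitespace produced by the `re.sub` re-indentation is an assumption; only the overall shape is implied by the code above.

```python
from eth2spec.utils.ssz.ssz_typing import uint64, Container, BytesN

Bytes4 = BytesN[4]  # generated from BYTE_TYPES

# 1. ssz_objects_instantiation_spec: each class definition is emitted once at module level.
class Fork(Container):
    previous_version: Bytes4
    current_version: Bytes4
    epoch: uint64


# 2. ssz_objects_reinitialization_spec: the same definitions are re-emitted, indented,
#    inside init_SSZ_types(), followed by one global_vars assignment per object.
def init_SSZ_types():
    global_vars = globals()

    class Fork(Container):
        previous_version: Bytes4
        current_version: Bytes4
        epoch: uint64

    global_vars['Fork'] = Fork
```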
@@ -172,10 +172,10 @@ def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
"""
items = list(objects.items())
for key, value in items:
dependencies = re.findall(r'(: [\[]*[A-Z][a-z][\w]+)', value)
dependencies = map(lambda x: re.sub(r'\W', '', x), dependencies)
dependencies = re.findall(r'(: [A-Z][\w[]*)', value)
dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|uint\d+|Bytes\d+|bytes', '', x), dependencies)
for dep in dependencies:
if dep in NEW_TYPES:
if dep in NEW_TYPES or len(dep) == 0:
continue
key_list = list(objects.keys())
for item in [dep, key] + key_list[key_list.index(dep)+1:]:

@@ -188,11 +188,12 @@ def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]
and returns the newer versions of the objects in dependency order.
"""
for key, value in new_objects.items():
# remove leading "{" and trailing "\n}"
old_objects[key] = old_objects.get(key, '')[1:-3]
# remove leading "{"
value = value[1:]
old_objects[key] = '{' + old_objects.get(key, '') + value
if key in old_objects:
# remove trailing newline
old_objects[key] = old_objects[key]
# remove leading variable name
value = re.sub(r'^class [\w]*\(Container\):\n', '', value)
old_objects[key] = old_objects.get(key, '') + value
dependency_order_ssz_objects(old_objects)
return old_objects
@@ -50,12 +50,19 @@ def get_spec(file_name: str) -> SpecObject:
else:
# Handle function definitions & ssz_objects
if pulling_from is not None:
func_match = function_matcher.match(line)
if func_match is not None:
current_name = func_match.group(0)
if function_matcher.match(current_name) is None: # The current line is an SSZ Object
# SSZ Object
if len(line) > 18 and line[:6] == 'class ' and line[-12:] == '(Container):':
name = line[6:-12]
# Check consistency with markdown header
assert name == current_name
is_ssz = True
# function definition
elif function_matcher.match(line) is not None:
current_name = function_matcher.match(line).group(0)
is_ssz = False
if is_ssz:
ssz_objects[current_name] = ssz_objects.get(current_name, '') + line + '\n'
else: # The current line is code
else:
functions[current_name] = functions.get(current_name, '') + line + '\n'
# Handle constant table entries
elif pulling_from is None and len(line) > 0 and line[0] == '|':
@ -266,162 +266,151 @@ The types are defined topologically to aid in facilitating an executable version
|
||||
#### `Fork`
|
||||
|
||||
```python
|
||||
{
|
||||
class Fork(Container):
|
||||
# Previous fork version
|
||||
'previous_version': 'bytes4',
|
||||
previous_version: Bytes4
|
||||
# Current fork version
|
||||
'current_version': 'bytes4',
|
||||
current_version: Bytes4
|
||||
# Fork epoch number
|
||||
'epoch': 'uint64',
|
||||
}
|
||||
epoch: uint64
|
||||
```
|
||||
|
||||
#### `Crosslink`
|
||||
|
||||
```python
|
||||
{
|
||||
class Crosslink(Container):
|
||||
# Shard number
|
||||
'shard': 'uint64',
|
||||
shard: uint64
|
||||
# Crosslinking data from epochs [start....end-1]
|
||||
'start_epoch': 'uint64',
|
||||
'end_epoch': 'uint64',
|
||||
start_epoch: uint64
|
||||
end_epoch: uint64
|
||||
# Root of the previous crosslink
|
||||
'parent_root': 'bytes32',
|
||||
parent_root: Bytes32
|
||||
# Root of the crosslinked shard data since the previous crosslink
|
||||
'data_root': 'bytes32',
|
||||
}
|
||||
data_root: Bytes32
|
||||
```
|
||||
|
||||
#### `Eth1Data`
|
||||
|
||||
```python
|
||||
{
|
||||
class Eth1Data(Container):
|
||||
# Root of the deposit tree
|
||||
'deposit_root': 'bytes32',
|
||||
deposit_root: Bytes32
|
||||
# Total number of deposits
|
||||
'deposit_count': 'uint64',
|
||||
deposit_count: uint64
|
||||
# Block hash
|
||||
'block_hash': 'bytes32',
|
||||
}
|
||||
block_hash: Bytes32
|
||||
```
|
||||
|
||||
#### `AttestationData`
|
||||
|
||||
```python
|
||||
{
|
||||
class AttestationData(Container):
|
||||
# LMD GHOST vote
|
||||
'beacon_block_root': 'bytes32',
|
||||
beacon_block_root: Bytes32
|
||||
|
||||
# FFG vote
|
||||
'source_epoch': 'uint64',
|
||||
'source_root': 'bytes32',
|
||||
'target_epoch': 'uint64',
|
||||
'target_root': 'bytes32',
|
||||
source_epoch: uint64
|
||||
source_root: Bytes32
|
||||
target_epoch: uint64
|
||||
target_root: Bytes32
|
||||
|
||||
# Crosslink vote
|
||||
'crosslink': Crosslink,
|
||||
}
|
||||
crosslink: Crosslink
|
||||
```
|
||||
|
||||
#### `AttestationDataAndCustodyBit`
|
||||
|
||||
```python
|
||||
{
|
||||
class AttestationDataAndCustodyBit(Container):
|
||||
# Attestation data
|
||||
'data': AttestationData,
|
||||
data: AttestationData
|
||||
# Custody bit
|
||||
'custody_bit': 'bool',
|
||||
}
|
||||
custody_bit: bool
|
||||
```
|
||||
|
||||
#### `IndexedAttestation`
|
||||
|
||||
```python
|
||||
{
|
||||
class IndexedAttestation(Container):
|
||||
# Validator indices
|
||||
'custody_bit_0_indices': ['uint64'],
|
||||
'custody_bit_1_indices': ['uint64'],
|
||||
custody_bit_0_indices: List[uint64]
|
||||
custody_bit_1_indices: List[uint64]
|
||||
# Attestation data
|
||||
'data': AttestationData,
|
||||
data: AttestationData
|
||||
# Aggregate signature
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `DepositData`
|
||||
|
||||
```python
|
||||
{
|
||||
class DepositData(Container):
|
||||
# BLS pubkey
|
||||
'pubkey': 'bytes48',
|
||||
pubkey: Bytes48
|
||||
# Withdrawal credentials
|
||||
'withdrawal_credentials': 'bytes32',
|
||||
withdrawal_credentials: Bytes32
|
||||
# Amount in Gwei
|
||||
'amount': 'uint64',
|
||||
amount: uint64
|
||||
# Container self-signature
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `BeaconBlockHeader`
|
||||
|
||||
```python
|
||||
{
|
||||
'slot': 'uint64',
|
||||
'parent_root': 'bytes32',
|
||||
'state_root': 'bytes32',
|
||||
'body_root': 'bytes32',
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
class BeaconBlockHeader(Container):
|
||||
slot: uint64
|
||||
parent_root: Bytes32
|
||||
state_root: Bytes32
|
||||
body_root: Bytes32
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `Validator`
|
||||
|
||||
```python
|
||||
{
|
||||
class Validator(Container):
|
||||
# BLS public key
|
||||
'pubkey': 'bytes48',
|
||||
pubkey: Bytes48
|
||||
# Withdrawal credentials
|
||||
'withdrawal_credentials': 'bytes32',
|
||||
withdrawal_credentials: Bytes32
|
||||
# Epoch when became eligible for activation
|
||||
'activation_eligibility_epoch': 'uint64',
|
||||
activation_eligibility_epoch: uint64
|
||||
# Epoch when validator activated
|
||||
'activation_epoch': 'uint64',
|
||||
activation_epoch: uint64
|
||||
# Epoch when validator exited
|
||||
'exit_epoch': 'uint64',
|
||||
exit_epoch: uint64
|
||||
# Epoch when validator is eligible to withdraw
|
||||
'withdrawable_epoch': 'uint64',
|
||||
withdrawable_epoch: uint64
|
||||
# Was the validator slashed
|
||||
'slashed': 'bool',
|
||||
slashed: bool
|
||||
# Effective balance
|
||||
'effective_balance': 'uint64',
|
||||
}
|
||||
effective_balance: uint64
|
||||
```
|
||||
|
||||
#### `PendingAttestation`
|
||||
|
||||
```python
|
||||
{
|
||||
class PendingAttestation(Container):
|
||||
# Attester aggregation bitfield
|
||||
'aggregation_bitfield': 'bytes',
|
||||
aggregation_bitfield: bytes
|
||||
# Attestation data
|
||||
'data': AttestationData,
|
||||
data: AttestationData
|
||||
# Inclusion delay
|
||||
'inclusion_delay': 'uint64',
|
||||
inclusion_delay: uint64
|
||||
# Proposer index
|
||||
'proposer_index': 'uint64',
|
||||
}
|
||||
proposer_index: uint64
|
||||
```
|
||||
|
||||
#### `HistoricalBatch`
|
||||
|
||||
```python
|
||||
{
|
||||
class HistoricalBatch(Container):
|
||||
# Block roots
|
||||
'block_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
|
||||
block_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
|
||||
# State roots
|
||||
'state_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
|
||||
}
|
||||
state_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
|
||||
```
|
||||
|
||||
### Beacon operations
|
||||
@ -429,85 +418,79 @@ The types are defined topologically to aid in facilitating an executable version
|
||||
#### `ProposerSlashing`
|
||||
|
||||
```python
|
||||
{
|
||||
class ProposerSlashing(Container):
|
||||
# Proposer index
|
||||
'proposer_index': 'uint64',
|
||||
proposer_index: uint64
|
||||
# First block header
|
||||
'header_1': BeaconBlockHeader,
|
||||
header_1: BeaconBlockHeader
|
||||
# Second block header
|
||||
'header_2': BeaconBlockHeader,
|
||||
}
|
||||
header_2: BeaconBlockHeader
|
||||
```
|
||||
|
||||
#### `AttesterSlashing`
|
||||
|
||||
```python
|
||||
{
|
||||
class AttesterSlashing(Container):
|
||||
# First attestation
|
||||
'attestation_1': IndexedAttestation,
|
||||
attestation_1: IndexedAttestation
|
||||
# Second attestation
|
||||
'attestation_2': IndexedAttestation,
|
||||
}
|
||||
attestation_2: IndexedAttestation
|
||||
```
|
||||
|
||||
#### `Attestation`
|
||||
|
||||
```python
|
||||
{
|
||||
class Attestation(Container):
|
||||
# Attester aggregation bitfield
|
||||
'aggregation_bitfield': 'bytes',
|
||||
aggregation_bitfield: bytes
|
||||
# Attestation data
|
||||
'data': AttestationData,
|
||||
data: AttestationData
|
||||
# Custody bitfield
|
||||
'custody_bitfield': 'bytes',
|
||||
custody_bitfield: bytes
|
||||
# BLS aggregate signature
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `Deposit`
|
||||
|
||||
```python
|
||||
{
|
||||
class Deposit(Container):
|
||||
# Branch in the deposit tree
|
||||
'proof': ['bytes32', DEPOSIT_CONTRACT_TREE_DEPTH],
|
||||
proof: Vector[Bytes32, DEPOSIT_CONTRACT_TREE_DEPTH]
|
||||
# Data
|
||||
'data': DepositData,
|
||||
}
|
||||
data: DepositData
|
||||
```
|
||||
|
||||
#### `VoluntaryExit`
|
||||
|
||||
```python
|
||||
{
|
||||
class VoluntaryExit(Container):
|
||||
# Minimum epoch for processing exit
|
||||
'epoch': 'uint64',
|
||||
epoch: uint64
|
||||
# Index of the exiting validator
|
||||
'validator_index': 'uint64',
|
||||
validator_index: uint64
|
||||
# Validator signature
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `Transfer`
|
||||
|
||||
```python
|
||||
{
|
||||
class Transfer(Container):
|
||||
# Sender index
|
||||
'sender': 'uint64',
|
||||
sender: uint64
|
||||
# Recipient index
|
||||
'recipient': 'uint64',
|
||||
recipient: uint64
|
||||
# Amount in Gwei
|
||||
'amount': 'uint64',
|
||||
amount: uint64
|
||||
# Fee in Gwei for block proposer
|
||||
'fee': 'uint64',
|
||||
fee: uint64
|
||||
# Inclusion slot
|
||||
'slot': 'uint64',
|
||||
slot: uint64
|
||||
# Sender withdrawal pubkey
|
||||
'pubkey': 'bytes48',
|
||||
pubkey: Bytes48
|
||||
# Sender signature
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
### Beacon blocks
|
||||
@ -515,30 +498,28 @@ The types are defined topologically to aid in facilitating an executable version
|
||||
#### `BeaconBlockBody`
|
||||
|
||||
```python
|
||||
{
|
||||
'randao_reveal': 'bytes96',
|
||||
'eth1_data': Eth1Data,
|
||||
'graffiti': 'bytes32',
|
||||
'proposer_slashings': [ProposerSlashing],
|
||||
'attester_slashings': [AttesterSlashing],
|
||||
'attestations': [Attestation],
|
||||
'deposits': [Deposit],
|
||||
'voluntary_exits': [VoluntaryExit],
|
||||
'transfers': [Transfer],
|
||||
}
|
||||
class BeaconBlockBody(Container):
|
||||
randao_reveal: Bytes96
|
||||
eth1_data: Eth1Data
|
||||
graffiti: Bytes32
|
||||
proposer_slashings: List[ProposerSlashing]
|
||||
attester_slashings: List[AttesterSlashing]
|
||||
attestations: List[Attestation]
|
||||
deposits: List[Deposit]
|
||||
voluntary_exits: List[VoluntaryExit]
|
||||
transfers: List[Transfer]
|
||||
```
|
||||
|
||||
#### `BeaconBlock`
|
||||
|
||||
```python
|
||||
{
|
||||
class BeaconBlock(Container):
|
||||
# Header
|
||||
'slot': 'uint64',
|
||||
'parent_root': 'bytes32',
|
||||
'state_root': 'bytes32',
|
||||
'body': BeaconBlockBody,
|
||||
'signature': 'bytes96',
|
||||
}
|
||||
slot: uint64
|
||||
parent_root: Bytes32
|
||||
state_root: Bytes32
|
||||
body: BeaconBlockBody
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
### Beacon state
|
||||
@ -546,46 +527,40 @@ The types are defined topologically to aid in facilitating an executable version
|
||||
#### `BeaconState`
|
||||
|
||||
```python
|
||||
{
|
||||
class BeaconState(Container):
|
||||
# Misc
|
||||
'slot': 'uint64',
|
||||
'genesis_time': 'uint64',
|
||||
'fork': Fork, # For versioning hard forks
|
||||
|
||||
slot: uint64
|
||||
genesis_time: uint64
|
||||
fork: Fork # For versioning hard forks
|
||||
# Validator registry
|
||||
'validator_registry': [Validator],
|
||||
'balances': ['uint64'],
|
||||
|
||||
validator_registry: List[Validator]
|
||||
balances: List[uint64]
|
||||
# Randomness and committees
|
||||
'latest_randao_mixes': ['bytes32', LATEST_RANDAO_MIXES_LENGTH],
|
||||
'latest_start_shard': 'uint64',
|
||||
|
||||
latest_randao_mixes: Vector[Bytes32, LATEST_RANDAO_MIXES_LENGTH]
|
||||
latest_start_shard: uint64
|
||||
# Finality
|
||||
'previous_epoch_attestations': [PendingAttestation],
|
||||
'current_epoch_attestations': [PendingAttestation],
|
||||
'previous_justified_epoch': 'uint64',
|
||||
'current_justified_epoch': 'uint64',
|
||||
'previous_justified_root': 'bytes32',
|
||||
'current_justified_root': 'bytes32',
|
||||
'justification_bitfield': 'uint64',
|
||||
'finalized_epoch': 'uint64',
|
||||
'finalized_root': 'bytes32',
|
||||
|
||||
previous_epoch_attestations: List[PendingAttestation]
|
||||
current_epoch_attestations: List[PendingAttestation]
|
||||
previous_justified_epoch: uint64
|
||||
current_justified_epoch: uint64
|
||||
previous_justified_root: Bytes32
|
||||
current_justified_root: Bytes32
|
||||
justification_bitfield: uint64
|
||||
finalized_epoch: uint64
|
||||
finalized_root: Bytes32
|
||||
# Recent state
|
||||
'current_crosslinks': [Crosslink, SHARD_COUNT],
|
||||
'previous_crosslinks': [Crosslink, SHARD_COUNT],
|
||||
'latest_block_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
|
||||
'latest_state_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
|
||||
'latest_active_index_roots': ['bytes32', LATEST_ACTIVE_INDEX_ROOTS_LENGTH],
|
||||
'latest_slashed_balances': ['uint64', LATEST_SLASHED_EXIT_LENGTH],
|
||||
'latest_block_header': BeaconBlockHeader,
|
||||
'historical_roots': ['bytes32'],
|
||||
|
||||
current_crosslinks: Vector[Crosslink, SHARD_COUNT]
|
||||
previous_crosslinks: Vector[Crosslink, SHARD_COUNT]
|
||||
latest_block_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
|
||||
latest_state_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
|
||||
latest_active_index_roots: Vector[Bytes32, LATEST_ACTIVE_INDEX_ROOTS_LENGTH]
|
||||
latest_slashed_balances: Vector[uint64, LATEST_SLASHED_EXIT_LENGTH]
|
||||
latest_block_header: BeaconBlockHeader
|
||||
historical_roots: List[Bytes32]
|
||||
# Ethereum 1.0 chain data
|
||||
'latest_eth1_data': Eth1Data,
|
||||
'eth1_data_votes': [Eth1Data],
|
||||
'deposit_index': 'uint64',
|
||||
}
|
||||
latest_eth1_data: Eth1Data
|
||||
eth1_data_votes: List[Eth1Data]
|
||||
deposit_index: uint64
|
||||
```
|
||||
|
||||
## Custom types

@@ -599,9 +574,8 @@ We define the following Python custom types for type hinting and readability:

| `Shard` | `uint64` | a shard number |
| `ValidatorIndex` | `uint64` | a validator registry index |
| `Gwei` | `uint64` | an amount in Gwei |
| `Bytes32` | `bytes32` | 32 bytes of binary data |
| `BLSPubkey` | `bytes48` | a BLS12-381 public key |
| `BLSSignature` | `bytes96` | a BLS12-381 signature |
| `BLSPubkey` | `Bytes48` | a BLS12-381 public key |
| `BLSSignature` | `Bytes96` | a BLS12-381 signature |
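For reference, the build-script changes above turn each entry of `NEW_TYPES` into a `NewType` alias and each entry of `BYTE_TYPES` into a `BytesN` alias in the generated pyspec. A hedged sketch of the resulting definitions (not spec text):

```python
from typing import NewType
from eth2spec.utils.ssz.ssz_typing import BytesN

# From NEW_TYPES: custom types are aliased over plain int/bytes for type hinting.
Shard = NewType('Shard', int)                    # a shard number
ValidatorIndex = NewType('ValidatorIndex', int)  # a validator registry index
Gwei = NewType('Gwei', int)                      # an amount in Gwei

# From BYTE_TYPES = [4, 32, 48, 96]: fixed-length byte strings become BytesN aliases.
Bytes4 = BytesN[4]
Bytes32 = BytesN[32]
Bytes48 = BytesN[48]   # used for BLSPubkey fields
Bytes96 = BytesN[96]   # used for BLSSignature fields
```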
## Helper functions

@@ -611,7 +585,7 @@ We define the following Python custom types for type hinting and readability:

```python
def xor(bytes1: Bytes32, bytes2: Bytes32) -> Bytes32:
    return bytes(a ^ b for a, b in zip(bytes1, bytes2))
    return Bytes32(a ^ b for a, b in zip(bytes1, bytes2))
```

### `hash`

@@ -626,7 +600,7 @@ The `hash` function is SHA256.

### `signing_root`

`def signing_root(object: SSZContainer) -> Bytes32` is a function defined in the [SimpleSerialize spec](../simple-serialize.md#self-signed-containers) to compute signing messages.
`def signing_root(object: Container) -> Bytes32` is a function defined in the [SimpleSerialize spec](../simple-serialize.md#self-signed-containers) to compute signing messages.
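A hedged usage sketch of the renamed helpers: it assumes the generated `eth2spec.phase0.spec` module and relies on raw `bytes` values coercing to the `BytesN` field types, as the test helpers in this commit do.

```python
from eth2spec.utils.ssz.ssz_impl import hash_tree_root, signing_root
from eth2spec.phase0 import spec

# Illustrative values only.
header = spec.BeaconBlockHeader(
    slot=1,
    parent_root=b'\x11' * 32,
    state_root=b'\x22' * 32,
    body_root=b'\x33' * 32,
    signature=b'\x00' * 96,
)

message = signing_root(header)    # root over all fields except the trailing `signature`
root = hash_tree_root(header)     # root over all fields, including `signature`
```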
### `bls_domain`
|
||||
|
||||
|
@ -31,8 +31,6 @@
|
||||
- [`BeaconState`](#beaconstate)
|
||||
- [`BeaconBlockBody`](#beaconblockbody)
|
||||
- [Helpers](#helpers)
|
||||
- [`type_of`](#type_of)
|
||||
- [`empty`](#empty)
|
||||
- [`ceillog2`](#ceillog2)
|
||||
- [`get_crosslink_chunk_count`](#get_crosslink_chunk_count)
|
||||
- [`get_custody_chunk_bit`](#get_custody_chunk_bit)
|
||||
@ -122,66 +120,61 @@ This document details the beacon chain additions and changes in Phase 1 of Ether
|
||||
#### `CustodyChunkChallenge`
|
||||
|
||||
```python
|
||||
{
|
||||
'responder_index': ValidatorIndex,
|
||||
'attestation': Attestation,
|
||||
'chunk_index': 'uint64',
|
||||
}
|
||||
class CustodyChunkChallenge(Container):
|
||||
responder_index: ValidatorIndex
|
||||
attestation: Attestation
|
||||
chunk_index: uint64
|
||||
```
|
||||
|
||||
#### `CustodyBitChallenge`
|
||||
|
||||
```python
|
||||
{
|
||||
'responder_index': ValidatorIndex,
|
||||
'attestation': Attestation,
|
||||
'challenger_index': ValidatorIndex,
|
||||
'responder_key': BLSSignature,
|
||||
'chunk_bits': 'bytes',
|
||||
'signature': BLSSignature,
|
||||
}
|
||||
class CustodyBitChallenge(Container):
|
||||
responder_index: ValidatorIndex
|
||||
attestation: Attestation
|
||||
challenger_index: ValidatorIndex
|
||||
responder_key: Bytes96
|
||||
chunk_bits: bytes
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
#### `CustodyChunkChallengeRecord`
|
||||
|
||||
```python
|
||||
{
|
||||
'challenge_index': 'uint64',
|
||||
'challenger_index': ValidatorIndex,
|
||||
'responder_index': ValidatorIndex,
|
||||
'inclusion_epoch': Epoch,
|
||||
'data_root': Hash,
|
||||
'depth': 'uint64',
|
||||
'chunk_index': 'uint64',
|
||||
}
|
||||
class CustodyChunkChallengeRecord(Container):
|
||||
challenge_index: uint64
|
||||
challenger_index: ValidatorIndex
|
||||
responder_index: ValidatorIndex
|
||||
inclusion_epoch: Epoch
|
||||
data_root: Bytes32
|
||||
depth: uint64
|
||||
chunk_index: uint64
|
||||
```
|
||||
|
||||
#### `CustodyBitChallengeRecord`
|
||||
|
||||
```python
|
||||
{
|
||||
'challenge_index': 'uint64',
|
||||
'challenger_index': ValidatorIndex,
|
||||
'responder_index': ValidatorIndex,
|
||||
'inclusion_epoch': Epoch,
|
||||
'data_root': Hash,
|
||||
'chunk_count': 'uint64',
|
||||
'chunk_bits_merkle_root': 'bytes32',
|
||||
'responder_key': BLSSignature,
|
||||
}
|
||||
class CustodyBitChallengeRecord(Container):
|
||||
challenge_index: uint64
|
||||
challenger_index: ValidatorIndex
|
||||
responder_index: ValidatorIndex
|
||||
inclusion_epoch: Epoch
|
||||
data_root: Bytes32
|
||||
chunk_count: uint64
|
||||
chunk_bits_merkle_root: Bytes32
|
||||
responder_key: Bytes96
|
||||
```
|
||||
|
||||
#### `CustodyResponse`
|
||||
|
||||
```python
|
||||
{
|
||||
'challenge_index': 'uint64',
|
||||
'chunk_index': 'uint64',
|
||||
'chunk': ['byte', BYTES_PER_CUSTODY_CHUNK],
|
||||
'data_branch': ['bytes32'],
|
||||
'chunk_bits_branch': ['bytes32'],
|
||||
'chunk_bits_leaf': 'bytes32',
|
||||
}
|
||||
class CustodyResponse(Container):
|
||||
challenge_index: uint64
|
||||
chunk_index: uint64
|
||||
chunk: Vector[bytes, BYTES_PER_CUSTODY_CHUNK]
|
||||
data_branch: List[Bytes32]
|
||||
chunk_bits_branch: List[Bytes32]
|
||||
chunk_bits_leaf: Bytes32
|
||||
```
|
||||
|
||||
### New beacon operations
|
||||
@ -189,12 +182,11 @@ This document details the beacon chain additions and changes in Phase 1 of Ether
|
||||
#### `CustodyKeyReveal`
|
||||
|
||||
```python
|
||||
{
|
||||
class CustodyKeyReveal(Container):
|
||||
# Index of the validator whose key is being revealed
|
||||
'revealer_index': 'uint64',
|
||||
revealer_index: uint64
|
||||
# Reveal (masked signature)
|
||||
'reveal': 'bytes96',
|
||||
}
|
||||
reveal: Bytes96
|
||||
```
|
||||
|
||||
#### `EarlyDerivedSecretReveal`
|
||||
@ -202,18 +194,17 @@ This document details the beacon chain additions and changes in Phase 1 of Ether
|
||||
Represents an early (punishable) reveal of one of the derived secrets, where derived secrets are RANDAO reveals and custody reveals (both are part of the same domain).
|
||||
|
||||
```python
|
||||
{
|
||||
class EarlyDerivedSecretReveal(Container):
|
||||
# Index of the validator whose key is being revealed
|
||||
'revealed_index': 'uint64',
|
||||
revealed_index: uint64
|
||||
# RANDAO epoch of the key that is being revealed
|
||||
'epoch': 'uint64',
|
||||
epoch: uint64
|
||||
# Reveal (masked signature)
|
||||
'reveal': 'bytes96',
|
||||
reveal: Bytes96
|
||||
# Index of the validator who revealed (whistleblower)
|
||||
'masker_index': 'uint64',
|
||||
masker_index: uint64
|
||||
# Mask used to hide the actual reveal signature (prevent reveal from being stolen)
|
||||
'mask': 'bytes32',
|
||||
}
|
||||
mask: Bytes32
|
||||
```
|
||||
|
||||
### Phase 0 container updates
|
||||
@ -223,51 +214,40 @@ Add the following fields to the end of the specified container objects. Fields w
|
||||
#### `Validator`
|
||||
|
||||
```python
|
||||
{
|
||||
class Validator(Container):
|
||||
# next_custody_reveal_period is initialised to the custody period
|
||||
# (of the particular validator) in which the validator is activated
|
||||
# = get_validators_custody_reveal_period(...)
|
||||
'next_custody_reveal_period': 'uint64',
|
||||
'max_reveal_lateness': 'uint64',
|
||||
}
|
||||
next_custody_reveal_period: uint64
|
||||
max_reveal_lateness: uint64
|
||||
```
|
||||
|
||||
#### `BeaconState`
|
||||
|
||||
```python
|
||||
{
|
||||
'custody_chunk_challenge_records': [CustodyChunkChallengeRecord],
|
||||
'custody_bit_challenge_records': [CustodyBitChallengeRecord],
|
||||
'custody_challenge_index': 'uint64',
|
||||
class BeaconState(Container):
|
||||
custody_chunk_challenge_records: List[CustodyChunkChallengeRecord]
|
||||
custody_bit_challenge_records: List[CustodyBitChallengeRecord]
|
||||
custody_challenge_index: uint64
|
||||
|
||||
# Future derived secrets already exposed; contains the indices of the exposed validator
|
||||
# at RANDAO reveal period % EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS
|
||||
'exposed_derived_secrets': [['uint64'], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS],
|
||||
}
|
||||
exposed_derived_secrets: Vector[List[uint64], EARLY_DERIVED_SECRET_PENALTY_MAX_FUTURE_EPOCHS]
|
||||
```
|
||||
|
||||
#### `BeaconBlockBody`
|
||||
|
||||
```python
|
||||
{
|
||||
'custody_chunk_challenges': [CustodyChunkChallenge],
|
||||
'custody_bit_challenges': [CustodyBitChallenge],
|
||||
'custody_responses': [CustodyResponse],
|
||||
'custody_key_reveals': [CustodyKeyReveal],
|
||||
'early_derived_secret_reveals': [EarlyDerivedSecretReveal],
|
||||
}
|
||||
class BeaconBlockBody(Container):
|
||||
custody_chunk_challenges: List[CustodyChunkChallenge]
|
||||
custody_bit_challenges: List[CustodyBitChallenge]
|
||||
custody_responses: List[CustodyResponse]
|
||||
custody_key_reveals: List[CustodyKeyReveal]
|
||||
early_derived_secret_reveals: List[EarlyDerivedSecretReveal]
|
||||
```
|
||||
|
||||
## Helpers

### `type_of`

The `type_of` function accepts an SSZ object as a single input and returns the corresponding SSZ type.

### `empty`

The `empty` function accepts an SSZ type as input and returns an object of that type with all fields initialized to default values.
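A hedged sketch of how these helpers compose, inferred from the descriptions above and from the `replace_empty_or_append` change below; it assumes the phase 1 spec namespace, so `CustodyChunkChallenge` is in scope.

```python
# `empty` builds the default-valued instance of a type, `type_of` recovers the type
# of an instance, and `is_empty` compares an instance against that default.
challenge = empty(CustodyChunkChallenge)

assert type_of(challenge) == CustodyChunkChallenge
assert is_empty(challenge)             # every field still at its default value
assert challenge.chunk_index == 0      # uint64 fields default to 0

challenge.chunk_index = 1
assert not is_empty(challenge)         # any non-default field makes it non-empty
```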
### `ceillog2`
|
||||
|
||||
```python
|
||||
@ -287,7 +267,7 @@ def get_custody_chunk_count(crosslink: Crosslink) -> int:
|
||||
### `get_custody_chunk_bit`
|
||||
|
||||
```python
|
||||
def get_custody_chunk_bit(key: BLSSignature, chunk: bytes) -> bool:
|
||||
def get_custody_chunk_bit(key: Bytes96, chunk: bytes) -> bool:
|
||||
# TODO: Replace with something MPC-friendly, e.g. the Legendre symbol
|
||||
return get_bitfield_bit(hash(key + chunk), 0)
|
||||
```
|
||||
@ -299,7 +279,7 @@ def get_chunk_bits_root(chunk_bitfield: bytes) -> Bytes32:
|
||||
aggregated_bits = bytearray([0] * 32)
|
||||
for i in range(0, len(chunk_bitfield), 32):
|
||||
for j in range(32):
|
||||
aggregated_bits[j] ^= chunk_bitfield[i+j]
|
||||
aggregated_bits[j] ^= chunk_bitfield[i + j]
|
||||
return hash(aggregated_bits)
|
||||
```
|
||||
|
||||
@@ -332,7 +312,7 @@ def get_validators_custody_reveal_period(state: BeaconState,

```python
def replace_empty_or_append(list: List[Any], new_element: Any) -> int:
    for i in range(len(list)):
        if list[i] == empty(type_of(new_element)):
        if is_empty(list[i]):
            list[i] = new_element
            return i
    list.append(new_element)
@ -70,53 +70,48 @@ This document describes the shard data layer and the shard fork choice rule in P
|
||||
### `ShardBlockBody`
|
||||
|
||||
```python
|
||||
{
|
||||
'data': ['byte', BYTES_PER_SHARD_BLOCK_BODY],
|
||||
}
|
||||
class ShardBlockBody(Container):
|
||||
data: Vector[bytes, BYTES_PER_SHARD_BLOCK_BODY]
|
||||
```
|
||||
|
||||
### `ShardAttestation`
|
||||
|
||||
```python
|
||||
{
|
||||
'data': {
|
||||
'slot': Slot,
|
||||
'shard': Shard,
|
||||
'shard_block_root': 'bytes32',
|
||||
},
|
||||
'aggregation_bitfield': 'bytes',
|
||||
'aggregate_signature': BLSSignature,
|
||||
}
|
||||
class ShardAttestation(Container):
|
||||
class data(Container):
|
||||
slot: uint64
|
||||
shard: uint64
|
||||
shard_block_root: Bytes32
|
||||
aggregation_bitfield: bytes
|
||||
aggregate_signature: Bytes96
|
||||
```
|
||||
|
||||
### `ShardBlock`
|
||||
|
||||
```python
|
||||
{
|
||||
'slot': Slot,
|
||||
'shard': Shard,
|
||||
'beacon_chain_root': 'bytes32',
|
||||
'parent_root': 'bytes32',
|
||||
'data': ShardBlockBody,
|
||||
'state_root': 'bytes32',
|
||||
'attestations': [ShardAttestation],
|
||||
'signature': BLSSignature,
|
||||
}
|
||||
class ShardBlock(Container):
|
||||
slot: uint64
|
||||
shard: uint64
|
||||
beacon_chain_root: Bytes32
|
||||
parent_root: Bytes32
|
||||
data: ShardBlockBody
|
||||
state_root: Bytes32
|
||||
attestations: List[ShardAttestation]
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
### `ShardBlockHeader`
|
||||
|
||||
```python
|
||||
{
|
||||
'slot': Slot,
|
||||
'shard': Shard,
|
||||
'beacon_chain_root': 'bytes32',
|
||||
'parent_root': 'bytes32',
|
||||
'body_root': 'bytes32',
|
||||
'state_root': 'bytes32',
|
||||
'attestations': [ShardAttestation],
|
||||
'signature': BLSSignature,
|
||||
}
|
||||
class ShardBlockHeader(Container):
|
||||
slot: uint64
|
||||
shard: uint64
|
||||
beacon_chain_root: Bytes32
|
||||
parent_root: Bytes32
|
||||
body_root: Bytes32
|
||||
state_root: Bytes32
|
||||
attestations: List[ShardAttestation]
|
||||
signature: Bytes96
|
||||
```
|
||||
|
||||
## Helper functions
|
||||
@ -265,7 +260,7 @@ def compute_crosslink_data_root(blocks: List[ShardBlock]) -> Bytes32:
|
||||
) for block in blocks
|
||||
]))
|
||||
+ hash_tree_root(pad_to_power_of_2([
|
||||
hash_tree_root_of_bytes(block.body) for block in blocks
|
||||
hash_tree_root_of_bytes(block.body) for block in blocks
|
||||
]))
|
||||
)
|
||||
```
|
||||
|
@@ -66,6 +66,10 @@ For convenience we alias:

The default value of a type upon initialization is recursively defined using `0` for `"uintN"`, `False` for `"bool"`, and `[]` for lists. Unions default to the first type in the union (with type index zero), which is `"null"` if present in the union.

#### `is_empty`

An SSZ object is called empty (and thus `is_empty(object)` returns true) if it is equal to the default value for that type.

### Illegal types

Empty vector types (i.e. `[subtype, 0]` for some `subtype`) are not legal. The `"null"` type is only legal as the first type in a union subtype (i.e., with type index zero).
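A hedged illustration of the default-value rule using the Python typing introduced by this commit; the container and the no-argument constructor behaviour are assumptions for illustration, not spec text.

```python
from eth2spec.utils.ssz.ssz_typing import uint64, Container


class ExampleRecord(Container):  # hypothetical container, for illustration only
    index: uint64
    slashed: bool


record = ExampleRecord()      # constructing with no arguments yields the default value
assert record.index == 0      # "uintN" defaults to 0
assert not record.slashed     # "bool" defaults to False
# is_empty(record) is therefore true for this freshly-constructed object.
```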
@@ -115,12 +115,11 @@ Once a validator has been processed and added to the beacon state's `validator_r

In normal operation, the validator is quickly activated at which point the validator is added to the shuffling and begins validation after an additional `ACTIVATION_EXIT_DELAY` epochs (25.6 minutes).

The function [`is_active_validator`](../core/0_beacon-chain.md#is_active_validator) can be used to check if a validator is active during a given shuffling epoch. Note that the `BeaconState` contains a field `current_shuffling_epoch` which dictates from which epoch the current active validators are taken. Usage is as follows:
The function [`is_active_validator`](../core/0_beacon-chain.md#is_active_validator) can be used to check if a validator is active during a given epoch. Usage is as follows:

```python
shuffling_epoch = state.current_shuffling_epoch
validator = state.validator_registry[validator_index]
is_active = is_active_validator(validator, shuffling_epoch)
is_active = is_active_validator(validator, get_current_epoch(state))
```

Once a validator is activated, the validator is assigned [responsibilities](#beacon-chain-responsibilities) until exited.
@ -42,8 +42,8 @@ if __name__ == "__main__":
|
||||
create_suite('attester_slashing', 'mainnet', lambda: generate_from_tests(test_process_attester_slashing)),
|
||||
create_suite('block_header', 'minimal', lambda: generate_from_tests(test_process_block_header)),
|
||||
create_suite('block_header', 'mainnet', lambda: generate_from_tests(test_process_block_header)),
|
||||
create_suite('deposit', 'minimal', lambda: generate_from_tests(test_process_deposit)),
|
||||
create_suite('deposit', 'mainnet', lambda: generate_from_tests(test_process_deposit)),
|
||||
create_suite('deposit', 'minimal', lambda: generate_from_tests(test_process_deposit)),
|
||||
create_suite('deposit', 'mainnet', lambda: generate_from_tests(test_process_deposit)),
|
||||
create_suite('proposer_slashing', 'minimal', lambda: generate_from_tests(test_process_proposer_slashing)),
|
||||
create_suite('proposer_slashing', 'mainnet', lambda: generate_from_tests(test_process_proposer_slashing)),
|
||||
create_suite('transfer', 'minimal', lambda: generate_from_tests(test_process_transfer)),
|
||||
|
@ -2,7 +2,7 @@ from random import Random
|
||||
|
||||
from eth2spec.debug import random_value, encode
|
||||
from eth2spec.phase0 import spec
|
||||
from eth2spec.utils.minimal_ssz import (
|
||||
from eth2spec.utils.ssz.ssz_impl import (
|
||||
hash_tree_root,
|
||||
signing_root,
|
||||
serialize,
|
||||
|
@ -1,28 +1,39 @@
|
||||
from eth2spec.utils.minimal_ssz import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_typing import (
|
||||
is_uint_type, is_bool_type, is_list_type,
|
||||
is_vector_type, is_bytes_type, is_bytesn_type, is_container_type,
|
||||
read_vector_elem_type, read_list_elem_type,
|
||||
Vector, BytesN
|
||||
)
|
||||
|
||||
|
||||
def decode(json, typ):
|
||||
if isinstance(typ, str) and typ[:4] == 'uint':
|
||||
return json
|
||||
elif typ == 'bool':
|
||||
assert json in (True, False)
|
||||
return json
|
||||
elif isinstance(typ, list):
|
||||
return [decode(element, typ[0]) for element in json]
|
||||
elif isinstance(typ, str) and typ[:4] == 'byte':
|
||||
return bytes.fromhex(json[2:])
|
||||
elif hasattr(typ, 'fields'):
|
||||
def decode(data, typ):
|
||||
if is_uint_type(typ):
|
||||
return data
|
||||
elif is_bool_type(typ):
|
||||
assert data in (True, False)
|
||||
return data
|
||||
elif is_list_type(typ):
|
||||
elem_typ = read_list_elem_type(typ)
|
||||
return [decode(element, elem_typ) for element in data]
|
||||
elif is_vector_type(typ):
|
||||
elem_typ = read_vector_elem_type(typ)
|
||||
return Vector(decode(element, elem_typ) for element in data)
|
||||
elif is_bytes_type(typ):
|
||||
return bytes.fromhex(data[2:])
|
||||
elif is_bytesn_type(typ):
|
||||
return BytesN(bytes.fromhex(data[2:]))
|
||||
elif is_container_type(typ):
|
||||
temp = {}
|
||||
for field, subtype in typ.fields.items():
|
||||
temp[field] = decode(json[field], subtype)
|
||||
if field + "_hash_tree_root" in json:
|
||||
assert(json[field + "_hash_tree_root"][2:] ==
|
||||
for field, subtype in typ.get_fields():
|
||||
temp[field] = decode(data[field], subtype)
|
||||
if field + "_hash_tree_root" in data:
|
||||
assert(data[field + "_hash_tree_root"][2:] ==
|
||||
hash_tree_root(temp[field], subtype).hex())
|
||||
ret = typ(**temp)
|
||||
if "hash_tree_root" in json:
|
||||
assert(json["hash_tree_root"][2:] ==
|
||||
if "hash_tree_root" in data:
|
||||
assert(data["hash_tree_root"][2:] ==
|
||||
hash_tree_root(ret, typ).hex())
|
||||
return ret
|
||||
else:
|
||||
print(json, typ)
|
||||
raise Exception("Type not recognized")
|
||||
raise Exception(f"Type not recognized: data={data}, typ={typ}")
|
||||
|
@ -1,27 +1,36 @@
|
||||
from eth2spec.utils.minimal_ssz import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_typing import (
|
||||
is_uint_type, is_bool_type, is_list_type, is_vector_type, is_container_type,
|
||||
read_elem_type,
|
||||
uint
|
||||
)
|
||||
|
||||
|
||||
def encode(value, typ, include_hash_tree_roots=False):
|
||||
if isinstance(typ, str) and typ[:4] == 'uint':
|
||||
if typ[4:] == '128' or typ[4:] == '256':
|
||||
if is_uint_type(typ):
|
||||
if hasattr(typ, '__supertype__'):
|
||||
typ = typ.__supertype__
|
||||
# Larger uints are boxed and the class declares their byte length
|
||||
if issubclass(typ, uint) and typ.byte_len > 8:
|
||||
return str(value)
|
||||
return value
|
||||
elif typ == 'bool':
|
||||
elif is_bool_type(typ):
|
||||
assert value in (True, False)
|
||||
return value
|
||||
elif isinstance(typ, list):
|
||||
return [encode(element, typ[0], include_hash_tree_roots) for element in value]
|
||||
elif isinstance(typ, str) and typ[:4] == 'byte':
|
||||
elif is_list_type(typ) or is_vector_type(typ):
|
||||
elem_typ = read_elem_type(typ)
|
||||
return [encode(element, elem_typ, include_hash_tree_roots) for element in value]
|
||||
elif isinstance(typ, type) and issubclass(typ, bytes): # both bytes and BytesN
|
||||
return '0x' + value.hex()
|
||||
elif hasattr(typ, 'fields'):
|
||||
elif is_container_type(typ):
|
||||
ret = {}
|
||||
for field, subtype in typ.fields.items():
|
||||
ret[field] = encode(getattr(value, field), subtype, include_hash_tree_roots)
|
||||
for field, subtype in typ.get_fields():
|
||||
field_value = getattr(value, field)
|
||||
ret[field] = encode(field_value, subtype, include_hash_tree_roots)
|
||||
if include_hash_tree_roots:
|
||||
ret[field + "_hash_tree_root"] = '0x' + hash_tree_root(getattr(value, field), subtype).hex()
|
||||
ret[field + "_hash_tree_root"] = '0x' + hash_tree_root(field_value, subtype).hex()
|
||||
if include_hash_tree_roots:
|
||||
ret["hash_tree_root"] = '0x' + hash_tree_root(value, typ).hex()
|
||||
return ret
|
||||
else:
|
||||
print(value, typ)
|
||||
raise Exception("Type not recognized")
|
||||
raise Exception(f"Type not recognized: value={value}, typ={typ}")
|
||||
|
@ -2,12 +2,19 @@ from random import Random
|
||||
from typing import Any
|
||||
from enum import Enum
|
||||
|
||||
from eth2spec.utils.ssz.ssz_impl import is_basic_type
|
||||
|
||||
UINT_SIZES = [8, 16, 32, 64, 128, 256]
|
||||
from eth2spec.utils.ssz.ssz_typing import (
|
||||
is_uint_type, is_bool_type, is_list_type,
|
||||
is_vector_type, is_bytes_type, is_bytesn_type, is_container_type,
|
||||
read_vector_elem_type, read_list_elem_type,
|
||||
uint_byte_size
|
||||
)
|
||||
|
||||
basic_types = ["uint%d" % v for v in UINT_SIZES] + ['bool', 'byte']
|
||||
# in bytes
|
||||
UINT_SIZES = (1, 2, 4, 8, 16, 32)
|
||||
|
||||
random_mode_names = ["random", "zero", "max", "nil", "one", "lengthy"]
|
||||
random_mode_names = ("random", "zero", "max", "nil", "one", "lengthy")
|
||||
|
||||
|
||||
class RandomizationMode(Enum):
|
||||
@ -49,104 +56,103 @@ def get_random_ssz_object(rng: Random,
|
||||
"""
|
||||
if chaos:
|
||||
mode = rng.choice(list(RandomizationMode))
|
||||
if isinstance(typ, str):
|
||||
if is_bytes_type(typ):
|
||||
# Bytes array
|
||||
if typ == 'bytes':
|
||||
if mode == RandomizationMode.mode_nil_count:
|
||||
return b''
|
||||
if mode == RandomizationMode.mode_max_count:
|
||||
return get_random_bytes_list(rng, max_bytes_length)
|
||||
if mode == RandomizationMode.mode_one_count:
|
||||
return get_random_bytes_list(rng, 1)
|
||||
if mode == RandomizationMode.mode_zero:
|
||||
return b'\x00'
|
||||
if mode == RandomizationMode.mode_max:
|
||||
return b'\xff'
|
||||
return get_random_bytes_list(rng, rng.randint(0, max_bytes_length))
|
||||
elif typ[:5] == 'bytes' and len(typ) > 5:
|
||||
length = int(typ[5:])
|
||||
# Sanity, don't generate absurdly big random values
|
||||
# If a client is aiming to performance-test, they should create a benchmark suite.
|
||||
assert length <= max_bytes_length
|
||||
if mode == RandomizationMode.mode_zero:
|
||||
return b'\x00' * length
|
||||
if mode == RandomizationMode.mode_max:
|
||||
return b'\xff' * length
|
||||
return get_random_bytes_list(rng, length)
|
||||
# Basic types
|
||||
if mode == RandomizationMode.mode_nil_count:
|
||||
return b''
|
||||
elif mode == RandomizationMode.mode_max_count:
|
||||
return get_random_bytes_list(rng, max_bytes_length)
|
||||
elif mode == RandomizationMode.mode_one_count:
|
||||
return get_random_bytes_list(rng, 1)
|
||||
elif mode == RandomizationMode.mode_zero:
|
||||
return b'\x00'
|
||||
elif mode == RandomizationMode.mode_max:
|
||||
return b'\xff'
|
||||
else:
|
||||
return get_random_bytes_list(rng, rng.randint(0, max_bytes_length))
|
||||
elif is_bytesn_type(typ):
|
||||
# BytesN
|
||||
length = typ.length
|
||||
# Sanity, don't generate absurdly big random values
|
||||
# If a client is aiming to performance-test, they should create a benchmark suite.
|
||||
assert length <= max_bytes_length
|
||||
if mode == RandomizationMode.mode_zero:
|
||||
return b'\x00' * length
|
||||
elif mode == RandomizationMode.mode_max:
|
||||
return b'\xff' * length
|
||||
else:
|
||||
return get_random_bytes_list(rng, length)
|
||||
elif is_basic_type(typ):
|
||||
# Basic types
|
||||
if mode == RandomizationMode.mode_zero:
|
||||
return get_min_basic_value(typ)
|
||||
elif mode == RandomizationMode.mode_max:
|
||||
return get_max_basic_value(typ)
|
||||
else:
|
||||
if mode == RandomizationMode.mode_zero:
|
||||
return get_min_basic_value(typ)
|
||||
if mode == RandomizationMode.mode_max:
|
||||
return get_max_basic_value(typ)
|
||||
return get_random_basic_value(rng, typ)
|
||||
# Vector:
|
||||
elif isinstance(typ, list) and len(typ) == 2:
|
||||
elif is_vector_type(typ):
|
||||
# Vector
|
||||
elem_typ = read_vector_elem_type(typ)
|
||||
return [
|
||||
get_random_ssz_object(rng, typ[0], max_bytes_length, max_list_length, mode, chaos)
|
||||
for _ in range(typ[1])
|
||||
get_random_ssz_object(rng, elem_typ, max_bytes_length, max_list_length, mode, chaos)
|
||||
for _ in range(typ.length)
|
||||
]
|
||||
# List:
|
||||
elif isinstance(typ, list) and len(typ) == 1:
|
||||
elif is_list_type(typ):
|
||||
# List
|
||||
elem_typ = read_list_elem_type(typ)
|
||||
length = rng.randint(0, max_list_length)
|
||||
if mode == RandomizationMode.mode_one_count:
|
||||
length = 1
|
||||
if mode == RandomizationMode.mode_max_count:
|
||||
elif mode == RandomizationMode.mode_max_count:
|
||||
length = max_list_length
|
||||
|
||||
return [
|
||||
get_random_ssz_object(rng, typ[0], max_bytes_length, max_list_length, mode, chaos)
|
||||
get_random_ssz_object(rng, elem_typ, max_bytes_length, max_list_length, mode, chaos)
|
||||
for _ in range(length)
|
||||
]
|
||||
# Container:
|
||||
elif hasattr(typ, 'fields'):
|
||||
elif is_container_type(typ):
|
||||
# Container
|
||||
return typ(**{
|
||||
field:
|
||||
get_random_ssz_object(rng, subtype, max_bytes_length, max_list_length, mode, chaos)
|
||||
for field, subtype in typ.fields.items()
|
||||
for field, subtype in typ.get_fields()
|
||||
})
|
||||
else:
|
||||
print(typ)
|
||||
raise Exception("Type not recognized")
|
||||
raise Exception(f"Type not recognized: typ={typ}")
|
||||
|
||||
|
||||
def get_random_bytes_list(rng: Random, length: int) -> bytes:
|
||||
return bytes(rng.getrandbits(8) for _ in range(length))
|
||||
|
||||
|
||||
def get_random_basic_value(rng: Random, typ: str) -> Any:
|
||||
if typ == 'bool':
|
||||
def get_random_basic_value(rng: Random, typ) -> Any:
|
||||
if is_bool_type(typ):
|
||||
return rng.choice((True, False))
|
||||
if typ[:4] == 'uint':
|
||||
size = int(typ[4:])
|
||||
elif is_uint_type(typ):
|
||||
size = uint_byte_size(typ)
|
||||
assert size in UINT_SIZES
|
||||
return rng.randint(0, 2**size - 1)
|
||||
if typ == 'byte':
|
||||
return rng.randint(0, 8)
|
||||
return rng.randint(0, 256**size - 1)
|
||||
else:
|
||||
raise ValueError("Not a basic type")
|
||||
raise ValueError(f"Not a basic type: typ={typ}")
|
||||
|
||||
|
||||
def get_min_basic_value(typ: str) -> Any:
|
||||
if typ == 'bool':
|
||||
def get_min_basic_value(typ) -> Any:
|
||||
if is_bool_type(typ):
|
||||
return False
|
||||
if typ[:4] == 'uint':
|
||||
size = int(typ[4:])
|
||||
elif is_uint_type(typ):
|
||||
size = uint_byte_size(typ)
|
||||
assert size in UINT_SIZES
|
||||
return 0
|
||||
if typ == 'byte':
|
||||
return 0x00
|
||||
else:
|
||||
raise ValueError("Not a basic type")
|
||||
raise ValueError(f"Not a basic type: typ={typ}")
|
||||
|
||||
|
||||
def get_max_basic_value(typ: str) -> Any:
|
||||
if typ == 'bool':
|
||||
def get_max_basic_value(typ) -> Any:
|
||||
if is_bool_type(typ):
|
||||
return True
|
||||
if typ[:4] == 'uint':
|
||||
size = int(typ[4:])
|
||||
elif is_uint_type(typ):
|
||||
size = uint_byte_size(typ)
|
||||
assert size in UINT_SIZES
|
||||
return 2**size - 1
|
||||
if typ == 'byte':
|
||||
return 0xff
|
||||
return 256**size - 1
|
||||
else:
|
||||
raise ValueError("Not a basic type")
|
||||
raise ValueError(f"Not a basic type: typ={typ}")
|
||||
|
@ -145,7 +145,7 @@ def test_wrong_deposit_for_deposit_count(spec, state):
|
||||
pubkey_1,
|
||||
privkey_1,
|
||||
spec.MAX_EFFECTIVE_BALANCE,
|
||||
withdrawal_credentials=b'\x00'*32,
|
||||
withdrawal_credentials=b'\x00' * 32,
|
||||
signed=True,
|
||||
)
|
||||
deposit_count_1 = len(deposit_data_leaves)
|
||||
@ -161,7 +161,7 @@ def test_wrong_deposit_for_deposit_count(spec, state):
|
||||
pubkey_2,
|
||||
privkey_2,
|
||||
spec.MAX_EFFECTIVE_BALANCE,
|
||||
withdrawal_credentials=b'\x00'*32,
|
||||
withdrawal_credentials=b'\x00' * 32,
|
||||
signed=True,
|
||||
)
|
||||
|
||||
|
@ -4,7 +4,7 @@ from eth2spec.test.helpers.bitfields import set_bitfield_bit
|
||||
from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block
|
||||
from eth2spec.test.helpers.keys import privkeys
|
||||
from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures
|
||||
from eth2spec.utils.minimal_ssz import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
|
||||
|
||||
|
||||
def build_attestation_data(spec, state, slot, shard):
|
||||
|
@ -5,7 +5,7 @@ def set_bitfield_bit(bitfield, i):
|
||||
byte_index = i // 8
|
||||
bit_index = i % 8
|
||||
return (
|
||||
bitfield[:byte_index] +
|
||||
bytes([bitfield[byte_index] | (1 << bit_index)]) +
|
||||
bitfield[byte_index + 1:]
|
||||
bitfield[:byte_index] +
|
||||
bytes([bitfield[byte_index] | (1 << bit_index)]) +
|
||||
bitfield[byte_index + 1:]
|
||||
)
|
||||
|
@ -2,7 +2,7 @@ from copy import deepcopy
|
||||
|
||||
from eth2spec.test.helpers.keys import privkeys
|
||||
from eth2spec.utils.bls import bls_sign, only_with_bls
|
||||
from eth2spec.utils.minimal_ssz import signing_root, hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root, hash_tree_root
|
||||
|
||||
|
||||
# Fully ignore the function if BLS is off, beacon-proposer index calculation is slow.
|
||||
|
@ -1,5 +1,5 @@
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
|
||||
|
||||
def sign_block_header(spec, state, header, privkey):
|
||||
|
@ -1,7 +1,7 @@
|
||||
from eth2spec.test.helpers.keys import pubkeys, privkeys
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
from eth2spec.utils.merkle_minimal import calc_merkle_tree_from_leaves, get_merkle_root, get_merkle_proof
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
|
||||
|
||||
def build_deposit_data(spec, state, pubkey, privkey, amount, withdrawal_credentials, signed=False):
|
||||
|
@ -1,5 +1,5 @@
|
||||
from eth2spec.test.helpers.keys import pubkeys
|
||||
from eth2spec.utils.minimal_ssz import hash_tree_root
|
||||
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
|
||||
|
||||
|
||||
def build_mock_validator(spec, i: int, balance: int):
|
||||
|
@ -1,7 +1,7 @@
|
||||
from eth2spec.test.helpers.keys import pubkeys, privkeys
|
||||
from eth2spec.test.helpers.state import get_balance
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
|
||||
|
||||
def get_valid_transfer(spec, state, slot=None, sender_index=None, amount=None, fee=None, signed=False):
|
||||
@ -32,7 +32,7 @@ def get_valid_transfer(spec, state, slot=None, sender_index=None, amount=None, f
|
||||
|
||||
# ensure withdrawal_credentials reproducible
|
||||
state.validator_registry[transfer.sender].withdrawal_credentials = (
|
||||
spec.BLS_WITHDRAWAL_PREFIX_BYTE + spec.hash(transfer.pubkey)[1:]
|
||||
spec.BLS_WITHDRAWAL_PREFIX_BYTE + spec.hash(transfer.pubkey)[1:]
|
||||
)
|
||||
|
||||
return transfer
|
||||
|
@ -1,5 +1,5 @@
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
|
||||
|
||||
def build_voluntary_exit(spec, state, epoch, validator_index, privkey, signed=False):
|
||||
|
@ -1,7 +1,9 @@
|
||||
from copy import deepcopy
|
||||
from typing import List
|
||||
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
|
||||
from eth2spec.test.helpers.state import get_balance
|
||||
from eth2spec.test.helpers.transfers import get_valid_transfer
|
||||
from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block
|
||||
@ -24,7 +26,7 @@ def test_empty_block_transition(spec, state):
    yield 'pre', state

    block = build_empty_block_for_next_slot(spec, state, signed=True)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -43,7 +45,7 @@ def test_skipped_slots(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    block.slot += 3
    sign_block(spec, state, block)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -62,7 +64,7 @@ def test_empty_epoch_transition(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    block.slot += spec.SLOTS_PER_EPOCH
    sign_block(spec, state, block)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -82,7 +84,7 @@ def test_empty_epoch_transition_not_finalizing(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    block.slot += spec.SLOTS_PER_EPOCH * 5
    sign_block(spec, state, block, proposer_index=0)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -111,7 +113,7 @@ def test_proposer_slashing(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    block.body.proposer_slashings.append(proposer_slashing)
    sign_block(spec, state, block)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -145,7 +147,7 @@ def test_attester_slashing(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    block.body.attester_slashings.append(attester_slashing)
    sign_block(spec, state, block)
    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -183,7 +185,7 @@ def test_deposit_in_block(spec, state):
    block.body.deposits.append(deposit)
    sign_block(spec, state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -211,7 +213,7 @@ def test_deposit_top_up(spec, state):
    block.body.deposits.append(deposit)
    sign_block(spec, state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -248,7 +250,7 @@ def test_attestation(spec, state):
    sign_block(spec, state, epoch_block)
    spec.state_transition(state, epoch_block)

    yield 'blocks', [attestation_block, epoch_block], [spec.BeaconBlock]
    yield 'blocks', [attestation_block, epoch_block], List[spec.BeaconBlock]
    yield 'post', state

    assert len(state.current_epoch_attestations) == 0
@ -295,7 +297,7 @@ def test_voluntary_exit(spec, state):
    sign_block(spec, state, exit_block)
    spec.state_transition(state, exit_block)

    yield 'blocks', [initiate_exit_block, exit_block], [spec.BeaconBlock]
    yield 'blocks', [initiate_exit_block, exit_block], List[spec.BeaconBlock]
    yield 'post', state

    assert state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
@ -324,7 +326,7 @@ def test_transfer(spec, state):
    block.body.transfers.append(transfer)
    sign_block(spec, state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]

    spec.state_transition(state, block)
    yield 'post', state
@ -354,7 +356,7 @@ def test_balance_driven_status_transitions(spec, state):
    sign_block(spec, state, block)
    spec.state_transition(state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]
    yield 'post', state

    assert state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
@ -371,7 +373,7 @@ def test_historical_batch(spec, state):
    block = build_empty_block_for_next_slot(spec, state, signed=True)
    spec.state_transition(state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]
    yield 'post', state

    assert state.slot == block.slot
@ -400,7 +402,7 @@ def test_eth1_data_votes(spec, state):

    spec.state_transition(state, block)

    yield 'blocks', [block], [spec.BeaconBlock]
    yield 'blocks', [block], List[spec.BeaconBlock]
    yield 'post', state

    assert state.slot % spec.SLOTS_PER_ETH1_VOTING_PERIOD == 0
@ -1,4 +1,5 @@
from copy import deepcopy
from typing import List

from eth2spec.test.context import spec_state_test, never_bls, with_all_phases
from eth2spec.test.helpers.state import next_epoch
@ -84,7 +85,7 @@ def test_finality_rule_4(spec, state):
    assert state.finalized_epoch == prev_state.current_justified_epoch
    assert state.finalized_root == prev_state.current_justified_root

    yield 'blocks', blocks, [spec.BeaconBlock]
    yield 'blocks', blocks, List[spec.BeaconBlock]
    yield 'post', state


@ -115,7 +116,7 @@ def test_finality_rule_1(spec, state):
    assert state.finalized_epoch == prev_state.previous_justified_epoch
    assert state.finalized_root == prev_state.previous_justified_root

    yield 'blocks', blocks, [spec.BeaconBlock]
    yield 'blocks', blocks, List[spec.BeaconBlock]
    yield 'post', state


@ -148,7 +149,7 @@ def test_finality_rule_2(spec, state):

    blocks += new_blocks

    yield 'blocks', blocks, [spec.BeaconBlock]
    yield 'blocks', blocks, List[spec.BeaconBlock]
    yield 'post', state


@ -198,5 +199,5 @@ def test_finality_rule_3(spec, state):
    assert state.finalized_epoch == prev_state.current_justified_epoch
    assert state.finalized_root == prev_state.current_justified_root

    yield 'blocks', blocks, [spec.BeaconBlock]
    yield 'blocks', blocks, List[spec.BeaconBlock]
    yield 'post', state
@ -1,4 +1,5 @@
from hashlib import sha256


def hash(x): return sha256(x).digest()
def hash(x):
    return sha256(x).digest()
@ -1,7 +1,9 @@
from .hash_function import hash


zerohashes = [b'\x00' * 32]
ZERO_BYTES32 = b'\x00' * 32

zerohashes = [ZERO_BYTES32]
for layer in range(1, 32):
    zerohashes.append(hash(zerohashes[layer - 1] + zerohashes[layer - 1]))

@ -28,3 +30,25 @@ def get_merkle_proof(tree, item_index):
        subindex = (item_index // 2**i) ^ 1
        proof.append(tree[i][subindex] if subindex < len(tree[i]) else zerohashes[i])
    return proof


def next_power_of_two(v: int) -> int:
    """
    Get the next power of 2. (for 64 bit range ints).
    0 is a special case, to have non-empty defaults.
    Examples:
    0 -> 1, 1 -> 1, 2 -> 2, 3 -> 4, 32 -> 32, 33 -> 64
    """
    if v == 0:
        return 1
    return 1 << (v - 1).bit_length()


def merkleize_chunks(chunks):
    tree = chunks[::]
    margin = next_power_of_two(len(chunks)) - len(chunks)
    tree.extend([ZERO_BYTES32] * margin)
    tree = [ZERO_BYTES32] * len(tree) + tree
    for i in range(len(tree) // 2 - 1, 0, -1):
        tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
    return tree[1]
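Editor's sketch (not part of the commit) of how the merkleization helpers above behave; it assumes the pyspec package is installed so that eth2spec.utils.merkle_minimal is importable:

from eth2spec.utils.merkle_minimal import ZERO_BYTES32, merkleize_chunks, next_power_of_two
from eth2spec.utils.hash_function import hash

# next_power_of_two pads the chunk count up to a full binary tree.
assert next_power_of_two(3) == 4
assert next_power_of_two(0) == 1  # special case: non-empty default

# A single chunk is its own root; two chunks merkleize to hash(a + b).
a, b = b'\x01' * 32, b'\x02' * 32
assert merkleize_chunks([a]) == a
assert merkleize_chunks([a, b]) == hash(a + b)

# With three chunks, the tree is padded with a zero chunk on the right.
assert merkleize_chunks([a, b, a]) == hash(hash(a + b) + hash(a + ZERO_BYTES32))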
0       test_libs/pyspec/eth2spec/utils/ssz/__init__.py    Normal file
163     test_libs/pyspec/eth2spec/utils/ssz/ssz_impl.py    Normal file
@ -0,0 +1,163 @@
from ..merkle_minimal import merkleize_chunks, hash
from eth2spec.utils.ssz.ssz_typing import (
    is_uint_type, is_bool_type, is_container_type,
    is_list_kind, is_vector_kind,
    read_vector_elem_type, read_elem_type,
    uint_byte_size,
    infer_input_type,
    get_zero_value,
)

# SSZ Serialization
# -----------------------------

BYTES_PER_LENGTH_OFFSET = 4


def is_basic_type(typ):
    return is_uint_type(typ) or is_bool_type(typ)


def serialize_basic(value, typ):
    if is_uint_type(typ):
        return value.to_bytes(uint_byte_size(typ), 'little')
    elif is_bool_type(typ):
        if value:
            return b'\x01'
        else:
            return b'\x00'
    else:
        raise Exception("Type not supported: {}".format(typ))


def deserialize_basic(value, typ):
    if is_uint_type(typ):
        return typ(int.from_bytes(value, 'little'))
    elif is_bool_type(typ):
        assert value in (b'\x00', b'\x01')
        return True if value == b'\x01' else False
    else:
        raise Exception("Type not supported: {}".format(typ))
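Editor's sketch (not part of the commit): a quick round trip through the basic-type helpers above. uint16 and uint64 come from ssz_typing as imported at the top of this file:

from eth2spec.utils.ssz.ssz_impl import serialize_basic, deserialize_basic
from eth2spec.utils.ssz.ssz_typing import uint16, uint64

# uints are little-endian and fixed-width; plain ints default to 8 bytes (uint64).
assert serialize_basic(uint16(5), uint16) == b'\x05\x00'
assert serialize_basic(5, uint64) == (5).to_bytes(8, 'little')
assert deserialize_basic(b'\x05\x00', uint16) == 5

# bools are a single 0x00 / 0x01 byte.
assert serialize_basic(True, bool) == b'\x01'
assert deserialize_basic(b'\x00', bool) is False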
def is_fixed_size(typ):
    if is_basic_type(typ):
        return True
    elif is_list_kind(typ):
        return False
    elif is_vector_kind(typ):
        return is_fixed_size(read_vector_elem_type(typ))
    elif is_container_type(typ):
        return all(is_fixed_size(t) for t in typ.get_field_types())
    else:
        raise Exception("Type not supported: {}".format(typ))


def is_empty(obj):
    return get_zero_value(type(obj)) == obj


@infer_input_type
def serialize(obj, typ=None):
    if is_basic_type(typ):
        return serialize_basic(obj, typ)
    elif is_list_kind(typ) or is_vector_kind(typ):
        return encode_series(obj, [read_elem_type(typ)] * len(obj))
    elif is_container_type(typ):
        return encode_series(obj.get_field_values(), typ.get_field_types())
    else:
        raise Exception("Type not supported: {}".format(typ))


def encode_series(values, types):
    # bytes and bytesN are already in the right format.
    if isinstance(values, bytes):
        return values

    # Recursively serialize
    parts = [(is_fixed_size(types[i]), serialize(values[i], typ=types[i])) for i in range(len(values))]

    # Compute and check lengths
    fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
                     for (constant_size, serialized) in parts]
    variable_lengths = [len(serialized) if not constant_size else 0
                        for (constant_size, serialized) in parts]

    # Check if integer is not out of bounds (Python)
    assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)

    # Interleave offsets of variable-size parts with fixed-size parts.
    # Avoid quadratic complexity in calculation of offsets.
    offset = sum(fixed_lengths)
    variable_parts = []
    fixed_parts = []
    for (constant_size, serialized) in parts:
        if constant_size:
            fixed_parts.append(serialized)
        else:
            fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
            variable_parts.append(serialized)
            offset += len(serialized)

    # Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
    return b''.join(fixed_parts + variable_parts)
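To illustrate the offset layout produced by encode_series, here is a hedged editor's sketch with a hypothetical two-field container (a fixed-size uint64 followed by a variable-size bytes field); the 4-byte offset points past the 12-byte fixed section:

from eth2spec.utils.ssz.ssz_impl import serialize
from eth2spec.utils.ssz.ssz_typing import Container, uint64

class Hypothetical(Container):  # illustrative only, not a spec type
    a: uint64
    b: bytes

obj = Hypothetical(a=16, b=b'\xaa\xbb')
encoded = serialize(obj)

# fixed section: 8 bytes of `a`, then a 4-byte offset (8 + 4 = 12) to the variable part
assert encoded == (16).to_bytes(8, 'little') + (12).to_bytes(4, 'little') + b'\xaa\xbb'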
# SSZ Hash-tree-root
# -----------------------------


def pack(values, subtype):
    if isinstance(values, bytes):
        return values
    return b''.join([serialize_basic(value, subtype) for value in values])


def chunkify(bytez):
    # pad `bytez` to nearest 32-byte multiple
    bytez += b'\x00' * (-len(bytez) % 32)
    return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]


def mix_in_length(root, length):
    return hash(root + length.to_bytes(32, 'little'))


def is_bottom_layer_kind(typ):
    return (
        is_basic_type(typ) or
        (is_list_kind(typ) or is_vector_kind(typ)) and is_basic_type(read_elem_type(typ))
    )


@infer_input_type
def get_typed_values(obj, typ=None):
    if is_container_type(typ):
        return obj.get_typed_values()
    elif is_list_kind(typ) or is_vector_kind(typ):
        elem_type = read_elem_type(typ)
        return list(zip(obj, [elem_type] * len(obj)))
    else:
        raise Exception("Invalid type")


@infer_input_type
def hash_tree_root(obj, typ=None):
    if is_bottom_layer_kind(typ):
        data = serialize_basic(obj, typ) if is_basic_type(typ) else pack(obj, read_elem_type(typ))
        leaves = chunkify(data)
    else:
        fields = get_typed_values(obj, typ=typ)
        leaves = [hash_tree_root(field_value, typ=field_typ) for field_value, field_typ in fields]
    if is_list_kind(typ):
        return mix_in_length(merkleize_chunks(leaves), len(obj))
    else:
        return merkleize_chunks(leaves)


@infer_input_type
def signing_root(obj, typ):
    assert is_container_type(typ)
    # ignore last field
    leaves = [hash_tree_root(field_value, typ=field_typ) for field_value, field_typ in obj.get_typed_values()[:-1]]
    return merkleize_chunks(chunkify(b''.join(leaves)))
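Editor's sketch (not part of the commit) of hash_tree_root and signing_root in use; HypotheticalSigned and Bytes96 are illustrative names, not spec types. signing_root merkleizes all fields except the last one, which is conventionally the signature:

from eth2spec.utils.ssz.ssz_impl import hash_tree_root, signing_root
from eth2spec.utils.ssz.ssz_typing import Container, uint64, BytesN

Bytes96 = BytesN[96]                  # hypothetical alias for a signature-sized byte string

class HypotheticalSigned(Container):  # illustrative only, not a spec type
    value: uint64
    signature: Bytes96

a = HypotheticalSigned(value=1, signature=Bytes96(b'\x11' * 96))
b = HypotheticalSigned(value=1)       # signature left as its zero default

# The signing root ignores the trailing signature field...
assert signing_root(a) == signing_root(b)
# ...while the hash tree root covers every field.
assert hash_tree_root(a) != hash_tree_root(b)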
526     test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py    Normal file
@ -0,0 +1,526 @@
from inspect import isclass
from typing import List, Iterable, TypeVar, Type, NewType
from typing import Union
from typing_inspect import get_origin

# SSZ integers
# -----------------------------


class uint(int):
    byte_len = 0

    def __new__(cls, value, *args, **kwargs):
        if value < 0:
            raise ValueError("unsigned types must not be negative")
        return super().__new__(cls, value)


class uint8(uint):
    byte_len = 1

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 8:
            raise ValueError("value out of bounds for uint8")
        return super().__new__(cls, value)


# Alias for uint8
byte = NewType('byte', uint8)


class uint16(uint):
    byte_len = 2

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 16:
            raise ValueError("value out of bounds for uint16")
        return super().__new__(cls, value)


class uint32(uint):
    byte_len = 4

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 32:
            raise ValueError("value out of bounds for uint32")
        return super().__new__(cls, value)


# We simply default to uint64. But do give it a name, for readability
uint64 = NewType('uint64', int)


class uint128(uint):
    byte_len = 16

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 128:
            raise ValueError("value out of bounds for uint128")
        return super().__new__(cls, value)


class uint256(uint):
    byte_len = 32

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 256:
            raise ValueError("value out of bounds for uint256")
        return super().__new__(cls, value)
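Editor's sketch of how the bounded integer classes behave (not part of the commit); note that uint64 is only a NewType alias of int, so it performs no bounds check of its own:

from eth2spec.utils.ssz.ssz_typing import uint8, uint64

assert uint8(255) == 255
# uint8(256)  -> ValueError: value out of bounds for uint8
# uint8(-1)   -> ValueError: unsigned types must not be negative
assert uint64(2**64) == 2**64  # NewType alias of int: unchecked, treated as uint64 by SSZ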
def is_uint_type(typ):
    # All integers are uint in the scope of the spec here.
    # Since we default to uint64. Bounds can be checked elsewhere.
    # However, some are wrapped in a NewType
    if hasattr(typ, '__supertype__'):
        # get the type that the NewType is wrapping
        typ = typ.__supertype__

    return isinstance(typ, type) and issubclass(typ, int) and not issubclass(typ, bool)


def uint_byte_size(typ):
    if hasattr(typ, '__supertype__'):
        typ = typ.__supertype__

    if isinstance(typ, type):
        if issubclass(typ, uint):
            return typ.byte_len
        elif issubclass(typ, int):
            # Default to uint64
            return 8
    else:
        raise TypeError("Type %s is not an uint (or int-default uint64) type" % typ)


# SSZ Container base class
# -----------------------------

# Note: importing ssz functionality locally, to avoid import loop

class Container(object):

    def __init__(self, **kwargs):
        cls = self.__class__
        for f, t in cls.get_fields():
            if f not in kwargs:
                setattr(self, f, get_zero_value(t))
            else:
                setattr(self, f, kwargs[f])

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)

    def signing_root(self):
        from .ssz_impl import signing_root
        return signing_root(self, self.__class__)

    def get_field_values(self):
        cls = self.__class__
        return [getattr(self, field) for field in cls.get_field_names()]

    def __repr__(self):
        return repr({field: getattr(self, field) for field in self.get_field_names()})

    def __str__(self):
        output = []
        for field in self.get_field_names():
            output.append(f'{field}: {getattr(self, field)}')
        return "\n".join(output)

    def __eq__(self, other):
        return self.hash_tree_root() == other.hash_tree_root()

    def __hash__(self):
        return hash(self.hash_tree_root())

    @classmethod
    def get_fields_dict(cls):
        return dict(cls.__annotations__)

    @classmethod
    def get_fields(cls):
        return list(dict(cls.__annotations__).items())

    def get_typed_values(self):
        return list(zip(self.get_field_values(), self.get_field_types()))

    @classmethod
    def get_field_names(cls):
        return list(cls.__annotations__.keys())

    @classmethod
    def get_field_types(cls):
        # values of annotations are the types corresponding to the fields, not instance values.
        return list(cls.__annotations__.values())
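Editor's sketch of defining and using a Container (not part of the commit; HypotheticalPair is illustrative). Fields are declared as class annotations, and missing constructor arguments fall back to get_zero_value defaults:

from eth2spec.utils.ssz.ssz_typing import Container, uint64

class HypotheticalPair(Container):  # illustrative only, not a spec type
    left: uint64
    right: uint64

x = HypotheticalPair(left=1)        # right defaults to 0
assert x.right == 0
assert HypotheticalPair.get_field_names() == ['left', 'right']
assert x.get_typed_values() == [(1, uint64), (0, uint64)]

# Equality and hashing are defined via the SSZ hash tree root.
assert x == HypotheticalPair(left=1, right=0)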
# SSZ vector
# -----------------------------


def _is_vector_instance_of(a, b):
    # Other must not be a BytesN
    if issubclass(b, bytes):
        return False
    elif not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
        # Vector (b) is not an instance of Vector[X, Y] (a)
        return False
    elif not hasattr(a, 'elem_type') or not hasattr(a, 'length'):
        # Vector[X, Y] (b) is an instance of Vector (a)
        return True
    else:
        # Vector[X, Y] (a) is an instance of Vector[X, Y] (b)
        return a.elem_type == b.elem_type and a.length == b.length


def _is_equal_vector_type(a, b):
    # Other must not be a BytesN
    if issubclass(b, bytes):
        return False
    elif not hasattr(a, 'elem_type') or not hasattr(a, 'length'):
        if not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
            # Vector == Vector
            return True
        else:
            # Vector != Vector[X, Y]
            return False
    elif not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
        # Vector[X, Y] != Vector
        return False
    else:
        # Vector[X, Y] == Vector[X, Y]
        return a.elem_type == b.elem_type and a.length == b.length


class VectorMeta(type):
    def __new__(cls, class_name, parents, attrs):
        out = type.__new__(cls, class_name, parents, attrs)
        if 'elem_type' in attrs and 'length' in attrs:
            setattr(out, 'elem_type', attrs['elem_type'])
            setattr(out, 'length', attrs['length'])
        return out

    def __getitem__(self, params):
        if not isinstance(params, tuple) or len(params) != 2:
            raise Exception("Vector must be instantiated with two args: elem type and length")
        o = self.__class__(self.__name__, (Vector,), {'elem_type': params[0], 'length': params[1]})
        o._name = 'Vector'
        return o

    def __subclasscheck__(self, sub):
        return _is_vector_instance_of(self, sub)

    def __instancecheck__(self, other):
        return _is_vector_instance_of(self, other.__class__)

    def __eq__(self, other):
        return _is_equal_vector_type(self, other)

    def __ne__(self, other):
        return not _is_equal_vector_type(self, other)

    def __hash__(self):
        return hash(self.__class__)


class Vector(metaclass=VectorMeta):

    def __init__(self, *args: Iterable):
        cls = self.__class__
        if not hasattr(cls, 'elem_type'):
            raise TypeError("Type Vector without elem_type data cannot be instantiated")
        elif not hasattr(cls, 'length'):
            raise TypeError("Type Vector without length data cannot be instantiated")

        if len(args) != cls.length:
            if len(args) == 0:
                args = [get_zero_value(cls.elem_type) for _ in range(cls.length)]
            else:
                raise TypeError("Typed vector with length %d cannot hold %d items" % (cls.length, len(args)))

        self.items = list(args)

        # cannot check non-class objects
        if isclass(cls.elem_type):
            for i, item in enumerate(self.items):
                if not isinstance(item, cls.elem_type):
                    raise TypeError("Typed vector cannot hold differently typed value"
                                    " at index %d. Got type: %s, expected type: %s" % (i, type(item), cls.elem_type))

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)

    def __repr__(self):
        return repr({'length': self.__class__.length, 'items': self.items})

    def __getitem__(self, key):
        return self.items[key]

    def __setitem__(self, key, value):
        self.items[key] = value

    def __iter__(self):
        return iter(self.items)

    def __len__(self):
        return len(self.items)

    def __eq__(self, other):
        return self.hash_tree_root() == other.hash_tree_root()
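Editor's usage sketch for Vector (not part of the commit; Vec4 is a hypothetical alias). Vector[elem_type, length] builds a new type via the metaclass; instantiating it with no arguments fills it with zero values:

from eth2spec.utils.ssz.ssz_typing import Vector, uint64

Vec4 = Vector[uint64, 4]             # hypothetical alias for readability

v = Vec4(1, 2, 3, 4)
assert len(v) == 4 and v[2] == 3
assert list(Vec4()) == [0, 0, 0, 0]  # zero-filled default

# Fixed-size vectors of uints serialize to length * byte_size bytes.
assert len(v.serialize()) == 4 * 8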
# SSZ BytesN
# -----------------------------


def _is_bytes_n_instance_of(a, b):
    # Other has to be a Bytes derivative class to be a BytesN
    if not issubclass(b, bytes):
        return False
    elif not hasattr(b, 'length'):
        # BytesN (b) is not an instance of BytesN[X] (a)
        return False
    elif not hasattr(a, 'length'):
        # BytesN[X] (b) is an instance of BytesN (a)
        return True
    else:
        # BytesN[X] (a) is an instance of BytesN[X] (b)
        return a.length == b.length


def _is_equal_bytes_n_type(a, b):
    # Other has to be a Bytes derivative class to be a BytesN
    if not issubclass(b, bytes):
        return False
    elif not hasattr(a, 'length'):
        if not hasattr(b, 'length'):
            # BytesN == BytesN
            return True
        else:
            # BytesN != BytesN[X]
            return False
    elif not hasattr(b, 'length'):
        # BytesN[X] != BytesN
        return False
    else:
        # BytesN[X] == BytesN[X]
        return a.length == b.length


class BytesNMeta(type):
    def __new__(cls, class_name, parents, attrs):
        out = type.__new__(cls, class_name, parents, attrs)
        if 'length' in attrs:
            setattr(out, 'length', attrs['length'])
        out._name = 'BytesN'
        out.elem_type = byte
        return out

    def __getitem__(self, n):
        return self.__class__(self.__name__, (BytesN,), {'length': n})

    def __subclasscheck__(self, sub):
        return _is_bytes_n_instance_of(self, sub)

    def __instancecheck__(self, other):
        return _is_bytes_n_instance_of(self, other.__class__)

    def __eq__(self, other):
        return _is_equal_bytes_n_type(self, other)

    def __ne__(self, other):
        return not _is_equal_bytes_n_type(self, other)

    def __hash__(self):
        return hash(self.__class__)


def parse_bytes(val):
    if val is None:
        return None
    elif isinstance(val, str):
        # TODO: import from eth-utils instead, and do: hexstr_if_str(to_bytes, val)
        return None
    elif isinstance(val, bytes):
        return val
    elif isinstance(val, int):
        return bytes([val])
    else:
        return None


class BytesN(bytes, metaclass=BytesNMeta):
    def __new__(cls, *args):
        if not hasattr(cls, 'length'):
            return
        bytesval = None
        if len(args) == 1:
            val: Union[bytes, int, str] = args[0]
            bytesval = parse_bytes(val)
        elif len(args) > 1:
            # TODO: each int is 1 byte, check size, create bytesval
            bytesval = bytes(args)

        if bytesval is None:
            if cls.length == 0:
                bytesval = b''
            else:
                bytesval = b'\x00' * cls.length
        if len(bytesval) != cls.length:
            raise TypeError("BytesN[%d] cannot be initialized with value of %d bytes" % (cls.length, len(bytesval)))
        return super().__new__(cls, bytesval)

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)
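Editor's usage sketch for BytesN (not part of the commit; Bytes32 mirrors the kind of alias the spec builds on top of this class). Construction is length-checked and defaults to zero bytes:

from eth2spec.utils.ssz.ssz_typing import BytesN

Bytes32 = BytesN[32]                  # hypothetical fixed-length byte string alias

assert Bytes32() == b'\x00' * 32      # zero default
assert Bytes32(b'\x01' * 32)[0] == 1
# Bytes32(b'\x01' * 31)  -> TypeError: BytesN[32] cannot be initialized with value of 31 bytes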
# SSZ Defaults
# -----------------------------
def get_zero_value(typ):
    if is_uint_type(typ):
        return 0
    elif is_list_type(typ):
        return []
    elif is_bool_type(typ):
        return False
    elif is_vector_type(typ):
        return typ()
    elif is_bytesn_type(typ):
        return typ()
    elif is_bytes_type(typ):
        return b''
    elif is_container_type(typ):
        return typ(**{f: get_zero_value(t) for f, t in typ.get_fields()})
    else:
        raise Exception("Type not supported: {}".format(typ))
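Editor's sketch of the defaults produced by get_zero_value (not part of the commit; HypotheticalWrapper is illustrative):

from typing import List
from eth2spec.utils.ssz.ssz_typing import get_zero_value, Container, Vector, BytesN, uint64

class HypotheticalWrapper(Container):  # illustrative only, not a spec type
    count: uint64
    data: bytes

assert get_zero_value(uint64) == 0
assert get_zero_value(List[uint64]) == []
assert get_zero_value(BytesN[4]) == b'\x00\x00\x00\x00'
assert list(get_zero_value(Vector[uint64, 2])) == [0, 0]

zero = get_zero_value(HypotheticalWrapper)
assert zero.count == 0 and zero.data == b''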
# Type helpers
# -----------------------------


def infer_type(obj):
    if is_uint_type(obj.__class__):
        return obj.__class__
    elif isinstance(obj, int):
        return uint64
    elif isinstance(obj, list):
        return List[infer_type(obj[0])]
    elif isinstance(obj, (Vector, Container, bool, BytesN, bytes)):
        return obj.__class__
    else:
        raise Exception("Unknown type for {}".format(obj))


def infer_input_type(fn):
    """
    Decorator to run infer_type on the obj if typ argument is None
    """
    def infer_helper(obj, typ=None, **kwargs):
        if typ is None:
            typ = infer_type(obj)
        return fn(obj, typ=typ, **kwargs)
    return infer_helper
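Editor's sketch of how the infer_input_type decorator plays out in practice (not part of the commit): the SSZ functions in ssz_impl accept an explicit typ, but fall back to infer_type when it is omitted, so plain Python ints are treated as uint64:

from eth2spec.utils.ssz.ssz_impl import serialize, hash_tree_root

# No explicit typ: 3 is inferred as a uint64-sized int and serialized to 8 little-endian bytes.
assert serialize(3) == (3).to_bytes(8, 'little')

# A plain Python list infers its element type from the first element.
assert serialize([1, 2]) == (1).to_bytes(8, 'little') + (2).to_bytes(8, 'little')

# hash_tree_root of a list mixes in the length, so identical chunk data of
# different lengths still produces different roots.
assert hash_tree_root([1, 2]) != hash_tree_root([1, 2, 0])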
def is_bool_type(typ):
    """
    Check if the given type is a bool.
    """
    if hasattr(typ, '__supertype__'):
        typ = typ.__supertype__
    return isinstance(typ, type) and issubclass(typ, bool)


def is_list_type(typ):
    """
    Check if the given type is a list.
    """
    return get_origin(typ) is List or get_origin(typ) is list


def is_bytes_type(typ):
    """
    Check if the given type is a ``bytes``.
    """
    # Do not accept subclasses of bytes here, to avoid confusion with BytesN
    return typ == bytes


def is_bytesn_type(typ):
    """
    Check if the given type is a BytesN.
    """
    return isinstance(typ, type) and issubclass(typ, BytesN)


def is_list_kind(typ):
    """
    Check if the given type is a kind of list. Can be bytes.
    """
    return is_list_type(typ) or is_bytes_type(typ)


def is_vector_type(typ):
    """
    Check if the given type is a vector.
    """
    return isinstance(typ, type) and issubclass(typ, Vector)


def is_vector_kind(typ):
    """
    Check if the given type is a kind of vector. Can be BytesN.
    """
    return is_vector_type(typ) or is_bytesn_type(typ)


def is_container_type(typ):
    """
    Check if the given type is a container.
    """
    return isinstance(typ, type) and issubclass(typ, Container)


T = TypeVar('T')
L = TypeVar('L')


def read_list_elem_type(list_typ: Type[List[T]]) -> T:
    if list_typ.__args__ is None or len(list_typ.__args__) != 1:
        raise TypeError("Supplied list-type is invalid, no element type found.")
    return list_typ.__args__[0]


def read_vector_elem_type(vector_typ: Type[Vector[T, L]]) -> T:
    return vector_typ.elem_type


def read_elem_type(typ):
    if typ == bytes:
        return byte
    elif is_list_type(typ):
        return read_list_elem_type(typ)
    elif is_vector_type(typ):
        return read_vector_elem_type(typ)
    elif issubclass(typ, bytes):  # bytes or bytesN
        return byte
    else:
        raise TypeError("Unexpected type: {}".format(typ))
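Finally, an editor's sketch of the element-type helpers above (not part of the commit): bytes and BytesN resolve to byte, lists and vectors to their declared element type:

from typing import List
from eth2spec.utils.ssz.ssz_typing import read_elem_type, byte, uint64, Vector, BytesN

assert read_elem_type(bytes) is byte
assert read_elem_type(BytesN[48]) is byte
assert read_elem_type(List[uint64]) is uint64
assert read_elem_type(Vector[uint64, 4]) is uint64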
@ -1,3 +1,4 @@
-r requirements.txt
pytest>=3.6,<3.7
../config_helpers
flake8==3.7.7
@ -2,3 +2,4 @@ eth-utils>=1.3.0,<2
eth-typing>=2.1.0,<3.0.0
pycryptodome==3.7.3
py_ecc>=1.6.0
typing_inspect==0.4.0
@ -9,5 +9,6 @@ setup(
        "eth-typing>=2.1.0,<3.0.0",
        "pycryptodome==3.7.3",
        "py_ecc>=1.6.0",
        "typing_inspect==0.4.0"
    ]
)