Mirror of https://github.com/status-im/eth2.0-specs.git, synced 2025-02-21 06:48:12 +00:00

Merge pull request #1077 from ethereum/ssz-impl-rework

SSZ implementation for exec. spec - Support for Python 3 typing.

Commit e8b4c4c57f
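The gist of the rework: container and helper code in the executable spec moves from the dict-based `minimal_ssz` module to a typed `ssz_impl`/`ssz_typing` package. As a quick orientation before the file-by-file diff, the sketch below contrasts the two declaration styles using the `Fork` container that appears later in this diff; it is illustrative only.

```python
# Old style (removed eth2spec.utils.minimal_ssz): containers are dicts of
# field name -> SSZ type string, wrapped by the SSZType factory.
Fork = SSZType({
    'previous_version': 'bytes4',
    'current_version': 'bytes4',
    'epoch': 'uint64',
})


# New style (eth2spec.utils.ssz.ssz_typing): containers are plain classes
# with Python 3 type annotations.
class Fork(Container):
    previous_version: Bytes4
    current_version: Bytes4
    epoch: uint64
```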
@@ -60,13 +60,13 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Install pyspec requirements
command: make install_test && make install_lint
command: make install_test
- save_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
venv_path: ./test_libs/pyspec/venv
test:

@@ -77,7 +77,7 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Run py-tests

@@ -92,7 +92,7 @@ jobs:
- restore_cache:
key: v1-specs-repo-{{ .Branch }}-{{ .Revision }}
- restore_cached_venv:
venv_name: v1-pyspec-03
venv_name: v2-pyspec
reqs_checksum: '{{ checksum "test_libs/pyspec/requirements.txt" }}-{{ checksum "test_libs/pyspec/requirements-testing.txt" }}'
- run:
name: Run linter
Makefile (5 lines changed)
@@ -39,12 +39,9 @@ test: $(PY_SPEC_ALL_TARGETS)
citest: $(PY_SPEC_ALL_TARGETS)
cd $(PY_SPEC_DIR); mkdir -p test-reports/eth2spec; . venv/bin/activate; python -m pytest --junitxml=test-reports/eth2spec/test_results.xml .

install_lint:
cd $(PY_SPEC_DIR); python3 -m venv venv; . venv/bin/activate; pip3 install flake8==3.5.0

lint: $(PY_SPEC_ALL_TARGETS)
cd $(PY_SPEC_DIR); . venv/bin/activate; \
flake8 --max-line-length=120 ./eth2spec;
flake8 --ignore=E252,W504 --max-line-length=120 ./eth2spec;

# "make pyspec" to create the pyspec for all phases.
pyspec: $(PY_SPEC_ALL_TARGETS)
@@ -13,11 +13,14 @@ from typing import (
NewType,
Tuple,
)
from eth2spec.utils.minimal_ssz import (
SSZType,
from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root,
signing_root,
)
from eth2spec.utils.ssz.ssz_typing import (
# unused: uint8, uint16, uint32, uint128, uint256,
uint64, Container, Vector, BytesN
)
from eth2spec.utils.hash_function import hash
from eth2spec.utils.bls import (
bls_aggregate_pubkeys,

@@ -25,19 +28,18 @@ from eth2spec.utils.bls import (
bls_verify_multiple,
)

# stub, will get overwritten by real var
SLOTS_PER_EPOCH = 64

# Note: 'int' type defaults to being interpreted as a uint64 by SSZ implementation.
Slot = NewType('Slot', int)  # uint64
Epoch = NewType('Epoch', int)  # uint64
Shard = NewType('Shard', int)  # uint64
ValidatorIndex = NewType('ValidatorIndex', int)  # uint64
Gwei = NewType('Gwei', int)  # uint64
Bytes32 = NewType('Bytes32', bytes)  # bytes32
BLSPubkey = NewType('BLSPubkey', bytes)  # bytes48
BLSSignature = NewType('BLSSignature', bytes)  # bytes96
Store = None

Bytes4 = BytesN[4]
Bytes32 = BytesN[32]
Bytes48 = BytesN[48]
Bytes96 = BytesN[96]

""")

code_lines += function_puller.get_spec(sourcefile)
@@ -1,4 +1,3 @@
import sys
from typing import List

@@ -6,9 +5,11 @@ def get_spec(file_name: str) -> List[str]:
code_lines = []
pulling_from = None
current_name = None
# list of current type definition being parsed, or None otherwise
current_typedef = None
# list of (name, definition lines list) tuples.
type_defs = []
for linenum, line in enumerate(open(sys.argv[1]).readlines()):
for linenum, line in enumerate(open(file_name).readlines()):
line = line.rstrip()
if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
current_name = line[line[:-1].rfind('`') + 1: -1]

@@ -19,26 +20,25 @@ def get_spec(file_name: str) -> List[str]:
if pulling_from is None:
pulling_from = linenum
else:
if current_typedef is not None:
assert code_lines[-1] == '}'
code_lines[-1] = '})'
current_typedef[-1] = '})'
type_defs.append((current_name, current_typedef))
pulling_from = None
current_typedef = None
if current_typedef is not None:
type_defs.append((current_name, current_typedef))
current_typedef = None
else:
if pulling_from == linenum and line == '{':
code_lines.append('%s = SSZType({' % current_name)
current_typedef = ['global_vars["%s"] = SSZType({' % current_name]
elif pulling_from is not None:
if pulling_from is not None:
# Add some whitespace between functions
if line[:3] == 'def':
if line[:3] == 'def' or line[:5] == 'class':
code_lines.append('')
code_lines.append('')
code_lines.append(line)
# Remember type def lines
# Check for SSZ type definitions
if len(line) > 18 and line[:6] == 'class ' and line[-12:] == '(Container):':
name = line[6:-12]
# Check consistency with markdown header
assert name == current_name
current_typedef = []
if current_typedef is not None:
current_typedef.append(line)
code_lines.append(line)
elif pulling_from is None and len(line) > 0 and line[0] == '|':
row = line[1:].split('|')
if len(row) >= 2:

@@ -56,6 +56,11 @@ def get_spec(file_name: str) -> List[str]:
code_lines.append(row[0] + ' = ' + (row[1].replace('**TBD**', '0x1234567890123456789012345678901234567890')))
# Build type-def re-initialization
code_lines.append('\n')
code_lines.append('ssz_types = [\n')
for (ssz_type_name, _) in type_defs:
code_lines.append(f'    {ssz_type_name},')
code_lines.append(']')
code_lines.append('\n')
code_lines.append('def init_SSZ_types():')
code_lines.append('    global_vars = globals()')
for ssz_type_name, ssz_type in type_defs:

@@ -63,13 +68,14 @@ def get_spec(file_name: str) -> List[str]:
for type_line in ssz_type:
if len(type_line) > 0:
code_lines.append('    ' + type_line)
code_lines.append('\n')
code_lines.append('ssz_types = [\n')
for (ssz_type_name, _) in type_defs:
code_lines.append(f'    "{ssz_type_name}",\n')
code_lines.append(']')
code_lines.append(f'    global_vars["{ssz_type_name}"] = {ssz_type_name}')
code_lines.append('    global_vars["ssz_types"] = [')
for (ssz_type_name, _) in type_defs:
code_lines.append(f'        "{ssz_type_name}",')
code_lines.append('    ]')
code_lines.append('\n')
code_lines.append('def get_ssz_type_by_name(name: str) -> SSZType:')
code_lines.append('def get_ssz_type_by_name(name: str) -> Container:')
code_lines.append('    return globals()[name]')
code_lines.append('')
return code_lines
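To make the new function_puller behaviour concrete, here is a hedged sketch of the epilogue it now emits at the end of the generated spec module, reconstructed from the `code_lines.append(...)` calls above. A single container (`Fork`) stands in for the full list; the exact whitespace of the real output may differ.

```python
# Illustrative shape of the generated re-initialization scaffold (not verbatim output).

def init_SSZ_types():
    global_vars = globals()

    # Each pulled `class X(Container):` definition is replayed here, indented...
    class Fork(Container):
        previous_version: Bytes4
        current_version: Bytes4
        epoch: uint64
    # ...and then published into the module's globals.
    global_vars["Fork"] = Fork

    # Finally the list of type names is exposed for test tooling.
    global_vars["ssz_types"] = [
        "Fork",
    ]


def get_ssz_type_by_name(name: str) -> Container:
    return globals()[name]
```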
@@ -266,162 +266,151 @@ The types are defined topologically to aid in facilitating an executable version

#### `Fork`

```python
{
class Fork(Container):
# Previous fork version
'previous_version': 'bytes4',
previous_version: Bytes4
# Current fork version
'current_version': 'bytes4',
current_version: Bytes4
# Fork epoch number
'epoch': 'uint64',
}
epoch: uint64
```

#### `Crosslink`

```python
{
class Crosslink(Container):
# Shard number
'shard': 'uint64',
shard: uint64
# Crosslinking data from epochs [start....end-1]
'start_epoch': 'uint64',
'end_epoch': 'uint64',
start_epoch: uint64
end_epoch: uint64
# Root of the previous crosslink
'parent_root': 'bytes32',
parent_root: Bytes32
# Root of the crosslinked shard data since the previous crosslink
'data_root': 'bytes32',
}
data_root: Bytes32
```

#### `Eth1Data`

```python
{
class Eth1Data(Container):
# Root of the deposit tree
'deposit_root': 'bytes32',
deposit_root: Bytes32
# Total number of deposits
'deposit_count': 'uint64',
deposit_count: uint64
# Block hash
'block_hash': 'bytes32',
}
block_hash: Bytes32
```

#### `AttestationData`

```python
{
class AttestationData(Container):
# LMD GHOST vote
'beacon_block_root': 'bytes32',
beacon_block_root: Bytes32

# FFG vote
'source_epoch': 'uint64',
'source_root': 'bytes32',
'target_epoch': 'uint64',
'target_root': 'bytes32',
source_epoch: uint64
source_root: Bytes32
target_epoch: uint64
target_root: Bytes32

# Crosslink vote
'crosslink': Crosslink,
}
crosslink: Crosslink
```

#### `AttestationDataAndCustodyBit`

```python
{
class AttestationDataAndCustodyBit(Container):
# Attestation data
'data': AttestationData,
data: AttestationData
# Custody bit
'custody_bit': 'bool',
}
custody_bit: bool
```

#### `IndexedAttestation`

```python
{
class IndexedAttestation(Container):
# Validator indices
'custody_bit_0_indices': ['uint64'],
'custody_bit_1_indices': ['uint64'],
custody_bit_0_indices: List[uint64]
custody_bit_1_indices: List[uint64]
# Attestation data
'data': AttestationData,
data: AttestationData
# Aggregate signature
'signature': 'bytes96',
}
signature: Bytes96
```

#### `DepositData`

```python
{
class DepositData(Container):
# BLS pubkey
'pubkey': 'bytes48',
pubkey: Bytes48
# Withdrawal credentials
'withdrawal_credentials': 'bytes32',
withdrawal_credentials: Bytes32
# Amount in Gwei
'amount': 'uint64',
amount: uint64
# Container self-signature
'signature': 'bytes96',
}
signature: Bytes96
```

#### `BeaconBlockHeader`

```python
{
'slot': 'uint64',
'parent_root': 'bytes32',
'state_root': 'bytes32',
'body_root': 'bytes32',
'signature': 'bytes96',
}
class BeaconBlockHeader(Container):
slot: uint64
parent_root: Bytes32
state_root: Bytes32
body_root: Bytes32
signature: Bytes96
```

#### `Validator`

```python
{
class Validator(Container):
# BLS public key
'pubkey': 'bytes48',
pubkey: Bytes48
# Withdrawal credentials
'withdrawal_credentials': 'bytes32',
withdrawal_credentials: Bytes32
# Epoch when became eligible for activation
'activation_eligibility_epoch': 'uint64',
activation_eligibility_epoch: uint64
# Epoch when validator activated
'activation_epoch': 'uint64',
activation_epoch: uint64
# Epoch when validator exited
'exit_epoch': 'uint64',
exit_epoch: uint64
# Epoch when validator is eligible to withdraw
'withdrawable_epoch': 'uint64',
withdrawable_epoch: uint64
# Was the validator slashed
'slashed': 'bool',
slashed: bool
# Effective balance
'effective_balance': 'uint64',
}
effective_balance: uint64
```

#### `PendingAttestation`

```python
{
class PendingAttestation(Container):
# Attester aggregation bitfield
'aggregation_bitfield': 'bytes',
aggregation_bitfield: bytes
# Attestation data
'data': AttestationData,
data: AttestationData
# Inclusion delay
'inclusion_delay': 'uint64',
inclusion_delay: uint64
# Proposer index
'proposer_index': 'uint64',
}
proposer_index: uint64
```

#### `HistoricalBatch`

```python
{
class HistoricalBatch(Container):
# Block roots
'block_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
block_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
# State roots
'state_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
}
state_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
```

### Beacon operations
@@ -429,85 +418,79 @@ The types are defined topologically to aid in facilitating an executable version

#### `ProposerSlashing`

```python
{
class ProposerSlashing(Container):
# Proposer index
'proposer_index': 'uint64',
proposer_index: uint64
# First block header
'header_1': BeaconBlockHeader,
header_1: BeaconBlockHeader
# Second block header
'header_2': BeaconBlockHeader,
}
header_2: BeaconBlockHeader
```

#### `AttesterSlashing`

```python
{
class AttesterSlashing(Container):
# First attestation
'attestation_1': IndexedAttestation,
attestation_1: IndexedAttestation
# Second attestation
'attestation_2': IndexedAttestation,
}
attestation_2: IndexedAttestation
```

#### `Attestation`

```python
{
class Attestation(Container):
# Attester aggregation bitfield
'aggregation_bitfield': 'bytes',
aggregation_bitfield: bytes
# Attestation data
'data': AttestationData,
data: AttestationData
# Custody bitfield
'custody_bitfield': 'bytes',
custody_bitfield: bytes
# BLS aggregate signature
'signature': 'bytes96',
}
signature: Bytes96
```

#### `Deposit`

```python
{
class Deposit(Container):
# Branch in the deposit tree
'proof': ['bytes32', DEPOSIT_CONTRACT_TREE_DEPTH],
proof: Vector[Bytes32, DEPOSIT_CONTRACT_TREE_DEPTH]
# Data
'data': DepositData,
}
data: DepositData
```

#### `VoluntaryExit`

```python
{
class VoluntaryExit(Container):
# Minimum epoch for processing exit
'epoch': 'uint64',
epoch: uint64
# Index of the exiting validator
'validator_index': 'uint64',
validator_index: uint64
# Validator signature
'signature': 'bytes96',
}
signature: Bytes96
```

#### `Transfer`

```python
{
class Transfer(Container):
# Sender index
'sender': 'uint64',
sender: uint64
# Recipient index
'recipient': 'uint64',
recipient: uint64
# Amount in Gwei
'amount': 'uint64',
amount: uint64
# Fee in Gwei for block proposer
'fee': 'uint64',
fee: uint64
# Inclusion slot
'slot': 'uint64',
slot: uint64
# Sender withdrawal pubkey
'pubkey': 'bytes48',
pubkey: Bytes48
# Sender signature
'signature': 'bytes96',
}
signature: Bytes96
```

### Beacon blocks
@@ -515,30 +498,28 @@ The types are defined topologically to aid in facilitating an executable version

#### `BeaconBlockBody`

```python
{
'randao_reveal': 'bytes96',
'eth1_data': Eth1Data,
'graffiti': 'bytes32',
'proposer_slashings': [ProposerSlashing],
'attester_slashings': [AttesterSlashing],
'attestations': [Attestation],
'deposits': [Deposit],
'voluntary_exits': [VoluntaryExit],
'transfers': [Transfer],
}
class BeaconBlockBody(Container):
randao_reveal: Bytes96
eth1_data: Eth1Data
graffiti: Bytes32
proposer_slashings: List[ProposerSlashing]
attester_slashings: List[AttesterSlashing]
attestations: List[Attestation]
deposits: List[Deposit]
voluntary_exits: List[VoluntaryExit]
transfers: List[Transfer]
```

#### `BeaconBlock`

```python
{
class BeaconBlock(Container):
# Header
'slot': 'uint64',
'parent_root': 'bytes32',
'state_root': 'bytes32',
'body': BeaconBlockBody,
'signature': 'bytes96',
}
slot: uint64
parent_root: Bytes32
state_root: Bytes32
body: BeaconBlockBody
signature: Bytes96
```

### Beacon state
@@ -546,46 +527,40 @@ The types are defined topologically to aid in facilitating an executable version

#### `BeaconState`

```python
{
class BeaconState(Container):
# Misc
'slot': 'uint64',
'genesis_time': 'uint64',
'fork': Fork, # For versioning hard forks
slot: uint64
genesis_time: uint64
fork: Fork # For versioning hard forks
# Validator registry
'validator_registry': [Validator],
'balances': ['uint64'],
validator_registry: List[Validator]
balances: List[uint64]
# Randomness and committees
'latest_randao_mixes': ['bytes32', LATEST_RANDAO_MIXES_LENGTH],
'latest_start_shard': 'uint64',
latest_randao_mixes: Vector[Bytes32, LATEST_RANDAO_MIXES_LENGTH]
latest_start_shard: uint64
# Finality
'previous_epoch_attestations': [PendingAttestation],
'current_epoch_attestations': [PendingAttestation],
'previous_justified_epoch': 'uint64',
'current_justified_epoch': 'uint64',
'previous_justified_root': 'bytes32',
'current_justified_root': 'bytes32',
'justification_bitfield': 'uint64',
'finalized_epoch': 'uint64',
'finalized_root': 'bytes32',
previous_epoch_attestations: List[PendingAttestation]
current_epoch_attestations: List[PendingAttestation]
previous_justified_epoch: uint64
current_justified_epoch: uint64
previous_justified_root: Bytes32
current_justified_root: Bytes32
justification_bitfield: uint64
finalized_epoch: uint64
finalized_root: Bytes32
# Recent state
'current_crosslinks': [Crosslink, SHARD_COUNT],
'previous_crosslinks': [Crosslink, SHARD_COUNT],
'latest_block_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
'latest_state_roots': ['bytes32', SLOTS_PER_HISTORICAL_ROOT],
'latest_active_index_roots': ['bytes32', LATEST_ACTIVE_INDEX_ROOTS_LENGTH],
'latest_slashed_balances': ['uint64', LATEST_SLASHED_EXIT_LENGTH],
'latest_block_header': BeaconBlockHeader,
'historical_roots': ['bytes32'],
current_crosslinks: Vector[Crosslink, SHARD_COUNT]
previous_crosslinks: Vector[Crosslink, SHARD_COUNT]
latest_block_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
latest_state_roots: Vector[Bytes32, SLOTS_PER_HISTORICAL_ROOT]
latest_active_index_roots: Vector[Bytes32, LATEST_ACTIVE_INDEX_ROOTS_LENGTH]
latest_slashed_balances: Vector[uint64, LATEST_SLASHED_EXIT_LENGTH]
latest_block_header: BeaconBlockHeader
historical_roots: List[Bytes32]
# Ethereum 1.0 chain data
'latest_eth1_data': Eth1Data,
'eth1_data_votes': [Eth1Data],
'deposit_index': 'uint64',
}
latest_eth1_data: Eth1Data
eth1_data_votes: List[Eth1Data]
deposit_index: uint64
```

## Custom types
@@ -599,9 +574,8 @@ We define the following Python custom types for type hinting and readability:
| `Shard` | `uint64` | a shard number |
| `ValidatorIndex` | `uint64` | a validator registry index |
| `Gwei` | `uint64` | an amount in Gwei |
| `Bytes32` | `bytes32` | 32 bytes of binary data |
| `BLSPubkey` | `bytes48` | a BLS12-381 public key |
| `BLSSignature` | `bytes96` | a BLS12-381 signature |
| `BLSPubkey` | `Bytes48` | a BLS12-381 public key |
| `BLSSignature` | `Bytes96` | a BLS12-381 signature |

## Helper functions

@@ -611,7 +585,7 @@ We define the following Python custom types for type hinting and readability:

```python
def xor(bytes1: Bytes32, bytes2: Bytes32) -> Bytes32:
return bytes(a ^ b for a, b in zip(bytes1, bytes2))
return Bytes32(a ^ b for a, b in zip(bytes1, bytes2))
```

### `hash`

@@ -626,7 +600,7 @@ The `hash` function is SHA256.

### `signing_root`

`def signing_root(object: SSZContainer) -> Bytes32` is a function defined in the [SimpleSerialize spec](../simple-serialize.md#self-signed-containers) to compute signing messages.
`def signing_root(object: Container) -> Bytes32` is a function defined in the [SimpleSerialize spec](../simple-serialize.md#self-signed-containers) to compute signing messages.

### `bls_domain`
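With the containers now ordinary classes, the executable spec can be exercised directly with the reworked SSZ helpers. A minimal sketch, assuming the generated `eth2spec.phase0.spec` module exposes the `Fork` class defined above (field values are arbitrary placeholders):

```python
from eth2spec.phase0.spec import Fork
from eth2spec.utils.ssz.ssz_impl import hash_tree_root

# Construct a Fork with placeholder values for all three fields.
fork = Fork(
    previous_version=b'\x00' * 4,
    current_version=b'\x01' * 4,
    epoch=5,
)

# hash_tree_root takes (value, type), matching its use in the debug helpers below.
root = hash_tree_root(fork, Fork)
assert len(root) == 32
```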
@@ -42,8 +42,8 @@ if __name__ == "__main__":
create_suite('attester_slashing', 'mainnet', lambda: generate_from_tests(test_process_attester_slashing)),
create_suite('block_header', 'minimal', lambda: generate_from_tests(test_process_block_header)),
create_suite('block_header', 'mainnet', lambda: generate_from_tests(test_process_block_header)),
create_suite('deposit', 'minimal', lambda: generate_from_tests(test_process_deposit)),
create_suite('deposit', 'mainnet', lambda: generate_from_tests(test_process_deposit)),
create_suite('deposit', 'minimal', lambda: generate_from_tests(test_process_deposit)),
create_suite('deposit', 'mainnet', lambda: generate_from_tests(test_process_deposit)),
create_suite('proposer_slashing', 'minimal', lambda: generate_from_tests(test_process_proposer_slashing)),
create_suite('proposer_slashing', 'mainnet', lambda: generate_from_tests(test_process_proposer_slashing)),
create_suite('transfer', 'minimal', lambda: generate_from_tests(test_process_transfer)),

@@ -2,7 +2,7 @@ from random import Random

from eth2spec.debug import random_value, encode
from eth2spec.phase0 import spec
from eth2spec.utils.minimal_ssz import (
from eth2spec.utils.ssz.ssz_impl import (
hash_tree_root,
signing_root,
serialize,
@@ -1,28 +1,39 @@
from eth2spec.utils.minimal_ssz import hash_tree_root
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
is_uint_type, is_bool_type, is_list_type,
is_vector_type, is_bytes_type, is_bytesn_type, is_container_type,
read_vector_elem_type, read_list_elem_type,
Vector, BytesN
)

def decode(json, typ):
if isinstance(typ, str) and typ[:4] == 'uint':
return json
elif typ == 'bool':
assert json in (True, False)
return json
elif isinstance(typ, list):
return [decode(element, typ[0]) for element in json]
elif isinstance(typ, str) and typ[:4] == 'byte':
return bytes.fromhex(json[2:])
elif hasattr(typ, 'fields'):
def decode(data, typ):
if is_uint_type(typ):
return data
elif is_bool_type(typ):
assert data in (True, False)
return data
elif is_list_type(typ):
elem_typ = read_list_elem_type(typ)
return [decode(element, elem_typ) for element in data]
elif is_vector_type(typ):
elem_typ = read_vector_elem_type(typ)
return Vector(decode(element, elem_typ) for element in data)
elif is_bytes_type(typ):
return bytes.fromhex(data[2:])
elif is_bytesn_type(typ):
return BytesN(bytes.fromhex(data[2:]))
elif is_container_type(typ):
temp = {}
for field, subtype in typ.fields.items():
temp[field] = decode(json[field], subtype)
if field + "_hash_tree_root" in json:
assert(json[field + "_hash_tree_root"][2:] ==
for field, subtype in typ.get_fields():
temp[field] = decode(data[field], subtype)
if field + "_hash_tree_root" in data:
assert(data[field + "_hash_tree_root"][2:] ==
hash_tree_root(temp[field], subtype).hex())
ret = typ(**temp)
if "hash_tree_root" in json:
assert(json["hash_tree_root"][2:] ==
if "hash_tree_root" in data:
assert(data["hash_tree_root"][2:] ==
hash_tree_root(ret, typ).hex())
return ret
else:
print(json, typ)
raise Exception("Type not recognized")
raise Exception(f"Type not recognized: data={data}, typ={typ}")
@@ -1,27 +1,36 @@
from eth2spec.utils.minimal_ssz import hash_tree_root
from eth2spec.utils.ssz.ssz_impl import hash_tree_root
from eth2spec.utils.ssz.ssz_typing import (
is_uint_type, is_bool_type, is_list_type, is_vector_type, is_container_type,
read_elem_type,
uint
)

def encode(value, typ, include_hash_tree_roots=False):
if isinstance(typ, str) and typ[:4] == 'uint':
if typ[4:] == '128' or typ[4:] == '256':
if is_uint_type(typ):
if hasattr(typ, '__supertype__'):
typ = typ.__supertype__
# Larger uints are boxed and the class declares their byte length
if issubclass(typ, uint) and typ.byte_len > 8:
return str(value)
return value
elif typ == 'bool':
elif is_bool_type(typ):
assert value in (True, False)
return value
elif isinstance(typ, list):
return [encode(element, typ[0], include_hash_tree_roots) for element in value]
elif isinstance(typ, str) and typ[:4] == 'byte':
elif is_list_type(typ) or is_vector_type(typ):
elem_typ = read_elem_type(typ)
return [encode(element, elem_typ, include_hash_tree_roots) for element in value]
elif isinstance(typ, type) and issubclass(typ, bytes):  # both bytes and BytesN
return '0x' + value.hex()
elif hasattr(typ, 'fields'):
elif is_container_type(typ):
ret = {}
for field, subtype in typ.fields.items():
ret[field] = encode(getattr(value, field), subtype, include_hash_tree_roots)
for field, subtype in typ.get_fields():
field_value = getattr(value, field)
ret[field] = encode(field_value, subtype, include_hash_tree_roots)
if include_hash_tree_roots:
ret[field + "_hash_tree_root"] = '0x' + hash_tree_root(getattr(value, field), subtype).hex()
ret[field + "_hash_tree_root"] = '0x' + hash_tree_root(field_value, subtype).hex()
if include_hash_tree_roots:
ret["hash_tree_root"] = '0x' + hash_tree_root(value, typ).hex()
return ret
else:
print(value, typ)
raise Exception("Type not recognized")
raise Exception(f"Type not recognized: value={value}, typ={typ}")
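The encode/decode pair above is intended to be symmetric, so a natural smoke test is a round trip through the JSON-friendly representation. A hedged sketch, assuming the helpers live in `eth2spec.debug.encode` and `eth2spec.debug.decode` and reusing the `Fork` container from the spec section:

```python
from eth2spec.debug.decode import decode
from eth2spec.debug.encode import encode
from eth2spec.phase0.spec import Fork
from eth2spec.utils.ssz.ssz_impl import hash_tree_root

fork = Fork(previous_version=b'\x00' * 4, current_version=b'\x01' * 4, epoch=5)

# encode() maps the container to plain dicts/ints/hex strings and, when asked,
# attaches per-field and whole-object hash_tree_root values.
data = encode(fork, Fork, include_hash_tree_roots=True)

# decode() rebuilds the typed object and re-checks any embedded roots.
decoded = decode(data, Fork)
assert hash_tree_root(decoded, Fork) == hash_tree_root(fork, Fork)
```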
@@ -2,12 +2,19 @@ from random import Random
from typing import Any
from enum import Enum

from eth2spec.utils.ssz.ssz_impl import is_basic_type

UINT_SIZES = [8, 16, 32, 64, 128, 256]
from eth2spec.utils.ssz.ssz_typing import (
is_uint_type, is_bool_type, is_list_type,
is_vector_type, is_bytes_type, is_bytesn_type, is_container_type,
read_vector_elem_type, read_list_elem_type,
uint_byte_size
)

basic_types = ["uint%d" % v for v in UINT_SIZES] + ['bool', 'byte']
# in bytes
UINT_SIZES = (1, 2, 4, 8, 16, 32)

random_mode_names = ["random", "zero", "max", "nil", "one", "lengthy"]
random_mode_names = ("random", "zero", "max", "nil", "one", "lengthy")

class RandomizationMode(Enum):

@@ -49,104 +56,103 @@ def get_random_ssz_object(rng: Random,
"""
if chaos:
mode = rng.choice(list(RandomizationMode))
if isinstance(typ, str):
if is_bytes_type(typ):
# Bytes array
if typ == 'bytes':
if mode == RandomizationMode.mode_nil_count:
return b''
if mode == RandomizationMode.mode_max_count:
return get_random_bytes_list(rng, max_bytes_length)
if mode == RandomizationMode.mode_one_count:
return get_random_bytes_list(rng, 1)
if mode == RandomizationMode.mode_zero:
return b'\x00'
if mode == RandomizationMode.mode_max:
return b'\xff'
return get_random_bytes_list(rng, rng.randint(0, max_bytes_length))
elif typ[:5] == 'bytes' and len(typ) > 5:
length = int(typ[5:])
# Sanity, don't generate absurdly big random values
# If a client is aiming to performance-test, they should create a benchmark suite.
assert length <= max_bytes_length
if mode == RandomizationMode.mode_zero:
return b'\x00' * length
if mode == RandomizationMode.mode_max:
return b'\xff' * length
return get_random_bytes_list(rng, length)
# Basic types
if mode == RandomizationMode.mode_nil_count:
return b''
elif mode == RandomizationMode.mode_max_count:
return get_random_bytes_list(rng, max_bytes_length)
elif mode == RandomizationMode.mode_one_count:
return get_random_bytes_list(rng, 1)
elif mode == RandomizationMode.mode_zero:
return b'\x00'
elif mode == RandomizationMode.mode_max:
return b'\xff'
else:
return get_random_bytes_list(rng, rng.randint(0, max_bytes_length))
elif is_bytesn_type(typ):
# BytesN
length = typ.length
# Sanity, don't generate absurdly big random values
# If a client is aiming to performance-test, they should create a benchmark suite.
assert length <= max_bytes_length
if mode == RandomizationMode.mode_zero:
return b'\x00' * length
elif mode == RandomizationMode.mode_max:
return b'\xff' * length
else:
return get_random_bytes_list(rng, length)
elif is_basic_type(typ):
# Basic types
if mode == RandomizationMode.mode_zero:
return get_min_basic_value(typ)
elif mode == RandomizationMode.mode_max:
return get_max_basic_value(typ)
else:
if mode == RandomizationMode.mode_zero:
return get_min_basic_value(typ)
if mode == RandomizationMode.mode_max:
return get_max_basic_value(typ)
return get_random_basic_value(rng, typ)
# Vector:
elif isinstance(typ, list) and len(typ) == 2:
elif is_vector_type(typ):
# Vector
elem_typ = read_vector_elem_type(typ)
return [
get_random_ssz_object(rng, typ[0], max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ[1])
get_random_ssz_object(rng, elem_typ, max_bytes_length, max_list_length, mode, chaos)
for _ in range(typ.length)
]
# List:
elif isinstance(typ, list) and len(typ) == 1:
elif is_list_type(typ):
# List
elem_typ = read_list_elem_type(typ)
length = rng.randint(0, max_list_length)
if mode == RandomizationMode.mode_one_count:
length = 1
if mode == RandomizationMode.mode_max_count:
elif mode == RandomizationMode.mode_max_count:
length = max_list_length

return [
get_random_ssz_object(rng, typ[0], max_bytes_length, max_list_length, mode, chaos)
get_random_ssz_object(rng, elem_typ, max_bytes_length, max_list_length, mode, chaos)
for _ in range(length)
]
# Container:
elif hasattr(typ, 'fields'):
elif is_container_type(typ):
# Container
return typ(**{
field:
get_random_ssz_object(rng, subtype, max_bytes_length, max_list_length, mode, chaos)
for field, subtype in typ.fields.items()
for field, subtype in typ.get_fields()
})
else:
print(typ)
raise Exception("Type not recognized")
raise Exception(f"Type not recognized: typ={typ}")

def get_random_bytes_list(rng: Random, length: int) -> bytes:
return bytes(rng.getrandbits(8) for _ in range(length))

def get_random_basic_value(rng: Random, typ: str) -> Any:
if typ == 'bool':
def get_random_basic_value(rng: Random, typ) -> Any:
if is_bool_type(typ):
return rng.choice((True, False))
if typ[:4] == 'uint':
size = int(typ[4:])
elif is_uint_type(typ):
size = uint_byte_size(typ)
assert size in UINT_SIZES
return rng.randint(0, 2**size - 1)
if typ == 'byte':
return rng.randint(0, 8)
return rng.randint(0, 256**size - 1)
else:
raise ValueError("Not a basic type")
raise ValueError(f"Not a basic type: typ={typ}")

def get_min_basic_value(typ: str) -> Any:
if typ == 'bool':
def get_min_basic_value(typ) -> Any:
if is_bool_type(typ):
return False
if typ[:4] == 'uint':
size = int(typ[4:])
elif is_uint_type(typ):
size = uint_byte_size(typ)
assert size in UINT_SIZES
return 0
if typ == 'byte':
return 0x00
else:
raise ValueError("Not a basic type")
raise ValueError(f"Not a basic type: typ={typ}")

def get_max_basic_value(typ: str) -> Any:
if typ == 'bool':
def get_max_basic_value(typ) -> Any:
if is_bool_type(typ):
return True
if typ[:4] == 'uint':
size = int(typ[4:])
elif is_uint_type(typ):
size = uint_byte_size(typ)
assert size in UINT_SIZES
return 2**size - 1
if typ == 'byte':
return 0xff
return 256**size - 1
else:
raise ValueError("Not a basic type")
raise ValueError(f"Not a basic type: typ={typ}")
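`get_random_ssz_object` is the workhorse of the test-vector generators, and with the type-based dispatch above it can be pointed straight at a spec container class. A usage sketch; the keyword parameter names follow the recursive calls shown in the diff and the concrete values are arbitrary:

```python
from random import Random

from eth2spec.debug.random_value import RandomizationMode, get_random_ssz_object
from eth2spec.phase0 import spec

rng = Random(1234)  # deterministic seed so generated vectors are reproducible

# mode_zero drives basic fields to their minimum values; chaos=False keeps the
# chosen mode instead of re-picking one at random on each call.
block = get_random_ssz_object(
    rng,
    spec.BeaconBlock,
    max_bytes_length=100,
    max_list_length=10,
    mode=RandomizationMode.mode_zero,
    chaos=False,
)
```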
@@ -138,7 +138,7 @@ def test_wrong_deposit_for_deposit_count(state):
pubkey_1,
privkey_1,
spec.MAX_EFFECTIVE_BALANCE,
withdrawal_credentials=b'\x00'*32,
withdrawal_credentials=b'\x00' * 32,
signed=True,
)
deposit_count_1 = len(deposit_data_leaves)

@@ -153,7 +153,7 @@ def test_wrong_deposit_for_deposit_count(state):
pubkey_2,
privkey_2,
spec.MAX_EFFECTIVE_BALANCE,
withdrawal_credentials=b'\x00'*32,
withdrawal_credentials=b'\x00' * 32,
signed=True,
)
@@ -16,7 +16,7 @@ from eth2spec.test.helpers.bitfields import set_bitfield_bit
from eth2spec.test.helpers.block import build_empty_block_for_next_slot, sign_block
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import bls_sign, bls_aggregate_signatures
from eth2spec.utils.minimal_ssz import hash_tree_root
from eth2spec.utils.ssz.ssz_impl import hash_tree_root

def build_attestation_data(state, slot, shard):

@@ -5,7 +5,7 @@ def set_bitfield_bit(bitfield, i):
byte_index = i // 8
bit_index = i % 8
return (
bitfield[:byte_index] +
bytes([bitfield[byte_index] | (1 << bit_index)]) +
bitfield[byte_index + 1:]
bitfield[:byte_index] +
bytes([bitfield[byte_index] | (1 << bit_index)]) +
bitfield[byte_index + 1:]
)
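The bitfields change is indentation-only, but since the helper is fully visible here, a tiny worked example may be useful. The function below is a copy of the helper above, shown with its intended indentation:

```python
def set_bitfield_bit(bitfield, i):
    byte_index = i // 8
    bit_index = i % 8
    return (
        bitfield[:byte_index] +
        bytes([bitfield[byte_index] | (1 << bit_index)]) +
        bitfield[byte_index + 1:]
    )


assert set_bitfield_bit(b'\x00', 3) == b'\x08'          # bit 3 of byte 0: 1 << 3
assert set_bitfield_bit(b'\x00\x00', 9) == b'\x00\x02'  # bit 1 of byte 1: 1 << 1
```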
@@ -8,7 +8,7 @@ from eth2spec.phase0.spec import (
)
from eth2spec.test.helpers.keys import privkeys
from eth2spec.utils.bls import bls_sign, only_with_bls
from eth2spec.utils.minimal_ssz import signing_root, hash_tree_root
from eth2spec.utils.ssz.ssz_impl import signing_root, hash_tree_root

# Fully ignore the function if BLS is off, beacon-proposer index calculation is slow.

@@ -3,7 +3,7 @@ import eth2spec.phase0.spec as spec

from eth2spec.phase0.spec import get_domain
from eth2spec.utils.bls import bls_sign
from eth2spec.utils.minimal_ssz import signing_root
from eth2spec.utils.ssz.ssz_impl import signing_root

def sign_block_header(state, header, privkey):

@@ -5,7 +5,7 @@ from eth2spec.phase0.spec import get_domain, DepositData, verify_merkle_branch,
from eth2spec.test.helpers.keys import pubkeys, privkeys
from eth2spec.utils.bls import bls_sign
from eth2spec.utils.merkle_minimal import calc_merkle_tree_from_leaves, get_merkle_root, get_merkle_proof
from eth2spec.utils.minimal_ssz import signing_root
from eth2spec.utils.ssz.ssz_impl import signing_root

def build_deposit_data(state, pubkey, privkey, amount, withdrawal_credentials, signed=False):

@@ -3,7 +3,7 @@ import eth2spec.phase0.spec as spec

from eth2spec.phase0.spec import Eth1Data, ZERO_HASH, get_active_validator_indices
from eth2spec.test.helpers.keys import pubkeys
from eth2spec.utils.minimal_ssz import hash_tree_root
from eth2spec.utils.ssz.ssz_impl import hash_tree_root

def build_mock_validator(i: int, balance: int):

@@ -5,7 +5,7 @@ from eth2spec.phase0.spec import get_current_epoch, get_active_validator_indices
from eth2spec.test.helpers.keys import pubkeys, privkeys
from eth2spec.test.helpers.state import get_balance
from eth2spec.utils.bls import bls_sign
from eth2spec.utils.minimal_ssz import signing_root
from eth2spec.utils.ssz.ssz_impl import signing_root

def get_valid_transfer(state, slot=None, sender_index=None, amount=None, fee=None, signed=False):

@@ -36,7 +36,7 @@ def get_valid_transfer(state, slot=None, sender_index=None, amount=None, fee=Non

# ensure withdrawal_credentials reproducible
state.validator_registry[transfer.sender].withdrawal_credentials = (
spec.BLS_WITHDRAWAL_PREFIX_BYTE + spec.hash(transfer.pubkey)[1:]
spec.BLS_WITHDRAWAL_PREFIX_BYTE + spec.hash(transfer.pubkey)[1:]
)

return transfer

@@ -3,7 +3,7 @@ import eth2spec.phase0.spec as spec

from eth2spec.phase0.spec import VoluntaryExit, get_domain
from eth2spec.utils.bls import bls_sign
from eth2spec.utils.minimal_ssz import signing_root
from eth2spec.utils.ssz.ssz_impl import signing_root

def build_voluntary_exit(state, epoch, validator_index, privkey, signed=False):
@ -1,9 +1,10 @@
|
||||
from copy import deepcopy
|
||||
from typing import List
|
||||
|
||||
import eth2spec.phase0.spec as spec
|
||||
from eth2spec.utils.bls import bls_sign
|
||||
|
||||
from eth2spec.utils.minimal_ssz import signing_root
|
||||
from eth2spec.utils.ssz.ssz_impl import signing_root
|
||||
from eth2spec.phase0.spec import (
|
||||
# SSZ
|
||||
VoluntaryExit,
|
||||
@ -36,7 +37,7 @@ def test_empty_block_transition(state):
|
||||
yield 'pre', state
|
||||
|
||||
block = build_empty_block_for_next_slot(state, signed=True)
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -54,7 +55,7 @@ def test_skipped_slots(state):
|
||||
block = build_empty_block_for_next_slot(state)
|
||||
block.slot += 3
|
||||
sign_block(state, block)
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -72,7 +73,7 @@ def test_empty_epoch_transition(state):
|
||||
block = build_empty_block_for_next_slot(state)
|
||||
block.slot += spec.SLOTS_PER_EPOCH
|
||||
sign_block(state, block)
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -91,7 +92,7 @@ def test_empty_epoch_transition(state):
|
||||
# block = build_empty_block_for_next_slot(state)
|
||||
# block.slot += spec.SLOTS_PER_EPOCH * 5
|
||||
# sign_block(state, block, proposer_index=0)
|
||||
# yield 'blocks', [block], [spec.BeaconBlock]
|
||||
# yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
#
|
||||
# state_transition(state, block)
|
||||
# yield 'post', state
|
||||
@ -119,7 +120,7 @@ def test_proposer_slashing(state):
|
||||
block = build_empty_block_for_next_slot(state)
|
||||
block.body.proposer_slashings.append(proposer_slashing)
|
||||
sign_block(state, block)
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -152,7 +153,7 @@ def test_attester_slashing(state):
|
||||
block = build_empty_block_for_next_slot(state)
|
||||
block.body.attester_slashings.append(attester_slashing)
|
||||
sign_block(state, block)
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -189,7 +190,7 @@ def test_deposit_in_block(state):
|
||||
block.body.deposits.append(deposit)
|
||||
sign_block(state, block)
|
||||
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -216,7 +217,7 @@ def test_deposit_top_up(state):
|
||||
block.body.deposits.append(deposit)
|
||||
sign_block(state, block)
|
||||
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -252,7 +253,7 @@ def test_attestation(state):
|
||||
sign_block(state, epoch_block)
|
||||
state_transition(state, epoch_block)
|
||||
|
||||
yield 'blocks', [attestation_block, epoch_block], [spec.BeaconBlock]
|
||||
yield 'blocks', [attestation_block, epoch_block], List[spec.BeaconBlock]
|
||||
yield 'post', state
|
||||
|
||||
assert len(state.current_epoch_attestations) == 0
|
||||
@ -298,7 +299,7 @@ def test_voluntary_exit(state):
|
||||
sign_block(state, exit_block)
|
||||
state_transition(state, exit_block)
|
||||
|
||||
yield 'blocks', [initiate_exit_block, exit_block], [spec.BeaconBlock]
|
||||
yield 'blocks', [initiate_exit_block, exit_block], List[spec.BeaconBlock]
|
||||
yield 'post', state
|
||||
|
||||
assert state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
|
||||
@ -326,7 +327,7 @@ def test_transfer(state):
|
||||
block.body.transfers.append(transfer)
|
||||
sign_block(state, block)
|
||||
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
|
||||
state_transition(state, block)
|
||||
yield 'post', state
|
||||
@ -355,7 +356,7 @@ def test_balance_driven_status_transitions(state):
|
||||
sign_block(state, block)
|
||||
state_transition(state, block)
|
||||
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
yield 'post', state
|
||||
|
||||
assert state.validator_registry[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
|
||||
@ -371,7 +372,7 @@ def test_historical_batch(state):
|
||||
block = build_empty_block_for_next_slot(state, signed=True)
|
||||
state_transition(state, block)
|
||||
|
||||
yield 'blocks', [block], [spec.BeaconBlock]
|
||||
yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
yield 'post', state
|
||||
|
||||
assert state.slot == block.slot
|
||||
@ -399,7 +400,7 @@ def test_historical_batch(state):
|
||||
#
|
||||
# state_transition(state, block)
|
||||
#
|
||||
# yield 'blocks', [block], [spec.BeaconBlock]
|
||||
# yield 'blocks', [block], List[spec.BeaconBlock]
|
||||
# yield 'post', state
|
||||
#
|
||||
# assert state.slot % spec.SLOTS_PER_ETH1_VOTING_PERIOD == 0
|
||||
|
@@ -1,4 +1,5 @@
from copy import deepcopy
from typing import List

import eth2spec.phase0.spec as spec
from eth2spec.phase0.spec import (

@@ -86,7 +87,7 @@ def test_finality_rule_4(state):
assert state.finalized_epoch == prev_state.current_justified_epoch
assert state.finalized_root == prev_state.current_justified_root

yield 'blocks', blocks, [spec.BeaconBlock]
yield 'blocks', blocks, List[spec.BeaconBlock]
yield 'post', state

@@ -116,7 +117,7 @@ def test_finality_rule_1(state):
assert state.finalized_epoch == prev_state.previous_justified_epoch
assert state.finalized_root == prev_state.previous_justified_root

yield 'blocks', blocks, [spec.BeaconBlock]
yield 'blocks', blocks, List[spec.BeaconBlock]
yield 'post', state

@@ -148,7 +149,7 @@ def test_finality_rule_2(state):

blocks += new_blocks

yield 'blocks', blocks, [spec.BeaconBlock]
yield 'blocks', blocks, List[spec.BeaconBlock]
yield 'post', state

@@ -197,5 +198,5 @@ def test_finality_rule_3(state):
assert state.finalized_epoch == prev_state.current_justified_epoch
assert state.finalized_root == prev_state.current_justified_root

yield 'blocks', blocks, [spec.BeaconBlock]
yield 'blocks', blocks, List[spec.BeaconBlock]
yield 'post', state
@@ -1,4 +1,5 @@
from hashlib import sha256

def hash(x): return sha256(x).digest()
def hash(x):
return sha256(x).digest()
@@ -1,7 +1,9 @@
from .hash_function import hash

zerohashes = [b'\x00' * 32]
ZERO_BYTES32 = b'\x00' * 32

zerohashes = [ZERO_BYTES32]
for layer in range(1, 32):
zerohashes.append(hash(zerohashes[layer - 1] + zerohashes[layer - 1]))

@@ -28,3 +30,25 @@ def get_merkle_proof(tree, item_index):
subindex = (item_index // 2**i) ^ 1
proof.append(tree[i][subindex] if subindex < len(tree[i]) else zerohashes[i])
return proof

def next_power_of_two(v: int) -> int:
"""
Get the next power of 2. (for 64 bit range ints).
0 is a special case, to have non-empty defaults.
Examples:
0 -> 1, 1 -> 1, 2 -> 2, 3 -> 4, 32 -> 32, 33 -> 64
"""
if v == 0:
return 1
return 1 << (v - 1).bit_length()

def merkleize_chunks(chunks):
tree = chunks[::]
margin = next_power_of_two(len(chunks)) - len(chunks)
tree.extend([ZERO_BYTES32] * margin)
tree = [ZERO_BYTES32] * len(tree) + tree
for i in range(len(tree) // 2 - 1, 0, -1):
tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
return tree[1]
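The new `merkleize_chunks` pads the chunk list with zero chunks up to the next power of two and then hashes pairwise toward the root. The expectations below follow directly from the code above:

```python
from eth2spec.utils.merkle_minimal import ZERO_BYTES32, hash, merkleize_chunks

a = b'\x01' * 32
b = b'\x02' * 32

# One chunk: no padding, no hashing; the root is the chunk itself.
assert merkleize_chunks([a]) == a

# Two chunks: a single pairwise hash.
assert merkleize_chunks([a, b]) == hash(a + b)

# Three chunks: padded to four with one zero chunk, then hashed up two levels.
assert merkleize_chunks([a, b, a]) == hash(hash(a + b) + hash(a + ZERO_BYTES32))
```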
@ -1,331 +0,0 @@
|
||||
from typing import Any
|
||||
|
||||
from .hash_function import hash
|
||||
|
||||
BYTES_PER_CHUNK = 32
|
||||
BYTES_PER_LENGTH_OFFSET = 4
|
||||
ZERO_CHUNK = b'\x00' * BYTES_PER_CHUNK
|
||||
|
||||
|
||||
def SSZType(fields):
|
||||
class SSZObject():
|
||||
def __init__(self, **kwargs):
|
||||
for f, t in fields.items():
|
||||
if f not in kwargs:
|
||||
setattr(self, f, get_zero_value(t))
|
||||
else:
|
||||
setattr(self, f, kwargs[f])
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.fields == other.fields and self.serialize() == other.serialize()
|
||||
|
||||
def __hash__(self):
|
||||
return int.from_bytes(self.hash_tree_root(), byteorder="little")
|
||||
|
||||
def __str__(self):
|
||||
output = []
|
||||
for field in self.fields:
|
||||
output.append(f'{field}: {getattr(self, field)}')
|
||||
return "\n".join(output)
|
||||
|
||||
def serialize(self):
|
||||
return serialize_value(self, self.__class__)
|
||||
|
||||
def hash_tree_root(self):
|
||||
return hash_tree_root(self, self.__class__)
|
||||
|
||||
SSZObject.fields = fields
|
||||
return SSZObject
|
||||
|
||||
|
||||
class Vector():
|
||||
def __init__(self, items):
|
||||
self.items = items
|
||||
self.length = len(items)
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.items[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.items[key] = value
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.items)
|
||||
|
||||
def __len__(self):
|
||||
return self.length
|
||||
|
||||
|
||||
def is_basic(typ):
|
||||
# if not a string, it is a complex, and cannot be basic
|
||||
if not isinstance(typ, str):
|
||||
return False
|
||||
# "uintN": N-bit unsigned integer (where N in [8, 16, 32, 64, 128, 256])
|
||||
elif typ[:4] == 'uint' and typ[4:] in ['8', '16', '32', '64', '128', '256']:
|
||||
return True
|
||||
# "bool": True or False
|
||||
elif typ == 'bool':
|
||||
return True
|
||||
# alias: "byte" -> "uint8"
|
||||
elif typ == 'byte':
|
||||
return True
|
||||
# default
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def is_constant_sized(typ):
|
||||
# basic objects are fixed size by definition
|
||||
if is_basic(typ):
|
||||
return True
|
||||
# dynamic size array type, "list": [elem_type].
|
||||
# Not constant size by definition.
|
||||
elif isinstance(typ, list) and len(typ) == 1:
|
||||
return False
|
||||
# fixed size array type, "vector": [elem_type, length]
|
||||
# Constant size, but only if the elements are.
|
||||
elif isinstance(typ, list) and len(typ) == 2:
|
||||
return is_constant_sized(typ[0])
|
||||
# bytes array (fixed or dynamic size)
|
||||
elif isinstance(typ, str) and typ[:5] == 'bytes':
|
||||
# if no length suffix, it has a dynamic size
|
||||
return typ != 'bytes'
|
||||
# containers are only constant-size if all of the fields are constant size.
|
||||
elif hasattr(typ, 'fields'):
|
||||
for subtype in typ.fields.values():
|
||||
if not is_constant_sized(subtype):
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
raise Exception("Type not recognized")
|
||||
|
||||
|
||||
def coerce_to_bytes(x):
|
||||
if isinstance(x, str):
|
||||
o = x.encode('utf-8')
|
||||
assert len(o) == len(x)
|
||||
return o
|
||||
elif isinstance(x, bytes):
|
||||
return x
|
||||
else:
|
||||
raise Exception("Expecting bytes")
|
||||
|
||||
|
||||
def encode_series(values, types):
|
||||
# Recursively serialize
|
||||
parts = [(is_constant_sized(types[i]), serialize_value(values[i], types[i])) for i in range(len(values))]
|
||||
|
||||
# Compute and check lengths
|
||||
fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
|
||||
for (constant_size, serialized) in parts]
|
||||
variable_lengths = [len(serialized) if not constant_size else 0
|
||||
for (constant_size, serialized) in parts]
|
||||
|
||||
# Check if integer is not out of bounds (Python)
|
||||
assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)
|
||||
|
||||
# Interleave offsets of variable-size parts with fixed-size parts.
|
||||
# Avoid quadratic complexity in calculation of offsets.
|
||||
offset = sum(fixed_lengths)
|
||||
variable_parts = []
|
||||
fixed_parts = []
|
||||
for (constant_size, serialized) in parts:
|
||||
if constant_size:
|
||||
fixed_parts.append(serialized)
|
||||
else:
|
||||
fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
|
||||
variable_parts.append(serialized)
|
||||
offset += len(serialized)
|
||||
|
||||
# Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
|
||||
return b"".join(fixed_parts + variable_parts)
|
||||
|
||||
|
||||
def serialize_value(value, typ=None):
|
||||
if typ is None:
|
||||
typ = infer_type(value)
|
||||
# "uintN"
|
||||
if isinstance(typ, str) and typ[:4] == 'uint':
|
||||
length = int(typ[4:])
|
||||
assert length in (8, 16, 32, 64, 128, 256)
|
||||
return value.to_bytes(length // 8, 'little')
|
||||
# "bool"
|
||||
elif isinstance(typ, str) and typ == 'bool':
|
||||
assert value in (True, False)
|
||||
return b'\x01' if value is True else b'\x00'
|
||||
# Vector
|
||||
elif isinstance(typ, list) and len(typ) == 2:
|
||||
# (regardless of element type, sanity-check if the length reported in the vector type matches the value length)
|
||||
assert len(value) == typ[1]
|
||||
return encode_series(value, [typ[0]] * len(value))
|
||||
# List
|
||||
elif isinstance(typ, list) and len(typ) == 1:
|
||||
return encode_series(value, [typ[0]] * len(value))
|
||||
# "bytes" (variable size)
|
||||
elif isinstance(typ, str) and typ == 'bytes':
|
||||
return coerce_to_bytes(value)
|
||||
# "bytesN" (fixed size)
|
||||
elif isinstance(typ, str) and len(typ) > 5 and typ[:5] == 'bytes':
|
||||
assert len(value) == int(typ[5:]), (value, int(typ[5:]))
|
||||
return coerce_to_bytes(value)
|
||||
# containers
|
||||
elif hasattr(typ, 'fields'):
|
||||
values = [getattr(value, field) for field in typ.fields.keys()]
|
||||
types = list(typ.fields.values())
|
||||
return encode_series(values, types)
|
||||
else:
|
||||
print(value, typ)
|
||||
raise Exception("Type not recognized")
|
||||
|
||||
|
||||
def get_zero_value(typ: Any) -> Any:
|
||||
if isinstance(typ, str):
|
||||
# Bytes array
|
||||
if typ == 'bytes':
|
||||
return b''
|
||||
# bytesN
|
||||
elif typ[:5] == 'bytes' and len(typ) > 5:
|
||||
length = int(typ[5:])
|
||||
return b'\x00' * length
|
||||
# Basic types
|
||||
elif typ == 'bool':
|
||||
return False
|
||||
elif typ[:4] == 'uint':
|
||||
return 0
|
||||
elif typ == 'byte':
|
||||
return 0x00
|
||||
else:
|
||||
raise ValueError("Type not recognized")
|
||||
# Vector:
|
||||
elif isinstance(typ, list) and len(typ) == 2:
|
||||
return [get_zero_value(typ[0]) for _ in range(typ[1])]
|
||||
# List:
|
||||
elif isinstance(typ, list) and len(typ) == 1:
|
||||
return []
|
||||
# Container:
|
||||
elif hasattr(typ, 'fields'):
|
||||
return typ(**{field: get_zero_value(subtype) for field, subtype in typ.fields.items()})
|
||||
else:
|
||||
print(typ)
|
||||
raise Exception("Type not recognized")
|
||||
|
||||
|
||||
def chunkify(bytez):
|
||||
bytez += b'\x00' * (-len(bytez) % BYTES_PER_CHUNK)
|
||||
return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]
|
||||
|
||||
|
||||
def pack(values, subtype):
|
||||
return chunkify(b''.join([serialize_value(value, subtype) for value in values]))
|
||||
|
||||
|
||||
def is_power_of_two(x):
|
||||
return x > 0 and x & (x - 1) == 0
|
||||
|
||||
|
||||
def merkleize(chunks):
|
||||
tree = chunks[::]
|
||||
while not is_power_of_two(len(tree)):
|
||||
tree.append(ZERO_CHUNK)
|
||||
tree = [ZERO_CHUNK] * len(tree) + tree
|
||||
for i in range(len(tree) // 2 - 1, 0, -1):
|
||||
tree[i] = hash(tree[i * 2] + tree[i * 2 + 1])
|
||||
return tree[1]


def mix_in_length(root, length):
    return hash(root + length.to_bytes(32, 'little'))


def infer_type(value):
    """
    Note: defaults to uint64 for integer type inference due to lack of information.
    Other integer sizes are still supported, see spec.
    :param value: The value to infer a SSZ type for.
    :return: The SSZ type.
    """
    if hasattr(value.__class__, 'fields'):
        return value.__class__
    elif isinstance(value, Vector):
        if len(value) > 0:
            return [infer_type(value[0]), len(value)]
        else:
            # Element type does not matter too much,
            # assumed to be a basic type for size-encoding purposes, vector is empty.
            return ['uint64']
    elif isinstance(value, list):
        if len(value) > 0:
            return [infer_type(value[0])]
        else:
            # Element type does not matter, list-content size will be encoded regardless, list is empty.
            return ['uint64']
    elif isinstance(value, (bytes, str)):
        return 'bytes'
    elif isinstance(value, int):
        return 'uint64'
    else:
        raise Exception("Failed to infer type")


def hash_tree_root(value, typ=None):
    if typ is None:
        typ = infer_type(value)
    # -------------------------------------
    # merkleize(pack(value))
    # basic object: merkleize packed version (merkleization pads it to 32 bytes if it is not already)
    if is_basic(typ):
        return merkleize(pack([value], typ))
    # or a vector of basic objects
    elif isinstance(typ, list) and len(typ) == 2 and is_basic(typ[0]):
        assert len(value) == typ[1]
        return merkleize(pack(value, typ[0]))
    # -------------------------------------
    # mix_in_length(merkleize(pack(value)), len(value))
    # if value is a list of basic objects
    elif isinstance(typ, list) and len(typ) == 1 and is_basic(typ[0]):
        return mix_in_length(merkleize(pack(value, typ[0])), len(value))
    # (needs some extra work for non-fixed-sized bytes array)
    elif typ == 'bytes':
        return mix_in_length(merkleize(chunkify(coerce_to_bytes(value))), len(value))
    # -------------------------------------
    # merkleize([hash_tree_root(element) for element in value])
    # if value is a vector of composite objects
    elif isinstance(typ, list) and len(typ) == 2 and not is_basic(typ[0]):
        return merkleize([hash_tree_root(element, typ[0]) for element in value])
    # (needs some extra work for fixed-sized bytes array)
    elif isinstance(typ, str) and typ[:5] == 'bytes' and len(typ) > 5:
        assert len(value) == int(typ[5:])
        return merkleize(chunkify(coerce_to_bytes(value)))
    # or a container
    elif hasattr(typ, 'fields'):
        return merkleize([hash_tree_root(getattr(value, field), subtype) for field, subtype in typ.fields.items()])
    # -------------------------------------
    # mix_in_length(merkleize([hash_tree_root(element) for element in value]), len(value))
    # if value is a list of composite objects
    elif isinstance(typ, list) and len(typ) == 1 and not is_basic(typ[0]):
        return mix_in_length(merkleize([hash_tree_root(element, typ[0]) for element in value]), len(value))
    # -------------------------------------
    else:
        raise Exception("Type not recognized")


def truncate(container):
    field_keys = list(container.fields.keys())
    truncated_fields = {
        key: container.fields[key]
        for key in field_keys[:-1]
    }
    truncated_class = SSZType(truncated_fields)
    kwargs = {
        field: getattr(container, field)
        for field in field_keys[:-1]
    }
    return truncated_class(**kwargs)


def signing_root(container):
    return hash_tree_root(truncate(container))


def serialize(ssz_object):
    return getattr(ssz_object, 'serialize')()
0	test_libs/pyspec/eth2spec/utils/ssz/__init__.py (new file)
158	test_libs/pyspec/eth2spec/utils/ssz/ssz_impl.py (new file)
@ -0,0 +1,158 @@
from ..merkle_minimal import merkleize_chunks, hash
from eth2spec.utils.ssz.ssz_typing import (
    is_uint_type, is_bool_type, is_container_type,
    is_list_kind, is_vector_kind,
    read_vector_elem_type, read_elem_type,
    uint_byte_size,
    infer_input_type
)

# SSZ Serialization
# -----------------------------

BYTES_PER_LENGTH_OFFSET = 4


def is_basic_type(typ):
    return is_uint_type(typ) or is_bool_type(typ)


def serialize_basic(value, typ):
    if is_uint_type(typ):
        return value.to_bytes(uint_byte_size(typ), 'little')
    elif is_bool_type(typ):
        if value:
            return b'\x01'
        else:
            return b'\x00'
    else:
        raise Exception("Type not supported: {}".format(typ))


def deserialize_basic(value, typ):
    if is_uint_type(typ):
        return typ(int.from_bytes(value, 'little'))
    elif is_bool_type(typ):
        assert value in (b'\x00', b'\x01')
        return True if value == b'\x01' else False
    else:
        raise Exception("Type not supported: {}".format(typ))
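
A quick round-trip sketch for the basic-type helpers above (illustrative only, not part of the diff); the uint types come from ssz_typing, and plain ints default to 8-byte little-endian uints.

# Illustrative round-trip of serialize_basic / deserialize_basic.
from eth2spec.utils.ssz.ssz_typing import uint8, uint64

assert serialize_basic(uint8(3), uint8) == b'\x03'
assert serialize_basic(16, uint64) == (16).to_bytes(8, 'little')
assert deserialize_basic(b'\x03', uint8) == 3
assert deserialize_basic(b'\x01', bool) is True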


def is_fixed_size(typ):
    if is_basic_type(typ):
        return True
    elif is_list_kind(typ):
        return False
    elif is_vector_kind(typ):
        return is_fixed_size(read_vector_elem_type(typ))
    elif is_container_type(typ):
        return all(is_fixed_size(t) for t in typ.get_field_types())
    else:
        raise Exception("Type not supported: {}".format(typ))


@infer_input_type
def serialize(obj, typ=None):
    if is_basic_type(typ):
        return serialize_basic(obj, typ)
    elif is_list_kind(typ) or is_vector_kind(typ):
        return encode_series(obj, [read_elem_type(typ)] * len(obj))
    elif is_container_type(typ):
        return encode_series(obj.get_field_values(), typ.get_field_types())
    else:
        raise Exception("Type not supported: {}".format(typ))


def encode_series(values, types):
    # bytes and bytesN are already in the right format.
    if isinstance(values, bytes):
        return values

    # Recursively serialize
    parts = [(is_fixed_size(types[i]), serialize(values[i], typ=types[i])) for i in range(len(values))]

    # Compute and check lengths
    fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
                     for (constant_size, serialized) in parts]
    variable_lengths = [len(serialized) if not constant_size else 0
                        for (constant_size, serialized) in parts]

    # Check if integer is not out of bounds (Python)
    assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)

    # Interleave offsets of variable-size parts with fixed-size parts.
    # Avoid quadratic complexity in calculation of offsets.
    offset = sum(fixed_lengths)
    variable_parts = []
    fixed_parts = []
    for (constant_size, serialized) in parts:
        if constant_size:
            fixed_parts.append(serialized)
        else:
            fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
            variable_parts.append(serialized)
            offset += len(serialized)

    # Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
    return b''.join(fixed_parts + variable_parts)
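
A worked sketch (illustrative, not part of the diff) of the offset layout produced above: one fixed-size uint64 followed by one variable-size bytes value gives a 12-byte fixed region (8 bytes of data plus a 4-byte offset placeholder), so the variable part starts at offset 12.

# Illustrative layout check for encode_series; uint64 comes from ssz_typing.
from eth2spec.utils.ssz.ssz_typing import uint64

out = encode_series([5, b'\xaa\xbb'], [uint64, bytes])
assert out == (5).to_bytes(8, 'little') + (12).to_bytes(4, 'little') + b'\xaa\xbb'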


# SSZ Hash-tree-root
# -----------------------------


def pack(values, subtype):
    if isinstance(values, bytes):
        return values
    return b''.join([serialize_basic(value, subtype) for value in values])


def chunkify(bytez):
    # pad `bytez` to nearest 32-byte multiple
    bytez += b'\x00' * (-len(bytez) % 32)
    return [bytez[i:i + 32] for i in range(0, len(bytez), 32)]


def mix_in_length(root, length):
    return hash(root + length.to_bytes(32, 'little'))


def is_bottom_layer_kind(typ):
    return (
        is_basic_type(typ) or
        (is_list_kind(typ) or is_vector_kind(typ)) and is_basic_type(read_elem_type(typ))
    )


@infer_input_type
def get_typed_values(obj, typ=None):
    if is_container_type(typ):
        return obj.get_typed_values()
    elif is_list_kind(typ) or is_vector_kind(typ):
        elem_type = read_elem_type(typ)
        return list(zip(obj, [elem_type] * len(obj)))
    else:
        raise Exception("Invalid type")


@infer_input_type
def hash_tree_root(obj, typ=None):
    if is_bottom_layer_kind(typ):
        data = serialize_basic(obj, typ) if is_basic_type(typ) else pack(obj, read_elem_type(typ))
        leaves = chunkify(data)
    else:
        fields = get_typed_values(obj, typ=typ)
        leaves = [hash_tree_root(field_value, typ=field_typ) for field_value, field_typ in fields]
    if is_list_kind(typ):
        return mix_in_length(merkleize_chunks(leaves), len(obj))
    else:
        return merkleize_chunks(leaves)
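
An illustrative equivalence for the function above (not part of the diff): a list of basic elements hashes as the merkleized packed data with its length mixed in, using the helpers defined in this file.

# Illustrative; List comes from typing, uint64 from ssz_typing.
from typing import List
from eth2spec.utils.ssz.ssz_typing import uint64

values = [1, 2, 3]
expected = mix_in_length(merkleize_chunks(chunkify(pack(values, uint64))), len(values))
assert hash_tree_root(values, typ=List[uint64]) == expected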


@infer_input_type
def signing_root(obj, typ):
    assert is_container_type(typ)
    # ignore last field
    leaves = [hash_tree_root(field_value, typ=field_typ) for field_value, field_typ in obj.get_typed_values()[:-1]]
    return merkleize_chunks(chunkify(b''.join(leaves)))
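
Illustrative note (not part of the diff): the last field is skipped because by convention it holds the object's signature, so changing the signature changes the hash tree root but not the signing root. A hypothetical container sketch:

# Hypothetical container, for illustration only.
from eth2spec.utils.ssz.ssz_typing import Container, BytesN


class SignedExample(Container):
    value: int
    signature: BytesN[96]


a = SignedExample(value=1)
b = SignedExample(value=1, signature=BytesN[96](b'\x11' * 96))
assert a.signing_root() == b.signing_root()
assert a.hash_tree_root() != b.hash_tree_root()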
526	test_libs/pyspec/eth2spec/utils/ssz/ssz_typing.py (new file)
@ -0,0 +1,526 @@
from inspect import isclass
from typing import List, Iterable, TypeVar, Type, NewType
from typing import Union
from typing_inspect import get_origin

# SSZ integers
# -----------------------------


class uint(int):
    byte_len = 0

    def __new__(cls, value, *args, **kwargs):
        if value < 0:
            raise ValueError("unsigned types must not be negative")
        return super().__new__(cls, value)


class uint8(uint):
    byte_len = 1

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 8:
            raise ValueError("value out of bounds for uint8")
        return super().__new__(cls, value)


# Alias for uint8
byte = NewType('byte', uint8)


class uint16(uint):
    byte_len = 2

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 16:
            raise ValueError("value out of bounds for uint16")
        return super().__new__(cls, value)


class uint32(uint):
    byte_len = 4

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 32:
            raise ValueError("value out of bounds for uint32")
        return super().__new__(cls, value)


# We simply default to uint64. But do give it a name, for readability
uint64 = NewType('uint64', int)


class uint128(uint):
    byte_len = 16

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 128:
            raise ValueError("value out of bounds for uint128")
        return super().__new__(cls, value)


class uint256(uint):
    byte_len = 32

    def __new__(cls, value, *args, **kwargs):
        if value.bit_length() > 256:
            raise ValueError("value out of bounds for uint256")
        return super().__new__(cls, value)


def is_uint_type(typ):
    # All integers are uints in the scope of the spec here, since we default to uint64.
    # Bounds can be checked elsewhere.
    # However, some are wrapped in a NewType
    if hasattr(typ, '__supertype__'):
        # get the type that the NewType is wrapping
        typ = typ.__supertype__

    return isinstance(typ, type) and issubclass(typ, int) and not issubclass(typ, bool)


def uint_byte_size(typ):
    if hasattr(typ, '__supertype__'):
        typ = typ.__supertype__

    if isinstance(typ, type):
        if issubclass(typ, uint):
            return typ.byte_len
        elif issubclass(typ, int):
            # Default to uint64
            return 8
    else:
        raise TypeError("Type %s is not an uint (or int-default uint64) type" % typ)
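
Illustrative values for the helpers above (not part of the diff):

# Byte widths used by the serializer, and the bounds checks on construction.
from eth2spec.utils.ssz.ssz_typing import uint8, uint32, uint64, is_uint_type, uint_byte_size

assert uint_byte_size(uint8) == 1 and uint_byte_size(uint32) == 4
assert uint_byte_size(uint64) == 8
assert is_uint_type(int) and not is_uint_type(bool)
try:
    uint8(256)
except ValueError:
    pass  # bit_length 9 > 8, rejected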


# SSZ Container base class
# -----------------------------

# Note: importing ssz functionality locally, to avoid import loop

class Container(object):

    def __init__(self, **kwargs):
        cls = self.__class__
        for f, t in cls.get_fields():
            if f not in kwargs:
                setattr(self, f, get_zero_value(t))
            else:
                setattr(self, f, kwargs[f])

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)

    def signing_root(self):
        from .ssz_impl import signing_root
        return signing_root(self, self.__class__)

    def get_field_values(self):
        cls = self.__class__
        return [getattr(self, field) for field in cls.get_field_names()]

    def __repr__(self):
        return repr({field: getattr(self, field) for field in self.get_field_names()})

    def __str__(self):
        output = []
        for field in self.get_field_names():
            output.append(f'{field}: {getattr(self, field)}')
        return "\n".join(output)

    def __eq__(self, other):
        return self.hash_tree_root() == other.hash_tree_root()

    def __hash__(self):
        return hash(self.hash_tree_root())

    @classmethod
    def get_fields_dict(cls):
        return dict(cls.__annotations__)

    @classmethod
    def get_fields(cls):
        return list(dict(cls.__annotations__).items())

    def get_typed_values(self):
        return list(zip(self.get_field_values(), self.get_field_types()))

    @classmethod
    def get_field_names(cls):
        return list(cls.__annotations__.keys())

    @classmethod
    def get_field_types(cls):
        # values of annotations are the types corresponding to the fields, not instance values.
        return list(cls.__annotations__.values())
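
A hypothetical sketch (not part of the diff) of how class annotations become SSZ fields, with missing fields filled in by get_zero_value; plain int annotations behave as uint64.

# Hypothetical example container, as a consumer of this module would define it.
from eth2spec.utils.ssz.ssz_typing import Container


class Pair(Container):
    left: int
    right: int


p = Pair(left=1)
assert p.right == 0
assert Pair.get_field_names() == ['left', 'right']
assert p.get_typed_values() == [(1, int), (0, int)]
assert p.serialize() == (1).to_bytes(8, 'little') + (0).to_bytes(8, 'little')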


# SSZ vector
# -----------------------------


def _is_vector_instance_of(a, b):
    # Other must not be a BytesN
    if issubclass(b, bytes):
        return False
    elif not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
        # Vector (b) is not an instance of Vector[X, Y] (a)
        return False
    elif not hasattr(a, 'elem_type') or not hasattr(a, 'length'):
        # Vector[X, Y] (b) is an instance of Vector (a)
        return True
    else:
        # Vector[X, Y] (a) is an instance of Vector[X, Y] (b)
        return a.elem_type == b.elem_type and a.length == b.length


def _is_equal_vector_type(a, b):
    # Other must not be a BytesN
    if issubclass(b, bytes):
        return False
    elif not hasattr(a, 'elem_type') or not hasattr(a, 'length'):
        if not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
            # Vector == Vector
            return True
        else:
            # Vector != Vector[X, Y]
            return False
    elif not hasattr(b, 'elem_type') or not hasattr(b, 'length'):
        # Vector[X, Y] != Vector
        return False
    else:
        # Vector[X, Y] == Vector[X, Y]
        return a.elem_type == b.elem_type and a.length == b.length


class VectorMeta(type):
    def __new__(cls, class_name, parents, attrs):
        out = type.__new__(cls, class_name, parents, attrs)
        if 'elem_type' in attrs and 'length' in attrs:
            setattr(out, 'elem_type', attrs['elem_type'])
            setattr(out, 'length', attrs['length'])
        return out

    def __getitem__(self, params):
        if not isinstance(params, tuple) or len(params) != 2:
            raise Exception("Vector must be instantiated with two args: elem type and length")
        o = self.__class__(self.__name__, (Vector,), {'elem_type': params[0], 'length': params[1]})
        o._name = 'Vector'
        return o

    def __subclasscheck__(self, sub):
        return _is_vector_instance_of(self, sub)

    def __instancecheck__(self, other):
        return _is_vector_instance_of(self, other.__class__)

    def __eq__(self, other):
        return _is_equal_vector_type(self, other)

    def __ne__(self, other):
        return not _is_equal_vector_type(self, other)

    def __hash__(self):
        return hash(self.__class__)


class Vector(metaclass=VectorMeta):

    def __init__(self, *args: Iterable):
        cls = self.__class__
        if not hasattr(cls, 'elem_type'):
            raise TypeError("Type Vector without elem_type data cannot be instantiated")
        elif not hasattr(cls, 'length'):
            raise TypeError("Type Vector without length data cannot be instantiated")

        if len(args) != cls.length:
            if len(args) == 0:
                args = [get_zero_value(cls.elem_type) for _ in range(cls.length)]
            else:
                raise TypeError("Typed vector with length %d cannot hold %d items" % (cls.length, len(args)))

        self.items = list(args)

        # cannot check non-class objects
        if isclass(cls.elem_type):
            for i, item in enumerate(self.items):
                if not isinstance(item, cls.elem_type):
                    raise TypeError("Typed vector cannot hold differently typed value"
                                    " at index %d. Got type: %s, expected type: %s" % (i, type(item), cls.elem_type))

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)

    def __repr__(self):
        return repr({'length': self.__class__.length, 'items': self.items})

    def __getitem__(self, key):
        return self.items[key]

    def __setitem__(self, key, value):
        self.items[key] = value

    def __iter__(self):
        return iter(self.items)

    def __len__(self):
        return len(self.items)

    def __eq__(self, other):
        return self.hash_tree_root() == other.hash_tree_root()
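
An illustrative Vector sketch (not part of the diff): subscripting builds a typed class, and zero arguments fill in default values.

# Illustrative usage of Vector[elem_type, length].
from eth2spec.utils.ssz.ssz_typing import Vector, uint64

Foo = Vector[uint64, 4]
v = Foo(1, 2, 3, 4)
assert len(v) == 4 and v[2] == 3
assert list(Foo()) == [0, 0, 0, 0]
assert isinstance(v, Vector)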


# SSZ BytesN
# -----------------------------


def _is_bytes_n_instance_of(a, b):
    # Other has to be a Bytes derivative class to be a BytesN
    if not issubclass(b, bytes):
        return False
    elif not hasattr(b, 'length'):
        # BytesN (b) is not an instance of BytesN[X] (a)
        return False
    elif not hasattr(a, 'length'):
        # BytesN[X] (b) is an instance of BytesN (a)
        return True
    else:
        # BytesN[X] (a) is an instance of BytesN[X] (b)
        return a.length == b.length


def _is_equal_bytes_n_type(a, b):
    # Other has to be a Bytes derivative class to be a BytesN
    if not issubclass(b, bytes):
        return False
    elif not hasattr(a, 'length'):
        if not hasattr(b, 'length'):
            # BytesN == BytesN
            return True
        else:
            # BytesN != BytesN[X]
            return False
    elif not hasattr(b, 'length'):
        # BytesN[X] != BytesN
        return False
    else:
        # BytesN[X] == BytesN[X]
        return a.length == b.length


class BytesNMeta(type):
    def __new__(cls, class_name, parents, attrs):
        out = type.__new__(cls, class_name, parents, attrs)
        if 'length' in attrs:
            setattr(out, 'length', attrs['length'])
        out._name = 'BytesN'
        out.elem_type = byte
        return out

    def __getitem__(self, n):
        return self.__class__(self.__name__, (BytesN,), {'length': n})

    def __subclasscheck__(self, sub):
        return _is_bytes_n_instance_of(self, sub)

    def __instancecheck__(self, other):
        return _is_bytes_n_instance_of(self, other.__class__)

    def __eq__(self, other):
        return _is_equal_bytes_n_type(self, other)

    def __ne__(self, other):
        return not _is_equal_bytes_n_type(self, other)

    def __hash__(self):
        return hash(self.__class__)


def parse_bytes(val):
    if val is None:
        return None
    elif isinstance(val, str):
        # TODO: import from eth-utils instead, and do: hexstr_if_str(to_bytes, val)
        return None
    elif isinstance(val, bytes):
        return val
    elif isinstance(val, int):
        return bytes([val])
    else:
        return None


class BytesN(bytes, metaclass=BytesNMeta):
    def __new__(cls, *args):
        if not hasattr(cls, 'length'):
            return
        bytesval = None
        if len(args) == 1:
            val: Union[bytes, int, str] = args[0]
            bytesval = parse_bytes(val)
        elif len(args) > 1:
            # TODO: each int is 1 byte, check size, create bytesval
            bytesval = bytes(args)

        if bytesval is None:
            if cls.length == 0:
                bytesval = b''
            else:
                bytesval = b'\x00' * cls.length
        if len(bytesval) != cls.length:
            raise TypeError("BytesN[%d] cannot be initialized with value of %d bytes" % (cls.length, len(bytesval)))
        return super().__new__(cls, bytesval)

    def serialize(self):
        from .ssz_impl import serialize
        return serialize(self, self.__class__)

    def hash_tree_root(self):
        from .ssz_impl import hash_tree_root
        return hash_tree_root(self, self.__class__)
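
An illustrative BytesN sketch (not part of the diff): fixed-size byte types are built by subscripting and default to zero bytes.

# Illustrative usage of BytesN[length].
from eth2spec.utils.ssz.ssz_typing import BytesN

B32 = BytesN[32]
assert B32() == b'\x00' * 32
assert B32(b'\x01' * 32)[0] == 1
assert isinstance(B32(), BytesN) and not isinstance(b'\x00' * 32, B32)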


# SSZ Defaults
# -----------------------------
def get_zero_value(typ):
    if is_uint_type(typ):
        return 0
    elif is_list_type(typ):
        return []
    elif is_bool_type(typ):
        return False
    elif is_vector_type(typ):
        return typ()
    elif is_bytesn_type(typ):
        return typ()
    elif is_bytes_type(typ):
        return b''
    elif is_container_type(typ):
        return typ(**{f: get_zero_value(t) for f, t in typ.get_fields()})
    else:
        raise Exception("Type not supported: {}".format(typ))
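
Illustrative defaults produced by get_zero_value (not part of the diff); List comes from typing.

# Zero values for a few type kinds.
from typing import List
from eth2spec.utils.ssz.ssz_typing import BytesN, get_zero_value, uint64

assert get_zero_value(uint64) == 0
assert get_zero_value(bool) is False
assert get_zero_value(bytes) == b''
assert get_zero_value(List[uint64]) == []
assert get_zero_value(BytesN[4]) == b'\x00' * 4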


# Type helpers
# -----------------------------


def infer_type(obj):
    if is_uint_type(obj.__class__):
        return obj.__class__
    elif isinstance(obj, int):
        return uint64
    elif isinstance(obj, list):
        return List[infer_type(obj[0])]
    elif isinstance(obj, (Vector, Container, bool, BytesN, bytes)):
        return obj.__class__
    else:
        raise Exception("Unknown type for {}".format(obj))


def infer_input_type(fn):
    """
    Decorator to run infer_type on the obj if typ argument is None
    """
    def infer_helper(obj, typ=None, **kwargs):
        if typ is None:
            typ = infer_type(obj)
        return fn(obj, typ=typ, **kwargs)
    return infer_helper
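
Illustrative effect of the decorator (not part of the diff): callers may omit typ and have it inferred from the value; a plain Python int infers as int, which the serializer treats as an 8-byte uint.

# Illustrative; hash_tree_root is one of the decorated functions in ssz_impl.
from eth2spec.utils.ssz.ssz_typing import infer_type, uint64
from eth2spec.utils.ssz.ssz_impl import hash_tree_root

assert infer_type(5) is int
assert infer_type(b'\x01\x02') == bytes
assert hash_tree_root(5) == hash_tree_root(5, typ=uint64)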


def is_bool_type(typ):
    """
    Check if the given type is a bool.
    """
    if hasattr(typ, '__supertype__'):
        typ = typ.__supertype__
    return isinstance(typ, type) and issubclass(typ, bool)


def is_list_type(typ):
    """
    Check if the given type is a list.
    """
    return get_origin(typ) is List or get_origin(typ) is list


def is_bytes_type(typ):
    """
    Check if the given type is a ``bytes``.
    """
    # Do not accept subclasses of bytes here, to avoid confusion with BytesN
    return typ == bytes


def is_bytesn_type(typ):
    """
    Check if the given type is a BytesN.
    """
    return isinstance(typ, type) and issubclass(typ, BytesN)


def is_list_kind(typ):
    """
    Check if the given type is a kind of list. Can be bytes.
    """
    return is_list_type(typ) or is_bytes_type(typ)


def is_vector_type(typ):
    """
    Check if the given type is a vector.
    """
    return isinstance(typ, type) and issubclass(typ, Vector)


def is_vector_kind(typ):
    """
    Check if the given type is a kind of vector. Can be BytesN.
    """
    return is_vector_type(typ) or is_bytesn_type(typ)


def is_container_type(typ):
    """
    Check if the given type is a container.
    """
    return isinstance(typ, type) and issubclass(typ, Container)


T = TypeVar('T')
L = TypeVar('L')


def read_list_elem_type(list_typ: Type[List[T]]) -> T:
    if list_typ.__args__ is None or len(list_typ.__args__) != 1:
        raise TypeError("Supplied list-type is invalid, no element type found.")
    return list_typ.__args__[0]


def read_vector_elem_type(vector_typ: Type[Vector[T, L]]) -> T:
    return vector_typ.elem_type


def read_elem_type(typ):
    if typ == bytes:
        return byte
    elif is_list_type(typ):
        return read_list_elem_type(typ)
    elif is_vector_type(typ):
        return read_vector_elem_type(typ)
    elif issubclass(typ, bytes):  # bytes or bytesN
        return byte
    else:
        raise TypeError("Unexpected type: {}".format(typ))

test_libs/pyspec/requirements-testing.txt
@ -1,3 +1,4 @@
-r requirements.txt
pytest>=3.6,<3.7
../config_helpers
flake8==3.7.7
test_libs/pyspec/requirements.txt
@ -2,3 +2,4 @@ eth-utils>=1.3.0,<2
eth-typing>=2.1.0,<3.0.0
pycryptodome==3.7.3
py_ecc>=1.6.0
typing_inspect==0.4.0
test_libs/pyspec/setup.py
@ -9,5 +9,6 @@ setup(
        "eth-typing>=2.1.0,<3.0.0",
        "pycryptodome==3.7.3",
        "py_ecc>=1.6.0",
        "typing_inspect==0.4.0"
    ]
)