Merge pull request #1594 from ethereum/master-copy

Backport v0.10.1 to dev
Diederik Loerakker 2020-01-24 21:56:49 +01:00 committed by GitHub
commit 8e5c1763ba
9 changed files with 270 additions and 130 deletions

View File

@@ -37,7 +37,7 @@ SSZObject = TypeVar('SSZObject', bound=SSZType)
 PHASE1_IMPORTS = '''from eth2spec.phase0 import spec as phase0
 from eth2spec.config.apply_config import apply_constants_preset
 from typing import (
-    Any, Callable, Dict, Set, Sequence, NewType, Tuple, TypeVar
+    Any, Callable, Dict, Set, Sequence, NewType, Optional, Tuple, TypeVar
 )
 from dataclasses import (

View File

@@ -788,10 +788,12 @@ def compute_activation_exit_epoch(epoch: Epoch) -> Epoch:
 #### `compute_domain`
 
 ```python
-def compute_domain(domain_type: DomainType, fork_version: Version=GENESIS_FORK_VERSION) -> Domain:
+def compute_domain(domain_type: DomainType, fork_version: Optional[Version]=None) -> Domain:
     """
     Return the domain for the ``domain_type`` and ``fork_version``.
     """
+    if fork_version is None:
+        fork_version = GENESIS_FORK_VERSION
     return Domain(domain_type + fork_version)
 ```
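A quick sanity check of the new default behaviour, as a sketch only: it assumes the executable phase0 spec package (`eth2spec.phase0.spec`) built from this release, and uses the existing `DOMAIN_DEPOSIT` and `GENESIS_FORK_VERSION` constants.

```python
# Sketch: with the new Optional[Version] parameter, omitting fork_version
# falls back to GENESIS_FORK_VERSION inside compute_domain, so both calls agree.
from eth2spec.phase0 import spec

assert spec.compute_domain(spec.DOMAIN_DEPOSIT) == \
    spec.compute_domain(spec.DOMAIN_DEPOSIT, spec.GENESIS_FORK_VERSION)
```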
@@ -1036,7 +1038,7 @@ def initiate_validator_exit(state: BeaconState, index: ValidatorIndex) -> None:
     # Compute exit queue epoch
     exit_epochs = [v.exit_epoch for v in state.validators if v.exit_epoch != FAR_FUTURE_EPOCH]
-    exit_queue_epoch = max(exit_epochs, default=compute_activation_exit_epoch(get_current_epoch(state)))
+    exit_queue_epoch = max(exit_epochs + [compute_activation_exit_epoch(get_current_epoch(state))])
     exit_queue_churn = len([v for v in state.validators if v.exit_epoch == exit_queue_epoch])
     if exit_queue_churn >= get_validator_churn_limit(state):
         exit_queue_epoch += Epoch(1)
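The behavioural difference between the two `max` forms is easy to miss; here is a minimal, spec-free illustration (the epoch numbers are made up for the example):

```python
# With default=, the activation-exit epoch is only used when no validator has exited yet,
# so an earlier exit epoch could pull the queue epoch below the intended lower bound.
exit_epochs = [5]              # one already-exited validator with an early exit epoch
activation_exit_epoch = 10     # lower bound the exit queue should respect

assert max(exit_epochs, default=activation_exit_epoch) == 5    # old computation
assert max(exit_epochs + [activation_exit_epoch]) == 10        # new computation
```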

View File

@@ -146,7 +146,8 @@ def get_ancestor(store: Store, root: Root, slot: Slot) -> Root:
     elif block.slot == slot:
         return root
     else:
-        return Bytes32()  # root is older than queried slot: no results.
+        # root is older than queried slot, thus a skip slot. Return earliest root prior to slot
+        return root
 ```
 
 #### `get_latest_attesting_balance`
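A toy illustration of the new skip-slot behaviour, using a simplified in-memory chain rather than the spec's `Store` (the names and slot numbers here are illustrative only):

```python
from dataclasses import dataclass

@dataclass
class Block:
    slot: int
    parent_root: str

# A -> B -> C, with slot 2 skipped
blocks = {"A": Block(0, "A"), "B": Block(1, "A"), "C": Block(3, "B")}

def get_ancestor(root: str, slot: int) -> str:
    block = blocks[root]
    if block.slot > slot:
        return get_ancestor(block.parent_root, slot)
    elif block.slot == slot:
        return root
    else:
        # root is older than the queried slot (a skip slot): return it rather than an empty root
        return root

# The slot-2 ancestor of C now resolves to B instead of the old Bytes32() sentinel.
assert get_ancestor("C", 2) == "B"
```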
@@ -249,13 +250,8 @@ def should_update_justified_checkpoint(store: Store, new_justified_checkpoint: C
     if compute_slots_since_epoch_start(get_current_slot(store)) < SAFE_SLOTS_TO_UPDATE_JUSTIFIED:
         return True
 
-    new_justified_block = store.blocks[new_justified_checkpoint.root]
-    if new_justified_block.slot <= compute_start_slot_at_epoch(store.justified_checkpoint.epoch):
-        return False
-    if not (
-        get_ancestor(store, new_justified_checkpoint.root, store.blocks[store.justified_checkpoint.root].slot)
-        == store.justified_checkpoint.root
-    ):
+    justified_slot = compute_start_slot_at_epoch(store.justified_checkpoint.epoch)
+    if not get_ancestor(store, new_justified_checkpoint.root, justified_slot) == store.justified_checkpoint.root:
         return False
 
     return True
@@ -346,13 +342,13 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:
     assert get_current_slot(store) >= block.slot
     # Add new block to the store
     store.blocks[hash_tree_root(block)] = block
-    # Check block is a descendant of the finalized block
-    assert (
-        get_ancestor(store, hash_tree_root(block), store.blocks[store.finalized_checkpoint.root].slot) ==
-        store.finalized_checkpoint.root
-    )
-    # Check that block is later than the finalized epoch slot
-    assert block.slot > compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)
+
+    # Check that block is later than the finalized epoch slot (optimization to reduce calls to get_ancestor)
+    finalized_slot = compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)
+    assert block.slot > finalized_slot
+    # Check block is a descendant of the finalized block at the checkpoint finalized slot
+    assert get_ancestor(store, hash_tree_root(block), finalized_slot) == store.finalized_checkpoint.root
+
     # Check the block is valid and compute the post-state
     state = state_transition(pre_state, signed_block, True)
     # Add new state for this block to the store
@@ -368,6 +364,15 @@ def on_block(store: Store, signed_block: SignedBeaconBlock) -> None:
     # Update finalized checkpoint
     if state.finalized_checkpoint.epoch > store.finalized_checkpoint.epoch:
         store.finalized_checkpoint = state.finalized_checkpoint
+        finalized_slot = compute_start_slot_at_epoch(store.finalized_checkpoint.epoch)
+
+        # Update justified if new justified is later than store justified
+        # or if store justified is not in chain with finalized checkpoint
+        if (
+            state.current_justified_checkpoint.epoch > store.justified_checkpoint.epoch
+            or get_ancestor(store, store.justified_checkpoint.root, finalized_slot) != store.finalized_checkpoint.root
+        ):
+            store.justified_checkpoint = state.current_justified_checkpoint
 ```
 
 #### `on_attestation`
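A compact, dictionary-based illustration of the new update rule added above (this is not spec code; `SLOTS_PER_EPOCH = 32` and the ancestry table are assumptions for the example): the justified checkpoint is replaced either when the incoming one is from a later epoch, or when the stored one no longer sits on the newly finalized chain.

```python
SLOTS_PER_EPOCH = 32

finalized = {"epoch": 3, "root": "F"}
store_justified = {"epoch": 4, "root": "J_old"}   # no longer on the finalized chain
state_justified = {"epoch": 4, "root": "J_new"}   # same epoch, so the epoch test alone would not fire

# Pretend ancestry lookup at the finalized slot (stand-in for get_ancestor).
ancestor_at_finalized_slot = {"J_old": "X", "J_new": "F"}

finalized_slot = finalized["epoch"] * SLOTS_PER_EPOCH  # compute_start_slot_at_epoch
if (
    state_justified["epoch"] > store_justified["epoch"]
    or ancestor_at_finalized_slot[store_justified["root"]] != finalized["root"]
):
    store_justified = state_justified

assert store_justified["root"] == "J_new"   # the second condition triggered the update
```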

View File

@@ -97,7 +97,7 @@ A validator must initialize many parameters locally before submitting a deposit
 #### BLS public key
 
-Validator public keys are [G1 points](../bls_signature.md#g1-points) on the [BLS12-381 curve](https://z.cash/blog/new-snark-curve). A private key, `privkey`, must be securely generated along with the resultant `pubkey`. This `privkey` must be "hot", that is, constantly available to sign data throughout the lifetime of the validator.
+Validator public keys are [G1 points](beacon-chain.md#bls-signatures) on the [BLS12-381 curve](https://z.cash/blog/new-snark-curve). A private key, `privkey`, must be securely generated along with the resultant `pubkey`. This `privkey` must be "hot", that is, constantly available to sign data throughout the lifetime of the validator.
 
 #### BLS withdrawal key
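A minimal keypair sketch for the paragraph above, assuming the py_ecc `G2ProofOfPossession` ciphersuite used elsewhere in this release; real deployments should use a proper key-derivation standard rather than the ad-hoc sampling shown here.

```python
import secrets
from py_ecc import bls

# Illustrative sampling only; not a vetted key-derivation scheme.
privkey = secrets.randbelow(2**255)
pubkey = bls.G2ProofOfPossession.PrivToPub(privkey)   # 48-byte compressed G1 point

assert len(pubkey) == 48
```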
@@ -128,7 +128,7 @@ To submit a deposit:
 ### Process deposit
 
-Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `SLOTS_PER_ETH1_VOTING_PERIOD` slots (~1.7 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated.
+Deposits cannot be processed into the beacon chain until the Eth1 block in which they were deposited or any of its descendants is added to the beacon chain `state.eth1_data`. This takes _a minimum_ of `ETH1_FOLLOW_DISTANCE` Eth1 blocks (~4 hours) plus `SLOTS_PER_ETH1_VOTING_PERIOD` slots (~3.4 hours). Once the requisite Eth1 data is added, the deposit will normally be added to a beacon chain block and processed into the `state.validators` within an epoch or two. The validator is then in a queue to be activated.
 
 ### Validator index
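A back-of-the-envelope check of the updated figures, assuming the mainnet preset values of this release (`ETH1_FOLLOW_DISTANCE = 1024`, `SECONDS_PER_ETH1_BLOCK = 14`, `SLOTS_PER_ETH1_VOTING_PERIOD = 1024`, `SECONDS_PER_SLOT = 12`):

```python
ETH1_FOLLOW_DISTANCE = 1024
SECONDS_PER_ETH1_BLOCK = 14
SLOTS_PER_ETH1_VOTING_PERIOD = 1024
SECONDS_PER_SLOT = 12

follow_hours = ETH1_FOLLOW_DISTANCE * SECONDS_PER_ETH1_BLOCK / 3600    # ~4.0 hours
voting_hours = SLOTS_PER_ETH1_VOTING_PERIOD * SECONDS_PER_SLOT / 3600  # ~3.4 hours
print(f"follow distance ≈ {follow_hours:.1f} h, voting period ≈ {voting_hours:.1f} h")
```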

View File

@@ -135,6 +135,48 @@ def test_on_block_before_finalized(spec, state):
     run_on_block(spec, store, signed_block, False)
 
 
+@with_all_phases
+@spec_state_test
+def test_on_block_finalized_skip_slots(spec, state):
+    # Initialization
+    store = spec.get_forkchoice_store(state)
+    time = 100
+    spec.on_tick(store, time)
+
+    store.finalized_checkpoint = spec.Checkpoint(
+        epoch=store.finalized_checkpoint.epoch + 2,
+        root=store.finalized_checkpoint.root
+    )
+
+    # Build block that includes the skipped slots up to finality in chain
+    block = build_empty_block(spec, state, spec.compute_start_slot_at_epoch(store.finalized_checkpoint.epoch) + 2)
+    signed_block = state_transition_and_sign_block(spec, state, block)
+    spec.on_tick(store, store.time + state.slot * spec.SECONDS_PER_SLOT)
+    run_on_block(spec, store, signed_block)
+
+
+@with_all_phases
+@spec_state_test
+def test_on_block_finalized_skip_slots_not_in_skip_chain(spec, state):
+    # Initialization
+    store = spec.get_forkchoice_store(state)
+
+    store.finalized_checkpoint = spec.Checkpoint(
+        epoch=store.finalized_checkpoint.epoch + 2,
+        root=store.finalized_checkpoint.root
+    )
+
+    # First transition through the epoch to ensure no skipped slots
+    state, store, last_signed_block = apply_next_epoch_with_attestations(spec, state, store)
+
+    # Now build a block at later slot than finalized epoch
+    # Includes finalized block in chain, but not at appropriate skip slot
+    block = build_empty_block(spec, state, spec.compute_start_slot_at_epoch(store.finalized_checkpoint.epoch) + 2)
+    signed_block = state_transition_and_sign_block(spec, state, block)
+    spec.on_tick(store, store.time + state.slot * spec.SECONDS_PER_SLOT)
+    run_on_block(spec, store, signed_block, False)
+
+
 @with_all_phases
 @spec_state_test
 def test_on_block_update_justified_checkpoint_within_safe_slots(spec, state):
@@ -214,3 +256,51 @@ def test_on_block_outside_safe_slots_and_multiple_better_justified(spec, state):
     assert store.justified_checkpoint == previously_justified
     # ensure the best from the series was stored
     assert store.best_justified_checkpoint == best_justified_checkpoint
+
+
+@with_all_phases
+@spec_state_test
+def test_on_block_outside_safe_slots_but_finality(spec, state):
+    # Initialization
+    store = spec.get_forkchoice_store(state)
+    time = 100
+    spec.on_tick(store, time)
+
+    next_epoch(spec, state)
+    spec.on_tick(store, store.time + state.slot * spec.SECONDS_PER_SLOT)
+    state, store, last_signed_block = apply_next_epoch_with_attestations(spec, state, store)
+    next_epoch(spec, state)
+    spec.on_tick(store, store.time + state.slot * spec.SECONDS_PER_SLOT)
+    last_block_root = hash_tree_root(last_signed_block.message)
+
+    # Mock justified block in store
+    just_block = build_empty_block_for_next_slot(spec, state)
+    # Slot is same as justified checkpoint so does not trigger an override in the store
+    just_block.slot = spec.compute_start_slot_at_epoch(store.justified_checkpoint.epoch)
+    store.blocks[just_block.hash_tree_root()] = just_block
+
+    # Step time past safe slots
+    spec.on_tick(store, store.time + spec.SAFE_SLOTS_TO_UPDATE_JUSTIFIED * spec.SECONDS_PER_SLOT)
+    assert spec.get_current_slot(store) % spec.SLOTS_PER_EPOCH >= spec.SAFE_SLOTS_TO_UPDATE_JUSTIFIED
+
+    # Mock justified and finalized update in state
+    just_fin_state = store.block_states[last_block_root]
+    new_justified = spec.Checkpoint(
+        epoch=store.justified_checkpoint.epoch + 1,
+        root=just_block.hash_tree_root(),
+    )
+    new_finalized = spec.Checkpoint(
+        epoch=store.finalized_checkpoint.epoch + 1,
+        root=just_block.parent_root,
+    )
+    just_fin_state.current_justified_checkpoint = new_justified
+    just_fin_state.finalized_checkpoint = new_finalized
+
+    # Build and add block that includes the new justified/finalized info
+    block = build_empty_block_for_next_slot(spec, just_fin_state)
+    signed_block = state_transition_and_sign_block(spec, deepcopy(just_fin_state), block)
+
+    run_on_block(spec, store, signed_block)
+
+    assert store.finalized_checkpoint == new_finalized
+    assert store.justified_checkpoint == new_justified

View File

@@ -46,6 +46,8 @@ def test_success(spec, state):
     yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
 
+    assert state.validators[validator_index].exit_epoch == spec.compute_activation_exit_epoch(current_epoch)
+
 
 @with_all_phases
 @spec_state_test
@@ -110,6 +112,28 @@ def test_success_exit_queue(spec, state):
     )
 
 
+@with_all_phases
+@spec_state_test
+def test_default_exit_epoch_subsequent_exit(spec, state):
+    # move state forward PERSISTENT_COMMITTEE_PERIOD epochs to allow for exit
+    state.slot += spec.PERSISTENT_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
+
+    current_epoch = spec.get_current_epoch(state)
+    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
+    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
+
+    signed_voluntary_exit = sign_voluntary_exit(
+        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
+
+    # Exit one validator prior to this new one
+    exited_index = spec.get_active_validator_indices(state, current_epoch)[-1]
+    state.validators[exited_index].exit_epoch = current_epoch - 1
+
+    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
+
+    assert state.validators[validator_index].exit_epoch == spec.compute_activation_exit_epoch(current_epoch)
+
+
 @with_all_phases
 @spec_state_test
 def test_validator_exit_in_future(spec, state):

View File

@@ -9,7 +9,7 @@ The base unit is bytes48 of which only 381 bits are used
 ## Resources
 
-- [Eth2 spec](../../specs/bls_signature.md)
+- [Eth2 spec](../../../specs/phase0/beacon-chain.md#bls-signatures)
 - [Finite Field Arithmetic](http://www.springeronline.com/sgw/cda/pageitems/document/cda_downloaddocument/0,11996,0-0-45-110359-0,00.pdf)
 - Chapter 2 of [Elliptic Curve Cryptography](http://cacr.uwaterloo.ca/ecc/). Darrel Hankerson, Alfred Menezes, and Scott Vanstone
 - [Zcash BLS parameters](https://github.com/zkcrypto/pairing/tree/master/src/bls12_381)

View File

@@ -20,6 +20,7 @@ def hash(x):
 
 F2Q_COEFF_LEN = 48
 G2_COMPRESSED_Z_LEN = 48
+DST = bls.G2ProofOfPossession.DST
 
 
 def int_to_hex(n: int, byte_length: int = None) -> str:
@@ -33,15 +34,6 @@ def hex_to_int(x: str) -> int:
     return int(x, 16)
 
 
-DOMAINS = [
-    b'\x00\x00\x00\x00\x00\x00\x00\x00',
-    b'\x00\x00\x00\x00\x00\x00\x00\x01',
-    b'\x01\x00\x00\x00\x00\x00\x00\x00',
-    b'\x80\x00\x00\x00\x00\x00\x00\x00',
-    b'\x01\x23\x45\x67\x89\xab\xcd\xef',
-    b'\xff\xff\xff\xff\xff\xff\xff\xff'
-]
-
 MESSAGES = [
     bytes(b'\x00' * 32),
     bytes(b'\x56' * 32),
@@ -57,116 +49,144 @@ PRIVKEYS = [
 ]
 
 
-def hash_message(msg: bytes,
-                 domain: bytes) -> Tuple[Tuple[str, str], Tuple[str, str], Tuple[str, str]]:
-    """
-    Hash message
-    Input:
-        - Message as bytes32
-        - domain as bytes8
-    Output:
-        - Message hash as a G2 point
-    """
-    return [
-        [
-            int_to_hex(fq2.coeffs[0], F2Q_COEFF_LEN),
-            int_to_hex(fq2.coeffs[1], F2Q_COEFF_LEN),
-        ]
-        for fq2 in bls.utils.hash_to_G2(msg, domain)
-    ]
-
-
-def hash_message_compressed(msg: bytes, domain: bytes) -> Tuple[str, str]:
-    """
-    Hash message
-    Input:
-        - Message as bytes32
-        - domain as bytes8
-    Output:
-        - Message hash as a compressed G2 point
-    """
-    z1, z2 = bls.utils.compress_G2(bls.utils.hash_to_G2(msg, domain))
-    return [int_to_hex(z1, G2_COMPRESSED_Z_LEN), int_to_hex(z2, G2_COMPRESSED_Z_LEN)]
-
-
-def case01_message_hash_G2_uncompressed():
-    for msg in MESSAGES:
-        for domain in DOMAINS:
-            yield f'uncom_g2_hash_{encode_hex(msg)}_{encode_hex(domain)}', {
-                'input': {
-                    'message': encode_hex(msg),
-                    'domain': encode_hex(domain),
-                },
-                'output': hash_message(msg, domain)
-            }
-
-
-def case02_message_hash_G2_compressed():
-    for msg in MESSAGES:
-        for domain in DOMAINS:
-            yield f'com_g2_hash_{encode_hex(msg)}_{encode_hex(domain)}', {
-                'input': {
-                    'message': encode_hex(msg),
-                    'domain': encode_hex(domain),
-                },
-                'output': hash_message_compressed(msg, domain)
-            }
-
-
-def case03_private_to_public_key():
-    pubkeys = [bls.privtopub(privkey) for privkey in PRIVKEYS]
-    pubkeys_serial = ['0x' + pubkey.hex() for pubkey in pubkeys]
-    for privkey, pubkey_serial in zip(PRIVKEYS, pubkeys_serial):
-        yield f'priv_to_pub_{int_to_hex(privkey)}', {
-            'input': int_to_hex(privkey),
-            'output': pubkey_serial,
-        }
-
-
-def case04_sign_messages():
-    for privkey in PRIVKEYS:
-        for message in MESSAGES:
-            for domain in DOMAINS:
-                sig = bls.sign(message, privkey, domain)
-                full_name = f'{int_to_hex(privkey)}_{encode_hex(message)}_{encode_hex(domain)}'
-                yield f'sign_msg_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
-                    'input': {
-                        'privkey': int_to_hex(privkey),
-                        'message': encode_hex(message),
-                        'domain': encode_hex(domain),
-                    },
-                    'output': encode_hex(sig)
-                }
-
-
-# TODO: case05_verify_messages: Verify messages signed in case04
-# It takes too long, empty for now
-
-
-def case06_aggregate_sigs():
-    for domain in DOMAINS:
-        for message in MESSAGES:
-            sigs = [bls.sign(message, privkey, domain) for privkey in PRIVKEYS]
-            yield f'agg_sigs_{encode_hex(message)}_{encode_hex(domain)}', {
-                'input': [encode_hex(sig) for sig in sigs],
-                'output': encode_hex(bls.aggregate_signatures(sigs)),
-            }
-
-
-def case07_aggregate_pubkeys():
-    pubkeys = [bls.privtopub(privkey) for privkey in PRIVKEYS]
-    pubkeys_serial = [encode_hex(pubkey) for pubkey in pubkeys]
-    yield f'agg_pub_keys', {
-        'input': pubkeys_serial,
-        'output': encode_hex(bls.aggregate_pubkeys(pubkeys)),
-    }
-
-
-# TODO
-# Aggregate verify
-
-
-# TODO
-# Proof-of-possession
+def case01_sign():
+    for privkey in PRIVKEYS:
+        for message in MESSAGES:
+            sig = bls.G2ProofOfPossession.Sign(privkey, message)
+            full_name = f'{int_to_hex(privkey)}_{encode_hex(message)}'
+            yield f'sign_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+                'input': {
+                    'privkey': int_to_hex(privkey),
+                    'message': encode_hex(message),
+                },
+                'output': encode_hex(sig)
+            }
+
+
+def case02_verify():
+    for i, privkey in enumerate(PRIVKEYS):
+        for message in MESSAGES:
+            # Valid signature
+            signature = bls.G2ProofOfPossession.Sign(privkey, message)
+            pubkey = bls.G2ProofOfPossession.PrivToPub(privkey)
+            full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}_valid'
+            yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+                'input': {
+                    'pubkey': encode_hex(pubkey),
+                    'message': encode_hex(message),
+                    'signature': encode_hex(signature),
+                },
+                'output': True,
+            }
+
+            # Invalid signatures -- wrong pubkey
+            wrong_pubkey = bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[(i + 1) % len(PRIVKEYS)])
+            full_name = f'{encode_hex(wrong_pubkey)}_{encode_hex(message)}_wrong_pubkey'
+            yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+                'input': {
+                    'pubkey': encode_hex(wrong_pubkey),
+                    'message': encode_hex(message),
+                    'signature': encode_hex(signature),
+                },
+                'output': False,
+            }
+
+            # Invalid signature -- tampered with signature
+            tampered_signature = signature[:-4] + b'\xFF\xFF\xFF\xFF'
+            full_name = f'{encode_hex(pubkey)}_{encode_hex(message)}_tampered_signature'
+            yield f'verify_case_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+                'input': {
+                    'pubkey': encode_hex(pubkey),
+                    'message': encode_hex(message),
+                    'signature': encode_hex(tampered_signature),
+                },
+                'output': False,
+            }
+
+
+def case03_aggregate():
+    for message in MESSAGES:
+        sigs = [bls.G2ProofOfPossession.Sign(privkey, message) for privkey in PRIVKEYS]
+        yield f'aggregate_{encode_hex(message)}', {
+            'input': [encode_hex(sig) for sig in sigs],
+            'output': encode_hex(bls.G2ProofOfPossession.Aggregate(sigs)),
+        }
+
+
+def case04_fast_aggregate_verify():
+    for i, message in enumerate(MESSAGES):
+        privkeys = PRIVKEYS[:i + 1]
+        sigs = [bls.G2ProofOfPossession.Sign(privkey, message) for privkey in privkeys]
+        aggregate_signature = bls.G2ProofOfPossession.Aggregate(sigs)
+        pubkeys = [bls.G2ProofOfPossession.PrivToPub(privkey) for privkey in privkeys]
+        pubkeys_serial = [encode_hex(pubkey) for pubkey in pubkeys]
+
+        # Valid signature
+        full_name = f'{pubkeys_serial}_{encode_hex(message)}_valid'
+        yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+            'input': {
+                'pubkeys': pubkeys_serial,
+                'message': encode_hex(message),
+                'signature': encode_hex(aggregate_signature),
+            },
+            'output': True,
+        }
+
+        # Invalid signature -- extra pubkey
+        pubkeys_extra = pubkeys + [bls.G2ProofOfPossession.PrivToPub(PRIVKEYS[-1])]
+        pubkeys_extra_serial = [encode_hex(pubkey) for pubkey in pubkeys]
+        full_name = f'{pubkeys_extra_serial}_{encode_hex(message)}_extra_pubkey'
+        yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+            'input': {
+                'pubkeys': pubkeys_extra_serial,
+                'message': encode_hex(message),
+                'signature': encode_hex(aggregate_signature),
+            },
+            'output': False,
+        }
+
+        # Invalid signature -- tampered with signature
+        tampered_signature = aggregate_signature[:-4] + b'\xff\xff\xff\xff'
+        full_name = f'{pubkeys_serial}_{encode_hex(message)}_tampered_signature'
+        yield f'fast_aggregate_verify_{(hash(bytes(full_name, "utf-8"))[:8]).hex()}', {
+            'input': {
+                'pubkeys': pubkeys_serial,
+                'message': encode_hex(message),
+                'signature': encode_hex(tampered_signature),
+            },
+            'output': False,
+        }
+
+
+def case05_aggregate_verify():
+    pairs = []
+    sigs = []
+    for privkey, message in zip(PRIVKEYS, MESSAGES):
+        sig = bls.G2ProofOfPossession.Sign(privkey, message)
+        pubkey = bls.G2ProofOfPossession.PrivToPub(privkey)
+        pairs.append({
+            'pubkey': encode_hex(pubkey),
+            'message': encode_hex(message),
+        })
+        sigs.append(sig)
+
+    aggregate_signature = bls.G2ProofOfPossession.Aggregate(sigs)
+    yield f'fast_aggregate_verify_valid', {
+        'input': {
+            'pairs': pairs,
+            'signature': encode_hex(aggregate_signature),
+        },
+        'output': True,
+    }
+
+    tampered_signature = aggregate_signature[:4] + b'\xff\xff\xff\xff'
+    yield f'fast_aggregate_verify_tampered_signature', {
+        'input': {
+            'pairs': pairs,
+            'signature': encode_hex(tampered_signature),
+        },
+        'output': False,
+    }
 
 
 def create_provider(handler_name: str,
@@ -195,10 +215,9 @@ def create_provider(handler_name: str,
 
 if __name__ == "__main__":
     gen_runner.run_generator("bls", [
-        create_provider('msg_hash_uncompressed', case01_message_hash_G2_uncompressed),
-        create_provider('msg_hash_compressed', case02_message_hash_G2_compressed),
-        create_provider('priv_to_pub', case03_private_to_public_key),
-        create_provider('sign_msg', case04_sign_messages),
-        create_provider('aggregate_sigs', case06_aggregate_sigs),
-        create_provider('aggregate_pubkeys', case07_aggregate_pubkeys),
+        create_provider('sign', case01_sign),
+        create_provider('verify', case02_verify),
+        create_provider('aggregate', case03_aggregate),
+        create_provider('fast_aggregate_verify', case04_fast_aggregate_verify),
+        create_provider('aggregate_verify', case05_aggregate_verify),
     ])
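For orientation, a small round-trip through the new py_ecc API that the rewritten generator targets. `Sign`, `PrivToPub` and `Aggregate` appear in the diff above; `Verify` and `FastAggregateVerify` are assumed from the new handler names and the (pubkey(s), message, signature) layout of the test vectors.

```python
from py_ecc import bls

privkeys = [1, 2, 3]
message = b'\x12' * 32

pubkeys = [bls.G2ProofOfPossession.PrivToPub(k) for k in privkeys]
sigs = [bls.G2ProofOfPossession.Sign(k, message) for k in privkeys]
aggregate = bls.G2ProofOfPossession.Aggregate(sigs)

# Single-signature and aggregate verification over one common message.
assert bls.G2ProofOfPossession.Verify(pubkeys[0], message, sigs[0])
assert bls.G2ProofOfPossession.FastAggregateVerify(pubkeys, message, aggregate)
```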

View File

@@ -1,6 +1,6 @@
 from ssz_test_case import invalid_test_case, valid_test_case
 from eth2spec.utils.ssz.ssz_typing import SSZType, Container, byte, uint8, uint16, \
-    uint32, uint64, List, Bytes, Vector, Bitvector, Bitlist
+    uint32, uint64, List, ByteList, Vector, Bitvector, Bitlist
 from eth2spec.utils.ssz.ssz_impl import serialize
 from random import Random
 from typing import Dict, Tuple, Sequence, Callable
@@ -32,7 +32,7 @@ class ComplexTestStruct(Container):
     A: uint16
     B: List[uint16, 128]
     C: uint8
-    D: Bytes[256]
+    D: ByteList[256]
     E: VarTestStruct
     F: Vector[FixedTestStruct, 4]
     G: Vector[VarTestStruct, 2]
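A small sketch of the renamed type, assuming the in-repo SSZ utilities behave like standard SSZ byte lists (`ByteList[N]` replaces what these tests previously spelled `Bytes[N]`):

```python
from eth2spec.utils.ssz.ssz_typing import ByteList
from eth2spec.utils.ssz.ssz_impl import serialize

Bytes256 = ByteList[256]           # formerly Bytes[256]
value = Bytes256(b'\x01\x02\x03')

# A top-level byte list serializes to exactly its contents.
assert serialize(value) == b'\x01\x02\x03'
```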