Merge pull request #1038 from ethereum/master

backport v0.6.1 into dev
Danny Ryan 2019-05-03 16:28:24 -06:00 committed by GitHub
commit f57d6fa28e
3 changed files with 37 additions and 30 deletions


@@ -48,10 +48,8 @@ def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int,
                       count: int) -> List[ValidatorIndex]:
     param_hash = (hash_tree_root(indices), seed, index, count)
     if param_hash in committee_cache:
-        print("Cache hit, param_hash: ", param_hash)
         return committee_cache[param_hash]
     else:
-        print("Cache miss, param_hash: ", param_hash)
         ret = _compute_committee(indices, seed, index, count)
         committee_cache[param_hash] = ret
         return ret
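
The removed lines are debug prints; the caching itself is untouched. For context, the cache is plain memoization keyed on the call parameters. A minimal self-contained sketch of the same pattern (`_compute` below is a hypothetical stand-in for the expensive `_compute_committee` call):

```python
# Minimal memoization sketch; `_compute` is a hypothetical stand-in for
# the real `_compute_committee` shuffle.
committee_cache = {}

def _compute(indices, seed, index, count):
    return indices[index::count]  # placeholder for the real computation

def compute_cached(indices, seed, index, count):
    param_hash = (tuple(indices), seed, index, count)  # hashable cache key
    if param_hash in committee_cache:
        return committee_cache[param_hash]
    ret = _compute(indices, seed, index, count)
    committee_cache[param_hash] = ret
    return ret

assert compute_cached([0, 1, 2, 3], b'seed', 0, 2) == [0, 2]  # miss, then cached
assert compute_cached([0, 1, 2, 3], b'seed', 0, 2) == [0, 2]  # hit
```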


@@ -799,8 +799,8 @@
 def get_randao_mix(state: BeaconState,
                    epoch: Epoch) -> Bytes32:
     """
     Return the randao mix at a recent ``epoch``.
+    ``epoch`` expected to be between (current_epoch - LATEST_RANDAO_MIXES_LENGTH, current_epoch].
     """
-    assert get_current_epoch(state) - LATEST_RANDAO_MIXES_LENGTH < epoch <= get_current_epoch(state)
     return state.latest_randao_mixes[epoch % LATEST_RANDAO_MIXES_LENGTH]
 ```
@@ -811,8 +811,9 @@
 def get_active_index_root(state: BeaconState,
                           epoch: Epoch) -> Bytes32:
     """
     Return the index root at a recent ``epoch``.
+    ``epoch`` expected to be between
+    (current_epoch - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY, current_epoch + ACTIVATION_EXIT_DELAY].
     """
-    assert get_current_epoch(state) - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY < epoch <= get_current_epoch(state) + ACTIVATION_EXIT_DELAY
     return state.latest_active_index_roots[epoch % LATEST_ACTIVE_INDEX_ROOTS_LENGTH]
 ```
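
Both asserts above are removed in favor of documenting the expected range, since an `epoch` below the window would underflow an unsigned integer in typed client implementations. A caller-side sketch of the documented window for `get_active_index_root`, assuming v0.6-style mainnet constant values:

```python
# Sketch of the range the docstring documents (the assert the diff removes);
# constant values assumed from the v0.6 mainnet configuration.
LATEST_ACTIVE_INDEX_ROOTS_LENGTH = 8192
ACTIVATION_EXIT_DELAY = 4

def in_expected_range(epoch, current_epoch):
    lower = current_epoch - LATEST_ACTIVE_INDEX_ROOTS_LENGTH + ACTIVATION_EXIT_DELAY
    upper = current_epoch + ACTIVATION_EXIT_DELAY
    return lower < epoch <= upper

assert in_expected_range(epoch=10, current_epoch=8)      # lookahead up to the delay is fine
assert not in_expected_range(epoch=13, current_epoch=8)  # beyond current_epoch + ACTIVATION_EXIT_DELAY
```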
@@ -825,7 +826,7 @@ def generate_seed(state: BeaconState,
     Generate a seed for the given ``epoch``.
     """
     return hash(
-        get_randao_mix(state, epoch - MIN_SEED_LOOKAHEAD) +
+        get_randao_mix(state, epoch + LATEST_RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) +
         get_active_index_root(state, epoch) +
         int_to_bytes32(epoch)
     )
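
Shifting the argument by `LATEST_RANDAO_MIXES_LENGTH` keeps it non-negative near genesis (where `epoch < MIN_SEED_LOOKAHEAD` would underflow a uint64) without changing which mix is read, because the lookup reduces modulo the buffer length. A quick check, assuming v0.6-style constant values:

```python
# The shifted argument is congruent to the naive one modulo the buffer
# length, so the same mix slot is read; constants assumed from v0.6 mainnet.
LATEST_RANDAO_MIXES_LENGTH = 8192
MIN_SEED_LOOKAHEAD = 1

for epoch in (0, 1, 5, 8191, 8192):
    shifted = (epoch + LATEST_RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD) % LATEST_RANDAO_MIXES_LENGTH
    naive = (epoch - MIN_SEED_LOOKAHEAD) % LATEST_RANDAO_MIXES_LENGTH  # safe only in Python
    assert shifted == naive
    assert epoch + LATEST_RANDAO_MIXES_LENGTH - MIN_SEED_LOOKAHEAD >= 0  # no uint64 underflow
```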
@@ -1429,7 +1430,8 @@ def get_attestation_deltas(state: BeaconState) -> Tuple[List[Gwei], List[Gwei]]:
     # Proposer and inclusion delay micro-rewards
     for index in get_unslashed_attesting_indices(state, matching_source_attestations):
         attestation = min([
-            a for a in attestations if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
+            a for a in matching_source_attestations
+            if index in get_attesting_indices(state, a.data, a.aggregation_bitfield)
         ], key=lambda a: a.inclusion_delay)
         rewards[attestation.proposer_index] += get_base_reward(state, index) // PROPOSER_REWARD_QUOTIENT
         rewards[index] += get_base_reward(state, index) * MIN_ATTESTATION_INCLUSION_DELAY // attestation.inclusion_delay
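
The fix narrows the candidate set from all `attestations` to `matching_source_attestations`, so the proposer micro-reward goes to the earliest-included attestation the validator actually contributed to within the matching set. A toy sketch of the selection pattern (the `Attestation` tuple here is hypothetical, reduced to the fields the `min` needs):

```python
from collections import namedtuple

# Hypothetical reduced attestation: just the fields the min() selection uses.
Attestation = namedtuple("Attestation", ["proposer_index", "inclusion_delay", "participants"])

matching_source_attestations = [
    Attestation(proposer_index=7, inclusion_delay=3, participants={1, 2}),
    Attestation(proposer_index=9, inclusion_delay=1, participants={2, 3}),
]

index = 2  # validator of interest
attestation = min(
    [a for a in matching_source_attestations if index in a.participants],
    key=lambda a: a.inclusion_delay,
)
assert attestation.proposer_index == 9  # the earliest-included attestation wins
```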


@@ -3,7 +3,7 @@
 from typing import Any
 
 from .hash_function import hash
 
 BYTES_PER_CHUNK = 32
-BYTES_PER_LENGTH_PREFIX = 4
+BYTES_PER_LENGTH_OFFSET = 4
 ZERO_CHUNK = b'\x00' * BYTES_PER_CHUNK
@@ -111,19 +111,34 @@ def coerce_to_bytes(x):
     raise Exception("Expecting bytes")
 
 
-def encode_bytes(value):
-    serialized_bytes = coerce_to_bytes(value)
-    assert len(serialized_bytes) < 2 ** (8 * BYTES_PER_LENGTH_PREFIX)
-    serialized_length = len(serialized_bytes).to_bytes(BYTES_PER_LENGTH_PREFIX, 'little')
-    return serialized_length + serialized_bytes
-
-
-def encode_variable_size_container(values, types):
-    return encode_bytes(encode_fixed_size_container(values, types))
-
-
-def encode_fixed_size_container(values, types):
-    return b''.join([serialize_value(v, typ) for (v, typ) in zip(values, types)])
+def encode_series(values, types):
+    # Recursively serialize
+    parts = [(is_constant_sized(types[i]), serialize_value(values[i], types[i])) for i in range(len(values))]
+
+    # Compute and check lengths
+    fixed_lengths = [len(serialized) if constant_size else BYTES_PER_LENGTH_OFFSET
+                     for (constant_size, serialized) in parts]
+    variable_lengths = [len(serialized) if not constant_size else 0
+                        for (constant_size, serialized) in parts]
+
+    # Check if integer is not out of bounds (Python)
+    assert sum(fixed_lengths + variable_lengths) < 2 ** (BYTES_PER_LENGTH_OFFSET * 8)
+
+    # Interleave offsets of variable-size parts with fixed-size parts.
+    # Avoid quadratic complexity in calculation of offsets.
+    offset = sum(fixed_lengths)
+    variable_parts = []
+    fixed_parts = []
+    for (constant_size, serialized) in parts:
+        if constant_size:
+            fixed_parts.append(serialized)
+        else:
+            fixed_parts.append(offset.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little'))
+            variable_parts.append(serialized)
+            offset += len(serialized)
+
+    # Return the concatenation of the fixed-size parts (offsets interleaved) with the variable-size parts
+    return b"".join(fixed_parts + variable_parts)
 
 
 def serialize_value(value, typ=None):
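
`encode_series` replaces the old length-prefix scheme: fixed-size elements are serialized in place, while each variable-size element contributes a 4-byte little-endian offset (pointing into the full serialization) to the fixed region, followed by its bytes in the variable region. A hand-worked sketch of the layout, assuming one 8-byte fixed element and two variable-size byte strings:

```python
# Hand-worked offset layout for a hypothetical series: [uint64, bytes, bytes].
BYTES_PER_LENGTH_OFFSET = 4

fixed = (42).to_bytes(8, 'little')  # fixed-size part: 8 bytes in place
var_a = b'\xaa' * 3                 # variable-size parts go at the end
var_b = b'\xbb' * 5

# Fixed region: the 8-byte element plus two 4-byte offsets = 16 bytes.
offset_a = 8 + 2 * BYTES_PER_LENGTH_OFFSET  # var_a starts right after the fixed region
offset_b = offset_a + len(var_a)            # var_b follows var_a

encoded = (
    fixed
    + offset_a.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little')
    + offset_b.to_bytes(BYTES_PER_LENGTH_OFFSET, 'little')
    + var_a
    + var_b
)
assert encoded[offset_a:offset_b] == var_a  # offsets recover the variable parts
assert encoded[offset_b:] == var_b
```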
@@ -142,18 +157,13 @@ def serialize_value(value, typ=None):
     elif isinstance(typ, list) and len(typ) == 2:
         # (regardless of element type, sanity-check if the length reported in the vector type matches the value length)
         assert len(value) == typ[1]
-        # If value is fixed-size (i.e. element type is fixed-size):
-        if is_constant_sized(typ):
-            return encode_fixed_size_container(value, [typ[0]] * len(value))
-        # If value is variable-size (i.e. element type is variable-size)
-        else:
-            return encode_variable_size_container(value, [typ[0]] * len(value))
-    # "bytes" (variable size)
-    elif isinstance(typ, str) and typ == 'bytes':
-        return encode_bytes(value)
+        return encode_series(value, [typ[0]] * len(value))
     # List
     elif isinstance(typ, list) and len(typ) == 1:
-        return encode_variable_size_container(value, [typ[0]] * len(value))
+        return encode_series(value, [typ[0]] * len(value))
+    # "bytes" (variable size)
+    elif isinstance(typ, str) and typ == 'bytes':
+        return coerce_to_bytes(value)
     # "bytesN" (fixed size)
     elif isinstance(typ, str) and len(typ) > 5 and typ[:5] == 'bytes':
         assert len(value) == int(typ[5:]), (value, int(typ[5:]))
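
With offsets handled inside `encode_series`, vectors and lists route to the same encoder; only raw `bytes` changes meaning (no more length prefix, just the bytes themselves). The type descriptors the branches dispatch on can be summarized with a small classifier mirroring the `isinstance` checks above:

```python
# Mirrors the branch conditions in serialize_value for the descriptor styles.
def classify(typ):
    if isinstance(typ, list) and len(typ) == 2:
        return 'vector'     # [element_type, length]
    elif isinstance(typ, list) and len(typ) == 1:
        return 'list'       # [element_type]
    elif isinstance(typ, str) and typ == 'bytes':
        return 'bytes'      # variable size
    elif isinstance(typ, str) and len(typ) > 5 and typ[:5] == 'bytes':
        return 'bytesN'     # fixed size, e.g. 'bytes32'
    elif hasattr(typ, 'fields'):
        return 'container'
    raise Exception("Type not recognized")

assert classify(['uint64', 3]) == 'vector'
assert classify(['uint64']) == 'list'
assert classify('bytes') == 'bytes'
assert classify('bytes32') == 'bytesN'
```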
@@ -162,10 +172,7 @@ def serialize_value(value, typ=None):
     elif hasattr(typ, 'fields'):
         values = [getattr(value, field) for field in typ.fields.keys()]
         types = list(typ.fields.values())
-        if is_constant_sized(typ):
-            return encode_fixed_size_container(values, types)
-        else:
-            return encode_variable_size_container(values, types)
+        return encode_series(values, types)
     else:
         print(value, typ)
         raise Exception("Type not recognized")
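
The container branch likewise collapses to a single `encode_series` call over the declared fields. A toy sketch of the `fields`-bearing convention it dispatches on (the `Pair` class is hypothetical, not part of the library):

```python
# Hypothetical container following the `fields` convention the branch expects.
class Pair:
    fields = {'a': 'uint64', 'b': 'bytes'}

    def __init__(self, a, b):
        self.a = a
        self.b = b

value = Pair(a=1, b=b'\x02\x03')
values = [getattr(value, field) for field in Pair.fields.keys()]
types = list(Pair.fields.values())
assert values == [1, b'\x02\x03']
assert types == ['uint64', 'bytes']  # these pairs feed encode_series
```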