Mirror of https://github.com/status-im/eth2.0-specs.git (synced 2025-03-01 18:50:35 +00:00)
Adds missing functions

parent 725dcf27f5
commit cfed4bb41c
@@ -5,7 +5,7 @@ from argparse import ArgumentParser
 from typing import Tuple, List
 
 
-IMPORTS = '''from typing import (
+PHASE0_IMPORTS = '''from typing import (
     Any,
     Dict,
     List,
@@ -27,6 +27,31 @@ from eth2spec.utils.bls_stub import (
 
 from eth2spec.utils.hash_function import hash
 '''
+PHASE1_IMPORTS = '''from typing import (
+    Any,
+    Dict,
+    List,
+    NewType,
+    Tuple,
+)
+
+from eth2spec.utils.minimal_ssz import (
+    SSZType,
+    hash_tree_root,
+    signing_root,
+    type_of,
+    empty,
+)
+
+from eth2spec.utils.bls_stub import (
+    bls_aggregate_pubkeys,
+    bls_verify,
+    bls_verify_multiple,
+)
+
+from eth2spec.utils.hash_function import hash
+import math
+'''
 NEW_TYPE_DEFINITIONS = '''
 Slot = NewType('Slot', int)  # uint64
 Epoch = NewType('Epoch', int)  # uint64
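Both header strings are plain Python source that objects_to_spec later prepends to the generated spec module. As a quick sanity check, such a header can be syntax-checked on its own; a minimal sketch (the abbreviated HEADER string below is a stand-in, not the actual PHASE1_IMPORTS constant):

```python
# Sketch only: verify that an import-header string parses as Python.
# HEADER is an abbreviated stand-in for PHASE1_IMPORTS.
HEADER = '''from typing import (
    Any,
    Dict,
    List,
    NewType,
    Tuple,
)

import math
'''

# compile() only parses the source; it does not execute the imports,
# so the check runs fine outside the eth2spec package.
compile(HEADER, '<phase1-imports>', 'exec')
```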
@@ -82,74 +107,8 @@ def apply_constants_preset(preset: Dict[str, Any]):
     init_SSZ_types()
 '''
 
 
-def split_and_label(regex_pattern: str, text: str) -> List[str]:
-    '''
-    Splits a string based on regex, but down not remove the matched text.
-    It subsequently labels the matches with their match
-    '''
-    find_pattern = r'''%s.*?(?=%s|$)''' % (regex_pattern, regex_pattern)
-    matches = re.findall(find_pattern, text, re.DOTALL)
-    return list(map(lambda x: [re.match(regex_pattern, x).group(0), x], matches))
-
-
-def inserter(oldfile: str, newfile: str) -> Tuple[str, str]:
-    '''
-    Searches for occurrences of @LabelName in oldfile and replaces them with instances of code wraped as follows:
-    # begin insert @LabelName
-    def foo(bar):
-        return bar
-    # end insert @LabelName
-    '''
-    new_insert_objects = re.split(r"(# begin insert |# end insert @[\w\d_]*\n)", newfile)
-    # Retrieve label from insert objects
-    def get_labeled_object(labeled_text):
-        label = re.match(r"@[\w\d_]*\n", labeled_text)
-        if label is not None:
-            label = label.group(0)
-            labeled_text = re.sub(label, '', labeled_text)
-        return {'label': label, 'text': labeled_text}
-    new_insert_objects = map(get_labeled_object, new_insert_objects)
-    # Find and replace labels
-    newfile = ""
-    for item in new_insert_objects:
-        if item['label'] is not None:
-            oldfile, insertions = re.subn('# %s' % item['label'], item['text'], oldfile)
-            if insertions == 0:
-                newfile.join('# begin insert %s/n%s# end insert %s' % (item['label'], item['text'], item['label']))
-        elif re.match(r"(# begin insert |# end insert )", item['text']) is None:
-            newfile += item['text']
-    return oldfile, newfile
-
-
-def merger(oldfile:str, newfile:str) -> str:
-    '''
-    Seeks out functions and objects in new and old files.
-    Replaces old objects with new ones if they exist.
-    '''
-    object_regex = r'''(?:\n[@\w]+[\s\w]*[='" "\.\w]*)|(?:\s{4}global_vars\["\w+"\])'''
-    ssz_object_regex = r'''(?:\w+|\s{4}global_vars\["\w+"\]) = SSZType\(\{\n'''
-    old_objects = split_and_label(object_regex, oldfile)
-    new_objects = split_and_label(object_regex, newfile)
-    for new_item in new_objects:
-        found_old = False
-        for index, old_item in enumerate(old_objects):
-            if old_item[0] == new_item[0]:
-                ssz_object_match = re.match(ssz_object_regex, new_item[1])
-                if ssz_object_match is not None:
-                    new_item[1] = re.sub(ssz_object_regex, '', new_item[1])
-                    old_item[1] = re.sub(r'\n\w*\}\)', '', old_item[1])
-                    old_item[1] += new_item[1]
-                else:
-                    old_item[1] = new_item[1]
-                found_old = True
-                old_objects[index] = old_item
-                break
-        if not found_old:
-            old_objects.append(new_item)
-    return ''.join(elem for elem in map(lambda x: x[1], old_objects))
-
-
-def objects_to_spec(functions, constants, ssz_objects):
+def objects_to_spec(functions, constants, ssz_objects, imports):
     functions_spec = '\n\n'.join(functions.values())
     constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]),constants))
     ssz_objects_instantiation_spec = '\n'.join(map(lambda x: '%s = SSZType(%s)' % (x, ssz_objects[x][:-1]), ssz_objects))
@@ -160,7 +119,7 @@ def objects_to_spec(functions, constants, ssz_objects):
         + ssz_objects_reinitialization_spec
     )
     return (
-        IMPORTS
+        imports
         + '\n' + NEW_TYPE_DEFINITIONS
         + '\n' + constants_spec
         + '\n' + ssz_objects_instantiation_spec
@@ -194,7 +153,7 @@ def combine_ssz_objects(old_objects, new_objects):
 
 def build_phase0_spec(sourcefile, outfile=None):
     functions, constants, ssz_objects = function_puller.get_spec(sourcefile)
-    spec = objects_to_spec(functions, constants, ssz_objects)
+    spec = objects_to_spec(functions, constants, ssz_objects, PHASE0_IMPORTS)
     if outfile is not None:
         with open(outfile, 'w') as out:
             out.write(spec)
@@ -208,7 +167,7 @@ def build_phase1_spec(phase0_sourcefile, phase1_sourcefile, outfile=None):
     functions = combine_functions(phase0_functions, phase1_functions)
     constants = combine_constants(phase0_constants, phase1_constants)
     ssz_objects = combine_functions(phase0_ssz_objects, phase1_ssz_objects)
-    spec = objects_to_spec(functions, constants, ssz_objects)
+    spec = objects_to_spec(functions, constants, ssz_objects, PHASE1_IMPORTS)
     if outfile is not None:
         with open(outfile, 'w') as out:
             out.write(spec)
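With the header threaded through as a parameter, each builder pairs its Markdown source(s) with the matching import block. A hedged usage sketch (the file paths and output names are illustrative, and the functions are assumed to be importable from this build script):

```python
# Sketch only: invoking the two builders after this change.
# Paths are hypothetical examples, not taken from the commit.
build_phase0_spec('specs/core/0_beacon-chain.md', outfile='phase0_spec.py')
build_phase1_spec('specs/core/0_beacon-chain.md',
                  'specs/core/1_custody-game.md',
                  outfile='phase1_spec.py')
# Internally the only difference is the header handed to objects_to_spec:
# PHASE0_IMPORTS for phase 0, PHASE1_IMPORTS for phase 1.
```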
@@ -15,7 +15,6 @@ def get_spec(file_name: str):
     constants = {}
     ssz_objects = defaultdict(str)
     function_matcher = re.compile(FUNCTION_REGEX)
-    # type_defs = []
     for linenum, line in enumerate(open(file_name).readlines()):
         line = line.rstrip()
         if pulling_from is None and len(line) > 0 and line[0] == '#' and line[-1] == '`':
@@ -26,9 +25,6 @@ def get_spec(file_name: str):
         elif line[:3] == '```':
             pulling_from = None
         else:
-            # # Handle SSZObjects
-            # if pulling_from == linenum and line == '{':
-            #     code_lines.append('%s = SSZType({' % current_name)
             # Handle function definitions
             if pulling_from is not None:
                 match = function_matcher.match(line)
@@ -31,7 +31,7 @@
     - [`BeaconState`](#beaconstate)
     - [`BeaconBlockBody`](#beaconblockbody)
 - [Helpers](#helpers)
-    - [`typeof`](#typeof)
+    - [`type_of`](#type_of)
     - [`empty`](#empty)
     - [`get_crosslink_chunk_count`](#get_crosslink_chunk_count)
     - [`get_custody_chunk_bit`](#get_custody_chunk_bit)
@@ -259,13 +259,13 @@ Add the following fields to the end of the specified container objects. Fields w
 
 ## Helpers
 
-### `typeof`
+### `type_of`
 
-The `typeof` function accepts and SSZ object as a single input and returns the corresponding SSZ type.
+The `type_of` function accepts an SSZ object as a single input and returns the corresponding SSZ type.
 
 ### `empty`
 
-The `empty` function accepts and SSZ type as input and returns an object of that type with all fields initialized to default values.
+The `empty` function accepts an SSZ type as input and returns an object of that type with all fields initialized to default values.
 
 ### `get_crosslink_chunk_count`
 
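The renamed helpers keep the documented contract: `type_of` maps an SSZ object to its type, and `empty` maps a type to a default-initialized instance. A minimal self-contained sketch of that contract, using a plain dataclass as a stand-in for an SSZ container (not the spec's SSZ machinery):

```python
# Sketch only: the documented type_of/empty contract with a stand-in container.
from dataclasses import dataclass


@dataclass
class StandInContainer:
    epoch: int = 0
    root: bytes = b'\x00' * 32


def type_of(obj):
    # An object's "SSZ type" is just its class in this sketch.
    return obj.__class__


def empty(typ):
    # A default-constructed instance plays the role of the all-default value.
    return typ()


obj = StandInContainer(epoch=5, root=b'\x11' * 32)
assert type_of(obj) is StandInContainer
assert empty(type_of(obj)) == StandInContainer()
```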
@@ -327,7 +327,7 @@ def get_validators_custody_reveal_period(state: BeaconState,
 ```python
 def replace_empty_or_append(list: List[Any], new_element: Any) -> int:
     for i in range(len(list)):
-        if list[i] == empty(typeof(new_element)):
+        if list[i] == empty(type_of(new_element)):
             list[i] = new_element
             return i
     list.append(new_element)
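`replace_empty_or_append` reuses the first default-valued slot in a list and only appends when none is free, returning the index it used. A self-contained sketch of that behaviour with plain ints, where the "empty" value is simply 0 (the spec checks emptiness with `empty(type_of(new_element))` instead, and the trailing return after the append is added here to complete the excerpt):

```python
# Sketch only: the reuse-or-append behaviour of replace_empty_or_append,
# with ints standing in for SSZ records (emptiness == default value 0).
from typing import Any, List


def replace_empty_or_append(lst: List[Any], new_element: Any) -> int:
    for i in range(len(lst)):
        if lst[i] == type(new_element)():  # default value of the element's type
            lst[i] = new_element
            return i
    lst.append(new_element)
    return len(lst) - 1


records = [0, 7, 0]
assert replace_empty_or_append(records, 3) == 0  # first empty slot reused
assert replace_empty_or_append(records, 9) == 2  # next empty slot reused
assert replace_empty_or_append(records, 5) == 3  # no empty slot left: append
assert records == [3, 7, 9, 5]
```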
@@ -492,7 +492,7 @@ def process_chunk_challenge(state: BeaconState,
         record.chunk_index != challenge.chunk_index
     )
     # Verify depth
-    depth = math.log2(next_power_of_two(get_custody_chunk_count(challenge.attestation)))
+    depth = math.ceil(math.log2(get_custody_chunk_count(challenge.attestation)))
     assert challenge.chunk_index < 2**depth
     # Add new chunk challenge record
     new_record = CustodyChunkChallengeRecord(
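The rewritten depth drops the `next_power_of_two` helper while computing the same value: for a chunk count n >= 1, log2 of the next power of two of n equals ceil(log2(n)). A quick check of that identity (sketch only; the local `next_power_of_two` is a stand-in for the spec helper):

```python
# Sketch only: the old and new depth formulas agree for small chunk counts.
import math


def next_power_of_two(n: int) -> int:
    # Stand-in: smallest power of two greater than or equal to n.
    return 1 if n <= 1 else 2 ** (n - 1).bit_length()


for chunk_count in range(1, 100):
    old_depth = math.log2(next_power_of_two(chunk_count))
    new_depth = math.ceil(math.log2(chunk_count))
    assert old_depth == new_depth
```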
@@ -580,7 +580,7 @@ def process_bit_challenge(state: BeaconState,
         deadline=get_current_epoch(state) + CUSTODY_RESPONSE_DEADLINE,
         data_root=challenge.attestation.data.crosslink.data_root,
         chunk_count=chunk_count,
-        chunk_bits_merkle_root=merkle_root(pad_to_power_of_2((challenge.chunk_bits))),
+        chunk_bits_merkle_root=hash_tree_root(challenge.chunk_bits),
         responder_key=challenge.responder_key,
     )
     replace_empty_or_append(state.custody_bit_challenge_records, new_record)
@@ -646,7 +646,7 @@ def process_bit_challenge_response(state: BeaconState,
     assert verify_merkle_branch(
         leaf=hash_tree_root(response.chunk),
         branch=response.data_branch,
-        depth=math.log2(next_power_of_two(challenge.chunk_count)),
+        depth=math.ceil(math.log2(challenge.chunk_count)),
         index=response.chunk_index,
         root=challenge.data_root,
     )
@@ -654,13 +654,13 @@ def process_bit_challenge_response(state: BeaconState,
     assert verify_merkle_branch(
         leaf=response.chunk_bits_leaf,
         branch=response.chunk_bits_branch,
-        depth=math.log2(next_power_of_two(challenge.chunk_count) // 256),
+        depth=math.ceil(math.log2(challenge.chunk_count)) >> 8,
         index=response.chunk_index // 256,
         root=challenge.chunk_bits_merkle_root
     )
     # Verify the chunk bit does not match the challenge chunk bit
     assert (get_custody_chunk_bit(challenge.responder_key, response.chunk)
             != get_bitfield_bit(challenge.chunk_bits_leaf, response.chunk_index % 256))
     # Clear the challenge
     records = state.custody_bit_challenge_records
     records[records.index(challenge)] = CustodyBitChallengeRecord()
@@ -58,6 +58,16 @@ class Vector():
         return self.length
 
 
+def type_of(obj):
+    return obj.__class__
+
+
+def empty(obj):
+    for field in obj.fields:
+        field = get_zero_value(field)
+    return obj
+
+
 def is_basic(typ):
     # if not a string, it is a complex, and cannot be basic
     if not isinstance(typ, str):