openarray -> openArray

Zahary Karadjov 2020-10-28 20:35:31 +02:00 committed by zah
parent 18639c3eff
commit 14b2d4324d
26 changed files with 78 additions and 78 deletions
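Nim compares identifiers case-insensitively except for the first character (and ignores underscores), so openarray and openArray name the same built-in type and this rename is purely cosmetic: it normalizes the code base to the canonical openArray spelling used by the standard library, which is also what --styleCheck would expect. A minimal standalone sketch (plain Nim, not code from this repository; sum and sumLegacySpelling are illustrative names) showing that both spellings resolve to the same symbol:

# Illustrative sketch only, not project code.
proc sum(xs: openArray[int]): int =
  # openArray accepts arrays, sequences and slices without copying
  for x in xs: result += x

proc sumLegacySpelling(xs: openarray[int]): int =
  # same parameter type as above, only written in the older all-lowercase spelling
  for x in xs: result += x

when isMainModule:
  echo sum([1, 2, 3])              # 6
  echo sumLegacySpelling(@[4, 5])  # 9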

View File

@ -11,7 +11,7 @@ import
type
DbSeq*[T] = object
insertStmt: SqliteStmt[openarray[byte], void]
insertStmt: SqliteStmt[openArray[byte], void]
selectStmt: SqliteStmt[int64, seq[byte]]
recordCount: int64
@ -132,7 +132,7 @@ proc init*[T](Seq: type DbSeq[T], db: SqStoreRef, name: string): Seq =
let
insertStmt = db.prepareStmt(
"INSERT INTO " & name & "(value) VALUES (?);",
openarray[byte], void).expect("this is a valid statement")
openArray[byte], void).expect("this is a valid statement")
selectStmt = db.prepareStmt(
"SELECT value FROM " & name & " WHERE id = ?;",

View File

@ -61,7 +61,7 @@ const
MaxEmptySlotCount* = uint64(10*60) div SECONDS_PER_SLOT
# TODO stew/sequtils2
template findIt*(s: openarray, predicate: untyped): int =
template findIt*(s: openArray, predicate: untyped): int =
var res = -1
for i, it {.inject.} in s:
if predicate:

View File

@ -86,7 +86,7 @@ proc new*(T: type Eth2DiscoveryProtocol,
conf: BeaconNodeConf,
ip: Option[ValidIpAddress], tcpPort, udpPort: Port,
pk: PrivateKey,
enrFields: openarray[(string, seq[byte])], rng: ref BrHmacDrbgContext):
enrFields: openArray[(string, seq[byte])], rng: ref BrHmacDrbgContext):
T {.raises: [Exception, Defect].} =
# TODO
# Implement more configuration options:

View File

@ -33,12 +33,12 @@ export proto_array.len
# ----------------------------------------------------------------------
func compute_deltas(
deltas: var openarray[Delta],
deltas: var openArray[Delta],
indices: Table[Eth2Digest, Index],
indices_offset: Index,
votes: var openArray[VoteTracker],
old_balances: openarray[Gwei],
new_balances: openarray[Gwei]
old_balances: openArray[Gwei],
new_balances: openArray[Gwei]
): FcResult[void]
# TODO: raises [Defect] - once https://github.com/nim-lang/Nim/issues/12862 is fixed
# https://github.com/status-im/nimbus-eth2/pull/865#pullrequestreview-389117232
@ -377,12 +377,12 @@ func prune*(self: var ForkChoice): FcResult[void] =
self.backend.prune(self.checkpoints.finalized.root)
func compute_deltas(
deltas: var openarray[Delta],
deltas: var openArray[Delta],
indices: Table[Eth2Digest, Index],
indices_offset: Index,
votes: var openArray[VoteTracker],
old_balances: openarray[Gwei],
new_balances: openarray[Gwei]
old_balances: openArray[Gwei],
new_balances: openArray[Gwei]
): FcResult[void] =
## Update `deltas`
## between old and new balances

View File

@ -104,7 +104,7 @@ func init*(T: type ProtoArray,
func apply_score_changes*(
self: var ProtoArray,
deltas: var openarray[Delta],
deltas: var openArray[Delta],
justified_epoch: Epoch,
finalized_epoch: Epoch
): FcResult[void] =

View File

@ -23,7 +23,7 @@ import
const depositContractLimit* = Limit(1'u64 shl (DEPOSIT_CONTRACT_TREE_DEPTH - 1'u64))
func attachMerkleProofs*(deposits: var openarray[Deposit]) =
func attachMerkleProofs*(deposits: var openArray[Deposit]) =
let depositsRoots = mapIt(deposits, hash_tree_root(it.data))
var incrementalMerkleProofs = createMerkleizer(depositContractLimit)
@ -32,7 +32,7 @@ func attachMerkleProofs*(deposits: var openarray[Deposit]) =
incrementalMerkleProofs.addChunkAndGenMerkleProof(depositsRoots[i], deposits[i].proof)
deposits[i].proof[32].data[0..7] = toBytesLE uint64(i + 1)
template getProof*(proofs: seq[Eth2Digest], idxParam: int): openarray[Eth2Digest] =
template getProof*(proofs: seq[Eth2Digest], idxParam: int): openArray[Eth2Digest] =
let
idx = idxParam
startIdx = idx * DEPOSIT_CONTRACT_TREE_DEPTH

View File

@ -565,7 +565,7 @@ proc release*[A, B](pool: PeerPool[A, B], peer: A) =
dec(pool.acqOutPeersCount)
pool.fireNotEmptyEvent(item[])
proc release*[A, B](pool: PeerPool[A, B], peers: openarray[A]) {.inline.} =
proc release*[A, B](pool: PeerPool[A, B], peers: openArray[A]) {.inline.} =
## Release array of peers ``peers`` back to PeerPool ``pool``.
for item in peers:
pool.release(item)

View File

@ -15,7 +15,7 @@ import
../../nbench/bench_lab
# https://github.com/ethereum/eth2.0-specs/blob/v1.0.0-rc.0/specs/phase0/beacon-chain.md#is_valid_merkle_branch
func is_valid_merkle_branch*(leaf: Eth2Digest, branch: openarray[Eth2Digest],
func is_valid_merkle_branch*(leaf: Eth2Digest, branch: openArray[Eth2Digest],
depth: int, index: uint64,
root: Eth2Digest): bool {.nbench.}=
## Check if ``leaf`` at ``index`` verifies against the Merkle ``root`` and
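For reference, the function above follows the is_valid_merkle_branch pseudocode in the spec linked just above this hunk: starting from the leaf, hash it together with each sibling in the branch, picking the left/right order at level i from bit i of the index, and compare the final digest with the root. A rough standalone sketch of that loop, assuming the nimcrypto library (the SHA-256 implementation already used elsewhere in this diff) is available; hashPair and isValidMerkleBranch are illustrative names, not the project's API:

# Illustrative sketch only, not project code; assumes nimcrypto is installed.
import nimcrypto

type Digest = MDigest[256]

proc hashPair(a, b: Digest): Digest =
  # SHA-256 over the 64-byte concatenation of the two child digests
  var buf: array[64, byte]
  buf[0 .. 31] = a.data
  buf[32 .. 63] = b.data
  sha256.digest(buf)

proc isValidMerkleBranch(leaf: Digest, branch: openArray[Digest],
                         depth: int, index: uint64, root: Digest): bool =
  # walk from the leaf towards the root; bit i of `index` says whether the
  # current node is a right (1) or left (0) child at level i
  var value = leaf
  for i in 0 ..< depth:
    if ((index shr i) and 1) == 1:
      value = hashPair(branch[i], value)
    else:
      value = hashPair(value, branch[i])
  value == root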

View File

@ -81,7 +81,7 @@ func get_randao_mix*(state: BeaconState,
## Returns the randao mix at a recent ``epoch``.
state.randao_mixes[epoch mod EPOCHS_PER_HISTORICAL_VECTOR]
func bytes_to_uint64*(data: openarray[byte]): uint64 =
func bytes_to_uint64*(data: openArray[byte]): uint64 =
doAssert data.len == 8
# Little-endian data representation

View File

@ -303,8 +303,8 @@ template add(m: var Mnemonic, s: cstring) =
proc generateMnemonic*(
rng: var BrHmacDrbgContext,
words: openarray[cstring] = englishWords,
entropyParam: openarray[byte] = @[]): Mnemonic =
words: openArray[cstring] = englishWords,
entropyParam: openArray[byte] = @[]): Mnemonic =
## Generates a valid BIP-0039 mnenomic:
## https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki#generating-the-mnemonic
var entropy: seq[byte]
@ -413,7 +413,7 @@ proc keyFromPath*(mnemonic: Mnemonic,
path: KeyPath): ValidatorPrivKey =
deriveChildKey(deriveMasterKey(mnemonic, password), path)
proc shaChecksum(key, cipher: openarray[byte]): Sha256Digest =
proc shaChecksum(key, cipher: openArray[byte]): Sha256Digest =
var ctx: sha256
ctx.init()
ctx.update(key)
@ -421,7 +421,7 @@ proc shaChecksum(key, cipher: openarray[byte]): Sha256Digest =
result = ctx.finish()
ctx.clear()
proc writeJsonHexString(s: OutputStream, data: openarray[byte])
proc writeJsonHexString(s: OutputStream, data: openArray[byte])
{.raises: [IOError, Defect].} =
s.write '"'
s.write ncrutils.toHex(data, {HexFlags.LowerCase})
@ -624,10 +624,10 @@ proc decryptNetKeystore*(nkeystore: JsonString,
proc createCryptoField(kdfKind: KdfKind,
rng: var BrHmacDrbgContext,
secret: openarray[byte],
secret: openArray[byte],
password = KeystorePass.init "",
salt: openarray[byte] = @[],
iv: openarray[byte] = @[]): Crypto =
salt: openArray[byte] = @[],
iv: openArray[byte] = @[]): Crypto =
type AES = aes128
let kdfSalt =
@ -685,8 +685,8 @@ proc createNetKeystore*(kdfKind: KdfKind,
privKey: lcrypto.PrivateKey,
password = KeystorePass.init "",
description = "",
salt: openarray[byte] = @[],
iv: openarray[byte] = @[]): NetKeystore =
salt: openArray[byte] = @[],
iv: openArray[byte] = @[]): NetKeystore =
let
secret = privKey.getBytes().get()
cryptoField = createCryptoField(kdfKind, rng, secret, password, salt, iv)
@ -707,8 +707,8 @@ proc createKeystore*(kdfKind: KdfKind,
password = KeystorePass.init "",
path = KeyPath "",
description = "",
salt: openarray[byte] = @[],
iv: openarray[byte] = @[]): Keystore =
salt: openArray[byte] = @[],
iv: openArray[byte] = @[]): Keystore =
let
secret = privKey.toRaw[^32..^1]
cryptoField = createCryptoField(kdfKind, rng, secret, password, salt, iv)
@ -727,8 +727,8 @@ proc createWallet*(kdfKind: KdfKind,
rng: var BrHmacDrbgContext,
seed: KeySeed,
name = WalletName "",
salt: openarray[byte] = @[],
iv: openarray[byte] = @[],
salt: openArray[byte] = @[],
iv: openArray[byte] = @[],
password = KeystorePass.init "",
nextAccount = none(Natural),
pretty = true): Wallet =

View File

@ -123,7 +123,7 @@ proc getStabilitySubnetLength*(): uint64 =
rand(EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION.int).uint64
proc get_attestation_subnet_changes*(
state: BeaconState, attachedValidators: openarray[ValidatorIndex],
state: BeaconState, attachedValidators: openArray[ValidatorIndex],
prevAttestationSubnets: AttestationSubnets, epoch: Epoch):
tuple[a: AttestationSubnets, b: set[uint8], c: set[uint8]] =
static: doAssert ATTESTATION_SUBNET_COUNT == 64 # Fits in a set[uint8]

View File

@ -23,7 +23,7 @@ type
BitArray*[bits: static int] = object
bytes*: array[(bits + 7) div 8, byte]
func bitsLen*(bytes: openarray[byte]): int =
func bitsLen*(bytes: openArray[byte]): int =
let
bytesCount = bytes.len
lastByte = bytes[bytesCount - 1]
@ -63,14 +63,14 @@ func toBytesLE(x: uint): array[sizeof(x), byte] =
else:
static: doAssert false, "requires a 32-bit or 64-bit platform"
func loadLEBytes(WordType: type, bytes: openarray[byte]): WordType =
func loadLEBytes(WordType: type, bytes: openArray[byte]): WordType =
# TODO: this is a temporary proc until the endians API is improved
var shift = 0
for b in bytes:
result = result or (WordType(b) shl shift)
shift += 8
func storeLEBytes(value: SomeUnsignedInt, dst: var openarray[byte]) =
func storeLEBytes(value: SomeUnsignedInt, dst: var openArray[byte]) =
doAssert dst.len <= sizeof(value)
let bytesLE = toBytesLE(value)
copyMem(addr dst[0], unsafeAddr bytesLE[0], dst.len)
@ -213,8 +213,8 @@ template clearBit*(a: var BitArray, pos: Natural) =
# TODO: Submit this to the standard library as `cmp`
# At the moment, it doesn't work quite well because Nim selects
# the generic cmp[T] from the system module instead of choosing
# the openarray overload
func compareArrays[T](a, b: openarray[T]): int =
# the openArray overload
func compareArrays[T](a, b: openArray[T]): int =
result = cmp(a.len, b.len)
if result != 0: return

View File

@ -22,7 +22,7 @@ proc setOutputSize(list: var List, length: int) {.raisesssz.} =
# fromSszBytes copies the wire representation to a Nim variable,
# assuming there's enough data in the buffer
func fromSszBytes*(T: type UintN, data: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type UintN, data: openArray[byte]): T {.raisesssz.} =
## Convert directly to bytes the size of the int. (e.g. ``uint16 = 2 bytes``)
## All integers are serialized as **little endian**.
if data.len != sizeof(result):
@ -30,46 +30,46 @@ func fromSszBytes*(T: type UintN, data: openarray[byte]): T {.raisesssz.} =
T.fromBytesLE(data)
func fromSszBytes*(T: type bool, data: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type bool, data: openArray[byte]): T {.raisesssz.} =
# Strict: only allow 0 or 1
if data.len != 1 or byte(data[0]) > byte(1):
raise newException(MalformedSszError, "invalid boolean value")
data[0] == 1
func fromSszBytes*(T: type Eth2Digest, data: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type Eth2Digest, data: openArray[byte]): T {.raisesssz.} =
if data.len != sizeof(result.data):
raiseIncorrectSize T
copyMem(result.data.addr, unsafeAddr data[0], sizeof(result.data))
func fromSszBytes*(T: type GraffitiBytes, data: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type GraffitiBytes, data: openArray[byte]): T {.raisesssz.} =
if data.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr data[0], sizeof(result))
template fromSszBytes*(T: type Slot, bytes: openarray[byte]): Slot =
template fromSszBytes*(T: type Slot, bytes: openArray[byte]): Slot =
Slot fromSszBytes(uint64, bytes)
template fromSszBytes*(T: type Epoch, bytes: openarray[byte]): Epoch =
template fromSszBytes*(T: type Epoch, bytes: openArray[byte]): Epoch =
Epoch fromSszBytes(uint64, bytes)
func fromSszBytes*(T: type ForkDigest, bytes: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type ForkDigest, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))
func fromSszBytes*(T: type Version, bytes: openarray[byte]): T {.raisesssz.} =
func fromSszBytes*(T: type Version, bytes: openArray[byte]): T {.raisesssz.} =
if bytes.len != sizeof(result):
raiseIncorrectSize T
copyMem(result.addr, unsafeAddr bytes[0], sizeof(result))
template fromSszBytes*(T: type BitSeq, bytes: openarray[byte]): auto =
template fromSszBytes*(T: type BitSeq, bytes: openArray[byte]): auto =
BitSeq @bytes
proc `[]`[T, U, V](s: openArray[T], x: HSlice[U, V]) {.error:
"Please don't use openarray's [] as it allocates a result sequence".}
"Please don't use openArray's [] as it allocates a result sequence".}
template checkForForbiddenBits(ResulType: type,
input: openarray[byte],
input: openArray[byte],
expectedBits: static int64) =
## This checks if the input contains any bits set above the maximum
## sized allowed. We only need to check the last byte to verify this:
@ -84,7 +84,7 @@ template checkForForbiddenBits(ResulType: type,
if (input[^1] and forbiddenBitsMask) != 0:
raiseIncorrectSize ResulType
func readSszValue*[T](input: openarray[byte],
func readSszValue*[T](input: openArray[byte],
val: var T, updateRoot: bool = true) {.raisesssz.} =
mixin fromSszBytes, toSszType
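The decoding rules quoted above are simple enough to spell out by hand: a fixed-size unsigned integer must occupy exactly sizeof(T) bytes and is read as little endian (the real code delegates this to fromBytesLE), and a boolean is a single byte that may only hold 0 or 1. A standalone sketch in plain Nim; uint64FromSszBytes and boolFromSszBytes are illustrative names, not the functions shown above:

# Illustrative sketch only, not project code.
proc uint64FromSszBytes(data: openArray[byte]): uint64 =
  # little endian: byte i contributes data[i] * 256^i
  doAssert data.len == 8, "SSZ uint64 is exactly 8 bytes"
  for i in 0 ..< 8:
    result = result or (uint64(data[i]) shl (8 * i))

proc boolFromSszBytes(data: openArray[byte]): bool =
  # strict decoding: exactly one byte, and only 0 or 1 are legal values
  doAssert data.len == 1 and data[0] <= 1
  data[0] == 1

when isMainModule:
  echo uint64FromSszBytes([byte 0x40, 0xE2, 0x01, 0, 0, 0, 0, 0])  # 123456
  echo boolFromSszBytes([byte 1])                                   # true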

View File

@ -133,12 +133,12 @@ func navigatePath*(n: DynamicSszNavigator, path: string): DynamicSszNavigator {.
raises: [Defect, IOError, ValueError, MalformedSszError, SszSizeMismatchError] .} =
navigatePathImpl n, split(path, '/')
func navigatePath*(n: DynamicSszNavigator, path: openarray[string]): DynamicSszNavigator {.
func navigatePath*(n: DynamicSszNavigator, path: openArray[string]): DynamicSszNavigator {.
raises: [Defect, IOError, ValueError, MalformedSszError, SszSizeMismatchError] .} =
navigatePathImpl n, path
func init*(T: type DynamicSszNavigator,
bytes: openarray[byte], Navigated: type): T =
bytes: openArray[byte], Navigated: type): T =
T(m: MemRange(startAddr: unsafeAddr bytes[0], length: bytes.len),
typ: typeInfo(Navigated))

View File

@ -34,7 +34,7 @@ type
totalChunks: uint64
topIndex: int
template chunks*(m: SszChunksMerkleizer): openarray[Eth2Digest] =
template chunks*(m: SszChunksMerkleizer): openArray[Eth2Digest] =
m.combinedChunks.toOpenArray(0, m.topIndex)
func digest(a, b: openArray[byte]): Eth2Digest =
@ -56,7 +56,7 @@ func digest(a, b, c: openArray[byte]): Eth2Digest =
h.update c
trs "HASH RESULT ", result
func mergeBranches(existing: Eth2Digest, newData: openarray[byte]): Eth2Digest =
func mergeBranches(existing: Eth2Digest, newData: openArray[byte]): Eth2Digest =
trs "MERGING BRANCHES OPEN ARRAY"
let paddingBytes = bytesPerChunk - newData.len
@ -77,7 +77,7 @@ func computeZeroHashes: array[sizeof(Limit) * 8, Eth2Digest] =
const zeroHashes* = computeZeroHashes()
func addChunk*(merkleizer: var SszChunksMerkleizer, data: openarray[byte]) =
func addChunk*(merkleizer: var SszChunksMerkleizer, data: openArray[byte]) =
doAssert data.len > 0 and data.len <= bytesPerChunk
if getBitLE(merkleizer.totalChunks, 0):
@ -109,7 +109,7 @@ template isOdd(x: SomeNumber): bool =
func addChunkAndGenMerkleProof*(merkleizer: var SszChunksMerkleizer,
hash: Eth2Digest,
outProof: var openarray[Eth2Digest]) =
outProof: var openArray[Eth2Digest]) =
var
hashWrittenToMerkleizer = false
hash = hash
@ -146,7 +146,7 @@ func completeStartedChunk(merkleizer: var SszChunksMerkleizer,
break
func addChunksAndGenMerkleProofs*(merkleizer: var SszChunksMerkleizer,
chunks: openarray[Eth2Digest]): seq[Eth2Digest] =
chunks: openArray[Eth2Digest]): seq[Eth2Digest] =
doAssert chunks.len > 0 and merkleizer.topIndex > 0
let proofHeight = merkleizer.topIndex + 1
@ -300,7 +300,7 @@ proc init*(S: type SszMerkleizer): S =
totalChunks: 0)
proc init*(S: type SszMerkleizer,
combinedChunks: openarray[Eth2Digest],
combinedChunks: openArray[Eth2Digest],
totalChunks: uint64): S =
new result.combinedChunks
result.combinedChunks[][0 ..< combinedChunks.len] = combinedChunks
@ -321,10 +321,10 @@ proc clone*[L: static[Limit]](cloned: SszMerkleizer[L]): SszMerkleizer[L] =
template addChunksAndGenMerkleProofs*(
merkleizer: var SszMerkleizer,
chunks: openarray[Eth2Digest]): seq[Eth2Digest] =
chunks: openArray[Eth2Digest]): seq[Eth2Digest] =
addChunksAndGenMerkleProofs(merkleizer.m, chunks)
template addChunk*(merkleizer: var SszMerkleizer, data: openarray[byte]) =
template addChunk*(merkleizer: var SszMerkleizer, data: openArray[byte]) =
addChunk(merkleizer.m, data)
template totalChunks*(merkleizer: SszMerkleizer): uint64 =
@ -409,7 +409,7 @@ template writeBytesLE(chunk: var array[bytesPerChunk, byte], atParam: int,
chunk[at ..< at + sizeof(val)] = toBytesLE(val)
func chunkedHashTreeRootForBasicTypes[T](merkleizer: var SszChunksMerkleizer,
arr: openarray[T]): Eth2Digest =
arr: openArray[T]): Eth2Digest =
static:
doAssert T is BasicType
@ -687,7 +687,7 @@ func hash_tree_root*(x: auto): Eth2Digest {.raises: [Defect].} =
trs "HASH TREE ROOT FOR ", name(type x), " = ", "0x", $result
iterator hash_tree_roots_prefix*[T](lst: openarray[T], limit: static Limit): Eth2Digest =
iterator hash_tree_roots_prefix*[T](lst: openArray[T], limit: static Limit): Eth2Digest =
# This is a particular type's instantiation of a general fold, reduce,
# accumulation, prefix sums, etc family of operations. As long as that
# Eth1 deposit case is the only notable example -- the usual uses of a

View File

@ -15,7 +15,7 @@ type
SszNavigator*[T] = object
m: MemRange
func sszMount*(data: openarray[byte], T: type): SszNavigator[T] =
func sszMount*(data: openArray[byte], T: type): SszNavigator[T] =
let startAddr = unsafeAddr data[0]
SszNavigator[T](m: MemRange(startAddr: startAddr, length: data.len))

View File

@ -175,7 +175,7 @@ proc writeValue*(w: var SszWriter, x: auto) {.gcsafe, raises: [Defect, IOError].
func sszSize*(value: auto): int {.gcsafe, raises: [Defect].}
func sszSizeForVarSizeList[T](value: openarray[T]): int =
func sszSizeForVarSizeList[T](value: openArray[T]): int =
result = len(value) * offsetSize
for elem in value:
result += sszSize(toSszType elem)

View File

@ -141,7 +141,7 @@ proc validate*[T](sq: SyncQueue[T],
return await sblock.resfut
proc getShortMap*[T](req: SyncRequest[T],
data: openarray[SignedBeaconBlock]): string =
data: openArray[SignedBeaconBlock]): string =
## Returns all slot numbers in ``data`` as placement map.
var res = newStringOfCap(req.count)
var slider = req.slot
@ -169,7 +169,7 @@ proc cmp*[T](a, b: SyncRequest[T]): int =
result = cmp(uint64(a.slot), uint64(b.slot))
proc checkResponse*[T](req: SyncRequest[T],
data: openarray[SignedBeaconBlock]): bool =
data: openArray[SignedBeaconBlock]): bool =
if len(data) == 0:
# Impossible to verify empty response.
return true
@ -199,7 +199,7 @@ proc checkResponse*[T](req: SyncRequest[T],
return false
proc getFullMap*[T](req: SyncRequest[T],
data: openarray[SignedBeaconBlock]): string =
data: openArray[SignedBeaconBlock]): string =
# Returns all slot numbers in ``data`` as comma-delimeted string.
result = mapIt(data, $it.message.slot).join(", ")

View File

@ -263,14 +263,14 @@ func subkey(kind: static SlashingKeyKind, valIndex: uint32): array[5, byte] =
result[1..<5] = toBytesBE(valIndex)
result[0] = byte ord(kind)
proc put(db: SlashingProtectionDB, key: openarray[byte], v: auto) =
proc put(db: SlashingProtectionDB, key: openArray[byte], v: auto) =
db.backend.put(
key,
SSZ.encode(v)
).expect("working database")
proc get(db: SlashingProtectionDB,
key: openarray[byte],
key: openArray[byte],
T: typedesc): Opt[T] =
const ExpectedNodeSszSize = block:

View File

@ -140,8 +140,8 @@ proc main(nb_samples: Natural) =
# TODO: update with IETF API (Eth2 v0.11.1)
# func fastAggregateVerify*[T: byte|char](
# publicKeys: openarray[PublicKey],
# message: openarray[T],
# publicKeys: openArray[PublicKey],
# message: openArray[T],
# signature: Signature # Aggregated signature
# ): bool

View File

@ -44,7 +44,7 @@ var
sort(validators)
proc findOrDefault[K, V](tupleList: openarray[(K, V)], key: K, default: V): V =
proc findOrDefault[K, V](tupleList: openArray[(K, V)], key: K, default: V): V =
for t in tupleList:
if t[0] == key:
return t[1]

View File

@ -9,7 +9,7 @@ a pointer + an environment that stores the captured state necessary
to execute the function.
The Nim compiler has a limited form of borrow checking and prevents
capturing mutable variable or openarray (pointer+length pair).
capturing mutable variable or openArray (pointer+length pair).
It otherwise copies the capture variables in case of objects with value semantics
or increment the reference count in case of ref object.
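The capture rules described in this passage can be shown in a few lines of plain Nim (illustrative code, not part of this change): values with value semantics are copied into the hidden closure environment, while borrowed views such as openArray parameters are rejected at compile time.

# Illustrative sketch only, not project code.
proc makeGreeter(name: string): proc (): string =
  # `name` has value semantics, so it is copied into the closure environment
  result = proc (): string = "hello, " & name

proc cannotCapture(xs: openArray[int]) =
  # The commented line would not compile: an openArray parameter is only a
  # borrowed pointer+length view, and capturing it could outlive the caller's data.
  # let f = proc (): int = xs.len
  discard

when isMainModule:
  let greet = makeGreeter("nimbus")
  echo greet()   # hello, nimbus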

View File

@ -33,14 +33,14 @@ proc newKeyPair(rng: var BrHmacDrbgContext): BlsResult[tuple[pub: ValidatorPubKe
# this is being indexed inside "mock_deposits.nim" by a value up to `validatorCount`
# which is `num_validators` which is `MIN_GENESIS_ACTIVE_VALIDATOR_COUNT`
proc genMockPrivKeys(privkeys: var openarray[ValidatorPrivKey]) =
proc genMockPrivKeys(privkeys: var openArray[ValidatorPrivKey]) =
let rng = newRng()
for i in 0 ..< privkeys.len:
let pair = newKeyPair(rng[])[]
privkeys[i] = pair.priv
func genMockPubKeys(pubkeys: var openarray[ValidatorPubKey],
privkeys: openarray[ValidatorPrivKey]) =
func genMockPubKeys(pubkeys: var openArray[ValidatorPubKey],
privkeys: openArray[ValidatorPrivKey]) =
for i in 0 ..< privkeys.len:
pubkeys[i] = toPubKey(privkeys[i])

View File

@ -57,7 +57,7 @@ proc parseTest*(path: string, Format: typedesc[Json or SSZ], T: typedesc): T =
template readFileBytes*(path: string): seq[byte] =
cast[seq[byte]](readFile(path))
proc sszDecodeEntireInput*(input: openarray[byte], Decoded: type): Decoded =
proc sszDecodeEntireInput*(input: openArray[byte], Decoded: type): Decoded =
var stream = unsafeMemoryInput(input)
var reader = init(SszReader, stream)
reader.readValue(result)

View File

@ -86,7 +86,7 @@ proc getCheckpoints*(epoch: Epoch): tuple[c1, c2, c3, c4, c5: Checkpoint] =
proc putCheckpointsInBlockRoots*(
state: var BeaconState,
checkpoints: openarray[Checkpoint]) =
checkpoints: openArray[Checkpoint]) =
for c in checkpoints:
let idx = c.epoch.compute_start_slot_at_epoch() mod SLOTS_PER_HISTORICAL_ROOT
state.block_roots[idx] = c.root

View File

@ -21,7 +21,7 @@ type SparseMerkleTree[Depth: static int] = object
nnznodes*: array[Depth+1, seq[Eth2Digest]] # nodes that leads to non-zero leaves
func merkleTreeFromLeaves(
values: openarray[Eth2Digest],
values: openArray[Eth2Digest],
Depth: static[int] = DEPOSIT_CONTRACT_TREE_DEPTH
): SparseMerkleTree[Depth] =
## Depth should be the same as is_valid_merkle_branch
@ -167,7 +167,7 @@ proc testMerkleMinimal*(): bool =
doAssert testMerkleMinimal()
proc compareTreeVsMerkleizer(hashes: openarray[Eth2Digest], limit: static Limit) =
proc compareTreeVsMerkleizer(hashes: openArray[Eth2Digest], limit: static Limit) =
const treeHeight = binaryTreeHeight(limit)
let tree = merkleTreeFromLeaves(hashes, treeHeight)
@ -228,7 +228,7 @@ for prelude in [0, 1, 2, 5, 6, 12, 13, 16]:
testMultiProofsGeneration(prelude, proofs, followUpHashes, 128)
testMultiProofsGeneration(prelude, proofs, followUpHashes, 5000)
func attachMerkleProofsReferenceImpl(deposits: var openarray[Deposit]) =
func attachMerkleProofsReferenceImpl(deposits: var openArray[Deposit]) =
let
deposit_data_roots = mapIt(deposits, it.data.hash_tree_root)
merkle_tree = merkleTreeFromLeaves(deposit_data_roots)