Add seedhash computation + seed fixes

commit 237b01f62e (parent b8af327118)

@@ -5,7 +5,7 @@ import math, sequtils, algorithm,
   keccak_tiny
 
 import ./private/[primes, casting, functional, intmath, concat]
-export toHex, hexToSeqBytesBE
+export toHex, hexToSeqBytesBE, toByteArrayBE
 
 # TODO: Switching from default int to uint64
 # Note: array/seq indexing requires an Ordinal, uint64 are not.

@@ -64,7 +64,7 @@ proc get_cachesize_lut*(block_number: Natural): uint64 {.noSideEffect, inline.}
 # ###############################################################################
 # Cache generation
 
-proc mkcache*(cache_size: int, seed: seq[byte]): seq[Hash[512]] {.noSideEffect.}=
+proc mkcache*(cache_size: int, seed: Hash[256]): seq[Hash[512]] {.noSideEffect.}=
 
   # The starting cache size is a set of 524288 64-byte values
 

@@ -72,7 +72,7 @@ proc mkcache*(cache_size: int, seed: seq[byte]): seq[Hash[512]] {.noSideEffect.}
 
   # Sequentially produce the initial dataset
   result = newSeq[Hash[512]](n)
-  result[0] = keccak512 seed
+  result[0] = keccak512 seed.toByteArrayBE
 
   for i in 1 ..< n:
     result[i] = keccak512 result[i-1].toU512

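Note: taken together, the two mkcache hunks switch the cache seed from raw bytes to the typed Hash[256] produced by the new get_seedhash below, and keccak512 now hashes the seed's big-endian byte view, so cache[0] = keccak512(seed) as in the spec. A minimal sketch of the intended call site; the int/uint32 conversions here are illustrative assumptions, not part of this commit:

    # Build the light-verification cache for a block's epoch:
    let blockNumber = 60_000
    let seed  = get_seedhash(blockNumber.uint32)        # typed 256-bit epoch seed
    let cache = mkcache(get_cachesize(blockNumber).int, seed)
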
@@ -149,7 +149,7 @@ proc calc_dataset(full_size: Natural, cache: seq[Hash[512]]): seq[Hash[512]] {.n
 # ###############################################################################
 # Main loop
 
-type HashimotoHash = tuple[mix_digest: array[4, uint32], result: Hash[256]]
+type HashimotoHash = tuple[mix_digest: array[8, uint32], value: Hash[256]]
 type DatasetLookup = proc(i: Natural): Hash[512] {.noSideEffect.}
 
 proc initMix(s: U512): array[MIX_BYTES div HASH_BYTES * 512 div 32, uint32] {.noInit, noSideEffect,inline.}=

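Note: mix_digest grows from 4 to 8 uint32 words because the compressed mix is 256 bits (the 32-word working mix folds four-to-one), and the field previously called result is renamed to value so it no longer shadows Nim's implicit result variable in procs that build this tuple. A self-contained sketch of that four-to-one fold, assuming only ethash's standard FNV prime:

    const FNV_PRIME = 0x01000193'u32
    proc fnv(a, b: uint32): uint32 = (a * FNV_PRIME) xor b

    var mix: array[32, uint32]   # 1024-bit working mix (MIX_BYTES div HASH_BYTES * 512 div 32 words)
    var cmix: array[8, uint32]   # 32 words compressed 4-to-1 -> 8 words = 256 bits
    for i in 0 ..< 8:
      let idx = i * 4
      cmix[i] = mix[idx].fnv(mix[idx+1]).fnv(mix[idx+2]).fnv(mix[idx+3])
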
@@ -192,7 +192,7 @@ proc hashimoto(header: Hash[256],
     let idx = i*4
     result.mix_digest[i] = mix[idx].fnv(mix[idx+1]).fnv(mix[idx+2]).fnv(mix[idx+3])
 
-  result.result = keccak256 concat_hash(s, result.mix_digest)
+  result.value = keccak256 concat_hash(s, result.mix_digest)
 
 
 proc hashimoto_light(full_size:Natural, cache: seq[Hash[512]],

@@ -214,3 +214,8 @@ proc hashimoto_full(full_size:Natural, dataset: seq[Hash[512]],
             full)
 
+# ###############################################################################
+# Defining the seed hash
+
+proc get_seedhash*(block_number: uint32): Hash[256] {.noSideEffect.} =
+  for i in 0'u32 ..< block_number div EPOCH_LENGTH:
+    result = keccak256 result.toByteArrayBE

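Note: this is the spec's epoch seed: for epoch e = block_number div EPOCH_LENGTH (one epoch per 30000 blocks), the seed is keccak-256 applied e times to 32 zero bytes. Nim's result starts zero-initialized, which supplies the epoch-0 value for free. A usage sketch of the equivalences that fall out of the loop:

    let s0 = get_seedhash(0)        # epoch 0: 32 zero bytes, loop body never runs
    let s1 = get_seedhash(30_000)   # epoch 1: keccak256 of s0's bytes
    let s2 = get_seedhash(60_000)   # epoch 2: keccak256 applied twice
    doAssert get_seedhash(30_000) == get_seedhash(59_999)  # same epoch, same seed
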
@@ -106,3 +106,6 @@ proc toByteArrayBE*[T: SomeInteger](num: T): ByteArrayBE[T.sizeof] {.noSideEffec
 
 proc toByteArrayBE*(x: U512): ByteArrayBE[64] {.inline, noSideEffect, noInit.}=
   cast[type result](x)
+
+proc toByteArrayBE*[N: static[int]](x: Hash[N]): ByteArrayBE[N div 8] {.inline, noSideEffect, noInit.}=
+  cast[type result](x)

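Note: the new overload gives Hash[N] the same zero-copy reinterpretation that U512 already had; a Hash[N] is N bits wide, hence ByteArrayBE[N div 8] bytes. This is what lets the seed-hash code above write seed.toByteArrayBE. A size-only sketch:

    var h256: Hash[256]
    let b32 = h256.toByteArrayBE   # ByteArrayBE[32]: 256 bits = 32 bytes
    var h512: Hash[512]
    let b64 = h512.toByteArrayBE   # ByteArrayBE[64]: 512 bits = 64 bytes
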
@@ -20,7 +20,7 @@ proc concat_hash*(header: Hash[256], nonce: uint64): Hash[512] {.noSideEffect, i
   result = keccak512 cat
 
 
-proc concat_hash*(s: U512, cmix: array[4, uint32]): array[(512 + 4 * 32) div 8, byte] {.noSideEffect, inline, noInit.} =
+proc concat_hash*(s: U512, cmix: array[8, uint32]): array[(512 + 8 * 32) div 8, byte] {.noSideEffect, inline, noInit.} =
 
 
   # TODO: Do we need to convert cmix to Big Endian??

@@ -31,4 +31,4 @@ proc concat_hash*(s: U512, cmix: array[4, uint32]): array[(512 + 4 * 32) div 8,
 
   for i, b in cmix:
     let offset = s.sizeof + i
-    result[offset ..< offset + 4] = cast[array[4, byte]](b)
+    result[offset ..< offset + 4] = cast[array[8, byte]](b)

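Note: the widened concat_hash stays byte-consistent: 512 bits of s plus 8 * 32 bits of cmix is 768 bits, so the result is (512 + 8 * 32) div 8 = 96 bytes, with s occupying bytes 0..63 and the compressed mix bytes 64..95. That 96-byte concatenation is what keccak256 hashes into the tuple's value field in the hashimoto hunk above.
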
@@ -1,13 +1,13 @@
 # Copyright (c) 2018 Status Research & Development GmbH
 # Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
 
-import ../src/ethash, unittest, strutils,
+import ../src/ethash, unittest, strutils, algorithm,
   keccak_tiny
 
 
 suite "Base hashing algorithm":
   test "FNV hashing":
 
     # https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/c/test.cpp#L104-L116
     let
       x = 1235'u32
       y = 9999999'u32

@@ -17,7 +17,7 @@ suite "Base hashing algorithm":
 
 
   test "Keccak-256 - Note: spec mentions sha3 but it is Keccak":
 
     # https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/c/test.cpp#L118-L129
     let
       input = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
       expected = "2b5ddf6f4d21c23de216f44d5e4bdc68e044b71897837ea74c83908be7037cd7".toUpperASCII

@@ -28,7 +28,7 @@ suite "Base hashing algorithm":
     check: expected == actual2
 
   test "Keccak-512 - Note: spec mentions sha3 but it is Keccak":
 
     # https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/c/test.cpp#L131-L141
     let
       input = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
       expected = "0be8a1d334b4655fe58c6b38789f984bb13225684e86b20517a55ab2386c7b61c306f25e0627c60064cecd6d80cd67a82b3890bd1289b7ceb473aad56a359405".toUpperASCII

@@ -43,6 +43,7 @@ suite "Endianness (not implemented)":
     discard
 
+
 suite "Genesis parameters":
   # https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/c/test.cpp#L155-L180
   let
     full_size = get_datasize(0)
     cache_size = get_cachesize(0)

@@ -62,6 +63,8 @@ suite "Genesis parameters":
   test "Cache size == 16776896":
     check: cache_size == 16776896
 
 suite "Epoch change":
   # https://github.com/paritytech/parity/blob/05f47b635951f942b493747ca3bc71de90a95d5d/ethash/src/compute.rs#L319-L342
   test "Full dataset size at the change of epochs":
     check: get_data_size(EPOCH_LENGTH - 1) == 1073739904'u
     check: get_data_size(EPOCH_LENGTH) == 1082130304'u

@@ -91,3 +94,42 @@ suite "Genesis parameters":
     check: get_cache_size_lut(EPOCH_LENGTH * 2046) == 284950208'u
     check: get_cache_size_lut(EPOCH_LENGTH * 2047) == 285081536'u
     check: get_cache_size_lut(EPOCH_LENGTH * 2048 - 1) == 285081536'u
+
+suite "Seed hash":
+  # https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/python/test_pyethash.py#L97-L105
+  test "Seed hash of block 0":
+    var zeroHex = newString(64)
+    zeroHex.fill('0')
+
+    check: $get_seedhash(0) == zeroHex
+
+  test "Seed hash of the next 2048 blocks":
+    var expected: Hash[256]
+    for i in countup(0'u32, 30000 * 2048, 30000):
+      check: get_seedhash(i) == expected
+      expected = keccak_256(expected.toByteArrayBE)
+
+suite "[Not Implemented] Dagger hashimoto computation":
+  test "Light compute":
+    # Taken from https://github.com/paritytech/parity/blob/05f47b635951f942b493747ca3bc71de90a95d5d/ethash/src/compute.rs#L372-L394
+
+    let hash = cast[Hash[256]]([
+      byte 0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3,
+      0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94,
+      0x05, 0x52, 0x7d, 0x72
+    ])
+
+    let expected_mix_hash = cast[array[8, uint32]]([
+      byte 0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce,
+      0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a,
+      0x64, 0x31, 0xab, 0x6d
+    ])
+
+    let expected_boundary = cast[Hash[256]]([
+      byte 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2,
+      0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a,
+      0xe9, 0x7e, 0x53, 0x84
+    ])
+
+    let nonce = 0xd7b3ac70a301a249'u64
+    ## difficulty = 0x085657254bd9u64

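Note: the "next 2048 blocks" test maintains the expected chain by hand, mirroring get_seedhash's loop one epoch at a time, while the Light compute fixtures (hash, expected_mix_hash, expected_boundary, nonce) are declared but not yet exercised, consistent with the [Not Implemented] suite name. One unrolled step of the seed-hash loop, for illustration:

    var expected: Hash[256]                        # zero-initialized: epoch 0 seed
    doAssert get_seedhash(0) == expected
    expected = keccak_256(expected.toByteArrayBE)  # advance one epoch
    doAssert get_seedhash(30_000) == expected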