Update ethash to use nimcrypto

Author: mratsim
Date:   2018-03-24 18:08:53 +01:00
Parent: 60a143ad0f
Commit: caa457210a

6 changed files with 55 additions and 57 deletions

@@ -7,7 +7,7 @@ srcDir = "src"
### Dependencies
-requires "nim >= 0.18.0", "keccak_tiny >= 0.1.0"
+requires "nim >= 0.18.0", "nimcrypto >= 0.1.0"
proc test(name: string, lang: string = "c") =
if not dirExists "build":

@@ -2,7 +2,7 @@
# Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
import ./proof_of_work, ./private/conversion
-import endians, random, math
+import endians, random, math, nimcrypto
proc mulCarry(a, b: uint64): tuple[carry, unit: uint64] =
## Multiplication in extended precision
@@ -63,8 +63,8 @@ proc mulCarry(a, b: uint64): tuple[carry, unit: uint64] =
proc isValid(nonce: uint64,
difficulty: uint64,
full_size: Natural,
-dataset: seq[Hash[512]],
-header: Hash[256]): bool {.noSideEffect.}=
+dataset: seq[MDigest[512]],
+header: MDigest[256]): bool {.noSideEffect.}=
# Boundary is 2^256/difficulty
# A valid nonce will have: hashimoto < 2^256/difficulty
# We can't represent 2^256 as an uint256 so as a workaround we use:
@@ -113,14 +113,13 @@ proc isValid(nonce: uint64,
result = carry == 0
# const High_uint64 = not 0'u64 # TODO: Nim random does not work on uint64 range.
-proc mine*(full_size: Natural, dataset: seq[Hash[512]], header: Hash[256], difficulty: uint64): uint64 =
+proc mine*(full_size: Natural, dataset: seq[MDigest[512]], header: MDigest[256], difficulty: uint64): uint64 =
# Returns a valid nonce
randomize() # Start with a completely random seed
-result = uint64 random(high(int)) # TODO: Nim random does not work on uint64 range.
-# Also random is deprecated in devel and does not include the end of the range.
+result = uint64 rand(high(int)) # TODO: Nim rand does not work on uint64 range.
while not result.isValid(difficulty, full_size, dataset, header):
inc(result) # we rely on uint overflow (mod 2^64) here.
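Worth noting about the rand TODO above: rand(high(int)) only yields values in 0 .. 2^63-1, so the top bit of the starting nonce is never set. One possible workaround, not part of this commit and shown only as a sketch (randomStartNonce is a hypothetical helper), is to draw the missing bit separately:

import random

proc randomStartNonce(): uint64 =
  ## Hypothetical helper, not in the commit: extend `rand` to the full
  ## uint64 range by drawing the 63 low bits and the top bit separately.
  randomize()
  let lowBits = uint64 rand(high(int)) # 0 .. 2^63-1
  let topBit  = uint64 rand(1)         # 0 or 1
  result = (topBit shl 63) or lowBits

Since the mining loop only increments from this starting point (wrapping mod 2^64), full coverage is a nicety rather than a correctness requirement.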

@@ -1,9 +1,9 @@
# Copyright (c) 2018 Status Research & Development GmbH
# Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
-import keccak_tiny
+import nimcrypto
-proc as_u32_words*[N: static[int]](x: Hash[N]): array[N div 32, uint32] {.inline, noSideEffect, noInit.}=
+proc as_u32_words*[bits: static[int]](x: MDigest[bits]): array[bits div 32, uint32] {.inline, noSideEffect, noInit.}=
# Convert an hash to its uint32 representation
cast[type result](x)
@@ -78,5 +78,5 @@ proc toByteArrayBE*[T: SomeInteger](num: T): array[T.sizeof, byte] {.noSideEffec
for i in 0 ..< N:
result[i] = byte(num shr T((N-1-i) * 8))
-proc toByteArrayBE*[N: static[int]](x: Hash[N]): array[N div 8, byte] {.inline, noSideEffect, noInit.}=
+proc toByteArrayBE*[bits: static[int]](x: MDigest[bits]): array[bits div 8, byte] {.inline, noSideEffect, noInit.}=
cast[type result](x.data)
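A note on why these casts stay valid after the switch: nimcrypto's MDigest[bits] carries its digest as a data: array[bits div 8, byte] field (the diff relies on this via seed.data, x.data, and so on), so reinterpreting it as words or bytes is the same trick as before. A minimal sketch, with an arbitrary example input:

import nimcrypto

let h: MDigest[256] = keccak256.digest("example input")
# as_u32_words: reinterpret the 32 digest bytes as 8 uint32 words.
let words = cast[array[256 div 32, uint32]](h)
# toByteArrayBE: the raw bytes are already exposed as h.data.
let bytes: array[256 div 8, byte] = h.data
echo words[0], " ", bytes[0]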

@@ -2,11 +2,10 @@
# Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
import math, endians,
-keccak_tiny
+nimcrypto
import ./private/[primes, conversion, functional, intmath]
export toHex, hexToByteArrayBE, hexToSeqBytesBE, toByteArrayBE # debug functions
-export keccak_tiny
# ###############################################################################
# Definitions
@@ -58,17 +57,17 @@ proc get_cachesize_lut*(block_number: Natural): uint64 {.noSideEffect, inline.}
# ###############################################################################
# Cache generation
-proc mkcache*(cache_size: uint64, seed: Hash[256]): seq[Hash[512]] {.noSideEffect.}=
+proc mkcache*(cache_size: uint64, seed: MDigest[256]): seq[MDigest[512]] {.noSideEffect.}=
# Cache size
let n = int(cache_size div HASH_BYTES)
# Sequentially produce the initial dataset
-result = newSeq[Hash[512]](n)
-result[0] = keccak512 seed.data
+result = newSeq[MDigest[512]](n)
+result[0] = keccak512.digest seed.data
for i in 1 ..< n:
-result[i] = keccak512 result[i-1].data
+result[i] = keccak512.digest result[i-1].data
# Use a low-round version of randmemohash
for _ in 0 ..< CACHE_ROUNDS:
@@ -77,7 +76,7 @@ proc mkcache*(cache_size: uint64, seed: Hash[256]): seq[Hash[512]] {.noSideEffec
v = result[i].as_u32_words[0] mod n.uint32
a = result[(i-1+n) mod n].data
b = result[v.int].data
-result[i] = keccak512 zipMap(a, b, x xor y)
+result[i] = keccak512.digest zipMap(a, b, x xor y)
# ###############################################################################
# Data aggregation function
@@ -105,7 +104,7 @@ proc fnv*[T: SomeUnsignedInt or Natural](v1, v2: T): uint32 {.inline, noSideEffe
# ###############################################################################
# Full dataset calculation
-proc calc_dataset_item*(cache: seq[Hash[512]], i: Natural): Hash[512] {.noSideEffect, noInit.} =
+proc calc_dataset_item*(cache: seq[MDigest[512]], i: Natural): MDigest[512] {.noSideEffect, noInit.} =
let n = cache.len
const r: uint32 = HASH_BYTES div WORD_BYTES
@@ -117,21 +116,21 @@ proc calc_dataset_item*(cache: seq[Hash[512]], i: Natural): Hash[512] {.noSideEf
mix[0] = mix[0] xor i.uint32
else:
mix[high(mix)] = mix[high(mix)] xor i.uint32
-result = keccak512 mix[]
+result = keccak512.digest mix[]
# FNV with a lots of random cache nodes based on i
for j in 0'u32 ..< DATASET_PARENTS:
let cache_index = fnv(i.uint32 xor j, mix[j mod r])
mix[] = zipMap(mix[], cache[cache_index.int mod n].as_u32_words, fnv(x, y))
-result = keccak512 mix[]
+result = keccak512.digest mix[]
when defined(openmp):
# Remove stacktraces when using OpenMP, heap alloc from strings will crash.
{.push stacktrace: off.}
-proc calc_dataset*(full_size: Natural, cache: seq[Hash[512]]): seq[Hash[512]] =
+proc calc_dataset*(full_size: Natural, cache: seq[MDigest[512]]): seq[MDigest[512]] =
-result = newSeq[Hash[512]](full_size div HASH_BYTES)
+result = newSeq[MDigest[512]](full_size div HASH_BYTES)
for i in `||`(0, result.len - 1, "simd"):
# OpenMP loop
result[i] = calc_dataset_item(cache, i)
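For readers unfamiliar with the `||` loop above: it is Nim's OpenMP parallel-for iterator from system, and it degrades to a plain serial loop when OpenMP is not enabled at compile time. A standalone sketch (the compile flags shown are the usual GCC OpenMP ones, not something this commit defines):

# Sketch only; compile with something like:
#   nim c -d:openmp --passC:-fopenmp --passL:-fopenmp demo.nim
var items = newSeq[int](1000)
for i in `||`(0, items.len - 1, "simd"):
  # Each iteration is independent, mirroring calc_dataset above.
  items[i] = i * i
echo items[999]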
@@ -143,9 +142,9 @@ when defined(openmp):
# ###############################################################################
# Main loop
-type HashimotoHash = tuple[mix_digest, value: Hash[256]]
+type HashimotoHash = tuple[mix_digest, value: MDigest[256]]
-template hashimoto(header: Hash[256],
+template hashimoto(header: MDigest[256],
nonce: uint64,
full_size: Natural,
dataset_lookup_p: untyped,
@@ -161,7 +160,8 @@ template hashimoto(header: Hash[256],
assert MIX_BYTES mod HASH_BYTES == 0
# combine header+nonce into a 64 byte seed
-var s{.noInit.}: Hash[512]
+{.pragma: align64, codegenDecl: "$# $# __attribute__((aligned(64)))".}
+var s{.align64, noInit.}: MDigest[512]
let s_bytes = cast[ptr array[64, byte]](addr s) # Alias for to interpret s as a byte array
let s_words = cast[ptr array[16, uint32]](addr s) # Alias for to interpret s as an uint32 array
@@ -172,11 +172,11 @@
littleEndian64(addr nonceLE, unsafeAddr nonce)
s_bytes[][32..<40] = cast[array[8,byte]](nonceLE)
-s = keccak_512 s_bytes[][0..<40] # TODO: Does this allocate a seq?
+s = keccak_512.digest s_bytes[][0..<40] # TODO: Does this slicing allocate a seq?
# start the mix with replicated s
assert MIX_BYTES div HASH_BYTES == 2
-var mix{.noInit.}: array[32, uint32]
+var mix{.align64, noInit.}: array[32, uint32]
mix[0..<16] = s_words[]
mix[16..<32] = s_words[]
@@ -203,10 +203,10 @@ template hashimoto(header: Hash[256],
var concat{.noInit.}: array[64 + 32, byte]
concat[0..<64] = s_bytes[]
concat[64..<96] = cast[array[32, byte]](result.mix_digest)
-result.value = keccak_256(concat)
+result.value = keccak_256.digest concat
-proc hashimoto_light*(full_size:Natural, cache: seq[Hash[512]],
-header: Hash[256], nonce: uint64): HashimotoHash {.noSideEffect.} =
+proc hashimoto_light*(full_size:Natural, cache: seq[MDigest[512]],
+header: MDigest[256], nonce: uint64): HashimotoHash {.noSideEffect.} =
hashimoto(header,
nonce,
@@ -215,8 +215,8 @@ proc hashimoto_light*(full_size:Natural, cache: seq[Hash[512]],
calc_data_set_item(cache, p1),
result)
-proc hashimoto_full*(full_size:Natural, dataset: seq[Hash[512]],
-header: Hash[256], nonce: uint64): HashimotoHash {.noSideEffect.} =
+proc hashimoto_full*(full_size:Natural, dataset: seq[MDigest[512]],
+header: MDigest[256], nonce: uint64): HashimotoHash {.noSideEffect.} =
# TODO spec mentions full_size but I don't think we need it (retrieve it from dataset.len)
hashimoto(header,
nonce,
@@ -227,6 +227,6 @@ proc hashimoto_full*(full_size:Natural, dataset: seq[Hash[512]],
# ###############################################################################
# Defining the seed hash
-proc get_seedhash*(block_number: uint64): Hash[256] {.noSideEffect.} =
+proc get_seedhash*(block_number: uint64): MDigest[256] {.noSideEffect.} =
for i in 0 ..< int(block_number div EPOCH_LENGTH):
-result = keccak256 result.data
+result = keccak256.digest result.data
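The pattern throughout this file is uniform: keccak_tiny's procs were called directly (keccak256 x) and returned Hash[N], whereas nimcrypto dispatches through digest on the algorithm type and returns MDigest[bits]. A minimal before/after sketch with a throwaway input:

import nimcrypto

# keccak_tiny (before):  result = keccak256 result.data          # -> Hash[256]
# nimcrypto   (after):   result = keccak256.digest result.data   # -> MDigest[256]
let h256 = keccak256.digest([byte 1, 2, 3])   # MDigest[256]
let h512 = keccak512.digest(h256.data)        # chain by hashing the raw digest bytes
echo h256
echo h512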

@@ -1,7 +1,7 @@
# Copyright (c) 2018 Status Research & Development GmbH
# Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
-import ../src/ethash, unittest, keccak_tiny, times, strutils
+import ../src/ethash, unittest, times, strutils, nimcrypto
suite "Test mining":
@@ -12,7 +12,7 @@ suite "Test mining":
let
blck = 22'u # block number
cache = mkcache(get_cachesize(blck), get_seedhash(blck))
-header = cast[Hash[256]](
+header = cast[MDigest[256]](
hexToByteArrayBE[32]("372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d")
)
difficulty = 132416'u64

@@ -1,8 +1,7 @@
# Copyright (c) 2018 Status Research & Development GmbH
# Distributed under the Apache v2 License (license terms are at http://www.apache.org/licenses/LICENSE-2.0).
-import ../src/ethash, unittest, strutils, algorithm, random, sequtils,
-keccak_tiny
+import ../src/ethash, unittest, strutils, algorithm, random, sequtils, nimcrypto
suite "Base hashing algorithm":
@@ -21,8 +20,8 @@ suite "Base hashing algorithm":
let
input = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
expected = "2b5ddf6f4d21c23de216f44d5e4bdc68e044b71897837ea74c83908be7037cd7".toUpperASCII
-actual = toUpperASCII($input.keccak_256) # using keccak built-in conversion proc
-actual2 = cast[array[256 div 8, byte]](input.keccak_256).toHex.toUpperAscii
+actual = toUpperASCII($keccak256.digest(input)) # using keccak built-in conversion proc
+actual2 = cast[array[256 div 8, byte]](keccak_256.digest(input)).toHex.toUpperAscii
check: expected == actual
check: expected == actual2
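The 256-bit vector above can also be checked standalone against nimcrypto, reusing the test's own input and expected digest (a sketch only; the test file already covers this):

import nimcrypto, strutils

let input = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"   # 32 '~' characters, as in the test
let expected = "2b5ddf6f4d21c23de216f44d5e4bdc68e044b71897837ea74c83908be7037cd7"
doAssert toUpperAscii($keccak256.digest(input)) == expected.toUpperAscii
echo "keccak256 test vector OK"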
@@ -32,8 +31,8 @@ suite "Base hashing algorithm":
let
input = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
expected = "0be8a1d334b4655fe58c6b38789f984bb13225684e86b20517a55ab2386c7b61c306f25e0627c60064cecd6d80cd67a82b3890bd1289b7ceb473aad56a359405".toUpperASCII
-actual = toUpperASCII($input.keccak_512) # using keccak built-in conversion proc
-actual2 = cast[array[512 div 8, byte]](input.keccak_512).toHex.toUpperAscii
+actual = toUpperASCII($keccak512.digest(input)) # using keccak built-in conversion proc
+actual2 = cast[array[512 div 8, byte]](keccak_512.digest(input)).toHex.toUpperAscii
check: expected == actual
check: expected == actual2
@@ -117,7 +116,7 @@ suite "Cache initialization":
# https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/python/test_pyethash.py#L31-L36
test "Mkcache":
let actual_str = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
-var actual_hash: Hash[256]
+var actual_hash: MDigest[256]
copyMem(addr actual_hash, unsafeAddr actual_str[0], 256 div 8)
let
@@ -136,10 +135,10 @@ suite "Seed hash":
check: $get_seedhash(0) == zeroHex
test "Seed hash of the next 2048 epochs (2048 * 30000 blocks)":
-var expected: Hash[256]
+var expected: MDigest[256]
for i in countup(0'u32, 30000 * 2048, 30000):
check: get_seedhash(i) == expected
-expected = keccak_256(expected.data)
+expected = keccak_256.digest(expected.data)
suite "Dagger hashimoto computation":
# We can't replicate Python's dynamic typing here
@@ -151,11 +150,11 @@ suite "Dagger hashimoto computation":
cache_str = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
header_str = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
-var cache_hash: Hash[256]
+var cache_hash: MDigest[256]
copyMem(addr cache_hash, unsafeAddr cache_str[0], 256 div 8)
let cache = mkcache(cache_size, cache_hash)
-var header: Hash[256]
+var header: MDigest[256]
copyMem(addr header, unsafeAddr header_str[0], 256 div 8)
let full = calc_dataset(full_size, cache)
@@ -168,7 +167,7 @@ suite "Dagger hashimoto computation":
test "Real dataset and recomputation from cache matches":
# https://github.com/ethereum/ethash/blob/f5f0a8b1962544d2b6f40df8e4b0d9a32faf8f8e/test/c/test.cpp#L360-L374
-for i in 0 ..< full_size div sizeof(Hash[512]):
+for i in 0 ..< full_size div sizeof(MDigest[512]):
for j in 0 ..< 32:
let expected = calc_dataset_item(cache, j)
check: full[j] == expected
@@ -183,7 +182,7 @@ suite "Dagger hashimoto computation":
let full_result = hashimoto_full(full_size, dataset, header, 0)
# Check not null
-var zero_hash : Hash[256]
+var zero_hash : MDigest[256]
check: light_result.mix_digest != zero_hash
check: light_result.value != zero_hash
check: light_result == full_result
@@ -194,7 +193,7 @@ suite "Real blocks test":
# POC-9 testnet, epoch 0
let blck = 22'u # block number
let cache = mkcache(get_cachesize(blck), get_seedhash(blck))
-let header = cast[Hash[256]](
+let header = cast[MDigest[256]](
hexToByteArrayBE[32]("372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d")
)
@@ -205,10 +204,10 @@ suite "Real blocks test":
0x495732e0ed7a801c'u
)
-check: light.value == cast[Hash[256]](
+check: light.value == cast[MDigest[256]](
hexToByteArrayBE[32]("00000b184f1fdd88bfd94c86c39e65db0c36144d5e43f745f722196e730cb614")
)
-check: light.mixDigest == cast[Hash[256]](
+check: light.mixDigest == cast[MDigest[256]](
hexToByteArrayBE[32]("2f74cdeb198af0b9abe65d22d372e22fb2d474371774a9583c1cc427a07939f5")
)
@@ -217,7 +216,7 @@ suite "Real blocks test":
# POC-9 testnet, epoch 1
let blck = 30001'u # block number
let cache = mkcache(get_cachesize(blck), get_seedhash(blck))
-let header = cast[Hash[256]](
+let header = cast[MDigest[256]](
hexToByteArrayBE[32]("7e44356ee3441623bc72a683fd3708fdf75e971bbe294f33e539eedad4b92b34")
)
@@ -228,7 +227,7 @@
0x318df1c8adef7e5e'u
)
-check: light.mixDigest == cast[Hash[256]](
+check: light.mixDigest == cast[MDigest[256]](
hexToByteArrayBE[32]("144b180aad09ae3c81fb07be92c8e6351b5646dda80e6844ae1b697e55ddde84")
)
@@ -237,7 +236,7 @@
# POC-9 testnet, epoch 2
let blck = 60000'u # block number
let cache = mkcache(get_cachesize(blck), get_seedhash(blck))
-let header = cast[Hash[256]](
+let header = cast[MDigest[256]](
hexToByteArrayBE[32]("5fc898f16035bf5ac9c6d9077ae1e3d5fc1ecc3c9fd5bee8bb00e810fdacbaa0")
)
@@ -248,6 +247,6 @@
0x50377003e5d830ca'u
)
-check: light.mixDigest == cast[Hash[256]](
+check: light.mixDigest == cast[MDigest[256]](
hexToByteArrayBE[32]("ab546a5b73c452ae86dadd36f0ed83a6745226717d3798832d1b20b489e82063")
)