nim-libp2p-experimental/libp2p/multihash.nim

594 lines
24 KiB
Nim
Raw Normal View History

2022-07-01 18:19:57 +00:00
# Nim-Libp2p
2023-01-20 14:47:40 +00:00
# Copyright (c) 2023 Status Research & Development GmbH
2022-07-01 18:19:57 +00:00
# Licensed under either of
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
# * MIT license ([LICENSE-MIT](LICENSE-MIT))
# at your option.
# This file may not be copied, modified, or distributed except according to
# those terms.
2018-12-04 17:53:36 +00:00
## This module implements MultiHash.
## Supported hashes are:
## 1. IDENTITY
2018-12-04 21:11:13 +00:00
## 2. SHA2-256/SHA2-512
2018-12-04 17:53:36 +00:00
## 3. DBL-SHA2-256
## 4. SHA3/KECCAK
2018-12-04 21:11:13 +00:00
## 5. SHAKE-128/SHAKE-256
2018-12-04 17:53:36 +00:00
## 6. BLAKE2b/BLAKE2s
## 7. SHA1
2018-12-04 17:53:36 +00:00
##
## Hashes which are not yet supported
## 1. SKEIN
## 2. MURMUR
2023-06-07 11:12:49 +00:00
{.push raises: [].}
2018-12-04 17:53:36 +00:00
import tables
import nimcrypto/[sha, sha2, keccak, blake2, hash, utils]
2019-12-13 09:42:47 +00:00
import varint, vbuffer, multicodec, multibase
import stew/base58
import stew/results
export results
# This is workaround for Nim `import` bug.
export sha, sha2, keccak, blake2, hash, utils
2018-12-04 17:53:36 +00:00
const
  MaxHashSize* = 128  ## Largest digest size in bytes handled by this module.
  # Error messages surfaced through ``MhResult``'s cstring error channel.
  ErrIncorrectName = "Incorrect hash name"
  ErrNotSupported = "Hash not supported"
  ErrWrongDigestSize = "Incorrect digest size"
  ErrDecodeError = "Decoding error from bytes"
  ErrParseError = "Parse error fromHex"
2018-12-04 17:53:36 +00:00
type
  MHashCoderProc* = proc(data: openArray[byte],
                         output: var openArray[byte]) {.nimcall, gcsafe, noSideEffect, raises: [].}
    ## One-shot hash coder: digests ``data`` and writes the (possibly
    ## truncated) result into ``output``.

  MHash* = object
    ## Static descriptor of one supported hash function.
    mcodec*: MultiCodec     # multicodec identifier of the hash algorithm
    size*: int              # digest size in bytes; 0 means variable (identity)
    coder*: MHashCoderProc  # procedure that computes the digest

  MultiHash* = object
    ## A parsed or freshly serialized multihash value.
    data*: VBuffer          # full encoding: codec varint, size varint, digest
    mcodec*: MultiCodec     # hash algorithm codec
    size*: int              # digest length in bytes
    dpos*: int              # offset of the digest inside ``data.buffer``

  MhResult*[T] = Result[T, cstring]
    ## Result type used throughout this module; errors are static cstrings.
2018-12-04 17:53:36 +00:00
2021-12-16 10:05:20 +00:00
proc identhash(data: openArray[byte], output: var openArray[byte]) =
  ## Identity "hash": copies ``data`` into ``output``, truncated to the
  ## shorter of the two lengths. No-op when ``output`` is empty.
  if output.len > 0:
    let count = min(data.len, output.len)
    copyMem(addr output[0], unsafeAddr data[0], count)
2021-12-16 10:05:20 +00:00
proc sha1hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA-1 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha1.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha1.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc dblsha2_256hash(data: openArray[byte], output: var openArray[byte]) =
  ## Double SHA2-256 (sha256(sha256(data))), truncated to fit ``output``.
  if output.len > 0:
    let inner = sha256.digest(data)
    var outer = sha256.digest(inner.data)
    copyMem(addr output[0], addr outer.data[0],
            min(sha256.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc blake2Bhash(data: openArray[byte], output: var openArray[byte]) =
  ## BLAKE2b-512 digest of ``data``, truncated to fit ``output`` (used for
  ## every blake2b-N multicodec entry).
  if output.len > 0:
    var digest = blake2_512.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(blake2_512.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc blake2Shash(data: openArray[byte], output: var openArray[byte]) =
  ## BLAKE2s-256 digest of ``data``, truncated to fit ``output`` (used for
  ## every blake2s-N multicodec entry).
  if output.len > 0:
    var digest = blake2_256.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(blake2_256.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha2_256hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA2-256 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha256.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha256.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha2_512hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA2-512 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha512.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha512.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha3_224hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA3-224 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha3_224.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha3_224.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha3_256hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA3-256 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha3_256.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha3_256.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha3_384hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA3-384 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha3_384.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha3_384.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc sha3_512hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHA3-512 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = sha3_512.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(sha3_512.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc keccak_224hash(data: openArray[byte], output: var openArray[byte]) =
  ## Keccak-224 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = keccak224.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(keccak224.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc keccak_256hash(data: openArray[byte], output: var openArray[byte]) =
  ## Keccak-256 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = keccak256.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(keccak256.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc keccak_384hash(data: openArray[byte], output: var openArray[byte]) =
  ## Keccak-384 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = keccak384.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(keccak384.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc keccak_512hash(data: openArray[byte], output: var openArray[byte]) =
  ## Keccak-512 digest of ``data``, truncated to fit ``output``.
  if output.len > 0:
    var digest = keccak512.digest(data)
    copyMem(addr output[0], addr digest.data[0],
            min(keccak512.sizeDigest, output.len))
2021-12-16 10:05:20 +00:00
proc shake_128hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHAKE-128 extendable-output function over ``data``; squeezes exactly
  ## ``output.len`` bytes. No-op when ``output`` is empty.
  if output.len > 0:
    var ctx: shake128
    ctx.init()
    ctx.update(cast[ptr uint8](unsafeAddr data[0]), uint(data.len))
    ctx.xof()
    discard ctx.output(addr output[0], uint(output.len))
    ctx.clear()
2021-12-16 10:05:20 +00:00
proc shake_256hash(data: openArray[byte], output: var openArray[byte]) =
  ## SHAKE-256 extendable-output function over ``data``; squeezes exactly
  ## ``output.len`` bytes. No-op when ``output`` is empty.
  if output.len > 0:
    var ctx: shake256
    ctx.init()
    ctx.update(cast[ptr uint8](unsafeAddr data[0]), uint(data.len))
    ctx.xof()
    discard ctx.output(addr output[0], uint(output.len))
    ctx.clear()
const
  ## Registry of all supported hash functions. ``size`` is the digest length
  ## in bytes (0 for identity, whose "digest" length equals the input);
  ## blake2b-N/blake2s-N entries reuse one coder and truncate to N/8 bytes.
  HashesList = [
    MHash(mcodec: multiCodec("identity"), size: 0,
          coder: identhash),
    MHash(mcodec: multiCodec("sha1"), size: sha1.sizeDigest,
          coder: sha1hash),
    MHash(mcodec: multiCodec("dbl-sha2-256"), size: sha256.sizeDigest,
          coder: dblsha2_256hash
    ),
    MHash(mcodec: multiCodec("sha2-256"), size: sha256.sizeDigest,
          coder: sha2_256hash
    ),
    MHash(mcodec: multiCodec("sha2-512"), size: sha512.sizeDigest,
          coder: sha2_512hash
    ),
    MHash(mcodec: multiCodec("sha3-224"), size: sha3_224.sizeDigest,
          coder: sha3_224hash
    ),
    MHash(mcodec: multiCodec("sha3-256"), size: sha3_256.sizeDigest,
          coder: sha3_256hash
    ),
    MHash(mcodec: multiCodec("sha3-384"), size: sha3_384.sizeDigest,
          coder: sha3_384hash
    ),
    MHash(mcodec: multiCodec("sha3-512"), size: sha3_512.sizeDigest,
          coder: sha3_512hash
    ),
    # SHAKE XOFs use fixed output sizes of 32/64 bytes per multihash spec.
    MHash(mcodec: multiCodec("shake-128"), size: 32, coder: shake_128hash),
    MHash(mcodec: multiCodec("shake-256"), size: 64, coder: shake_256hash),
    MHash(mcodec: multiCodec("keccak-224"), size: keccak224.sizeDigest,
          coder: keccak_224hash
    ),
    MHash(mcodec: multiCodec("keccak-256"), size: keccak256.sizeDigest,
          coder: keccak_256hash
    ),
    MHash(mcodec: multiCodec("keccak-384"), size: keccak384.sizeDigest,
          coder: keccak_384hash
    ),
    MHash(mcodec: multiCodec("keccak-512"), size: keccak512.sizeDigest,
          coder: keccak_512hash
    ),
    # blake2b-N: N bits of BLAKE2b-512 output, N = 8..512 step 8.
    MHash(mcodec: multiCodec("blake2b-8"), size: 1, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-16"), size: 2, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-24"), size: 3, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-32"), size: 4, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-40"), size: 5, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-48"), size: 6, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-56"), size: 7, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-64"), size: 8, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-72"), size: 9, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-80"), size: 10, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-88"), size: 11, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-96"), size: 12, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-104"), size: 13, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-112"), size: 14, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-120"), size: 15, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-128"), size: 16, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-136"), size: 17, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-144"), size: 18, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-152"), size: 19, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-160"), size: 20, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-168"), size: 21, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-176"), size: 22, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-184"), size: 23, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-192"), size: 24, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-200"), size: 25, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-208"), size: 26, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-216"), size: 27, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-224"), size: 28, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-232"), size: 29, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-240"), size: 30, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-248"), size: 31, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-256"), size: 32, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-264"), size: 33, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-272"), size: 34, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-280"), size: 35, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-288"), size: 36, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-296"), size: 37, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-304"), size: 38, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-312"), size: 39, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-320"), size: 40, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-328"), size: 41, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-336"), size: 42, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-344"), size: 43, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-352"), size: 44, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-360"), size: 45, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-368"), size: 46, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-376"), size: 47, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-384"), size: 48, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-392"), size: 49, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-400"), size: 50, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-408"), size: 51, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-416"), size: 52, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-424"), size: 53, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-432"), size: 54, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-440"), size: 55, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-448"), size: 56, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-456"), size: 57, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-464"), size: 58, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-472"), size: 59, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-480"), size: 60, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-488"), size: 61, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-496"), size: 62, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-504"), size: 63, coder: blake2Bhash),
    MHash(mcodec: multiCodec("blake2b-512"), size: 64, coder: blake2Bhash),
    # blake2s-N: N bits of BLAKE2s-256 output, N = 8..256 step 8.
    MHash(mcodec: multiCodec("blake2s-8"), size: 1, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-16"), size: 2, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-24"), size: 3, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-32"), size: 4, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-40"), size: 5, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-48"), size: 6, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-56"), size: 7, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-64"), size: 8, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-72"), size: 9, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-80"), size: 10, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-88"), size: 11, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-96"), size: 12, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-104"), size: 13, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-112"), size: 14, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-120"), size: 15, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-128"), size: 16, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-136"), size: 17, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-144"), size: 18, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-152"), size: 19, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-160"), size: 20, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-168"), size: 21, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-176"), size: 22, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-184"), size: 23, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-192"), size: 24, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-200"), size: 25, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-208"), size: 26, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-216"), size: 27, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-224"), size: 28, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-232"), size: 29, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-240"), size: 30, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-248"), size: 31, coder: blake2Shash),
    MHash(mcodec: multiCodec("blake2s-256"), size: 32, coder: blake2Shash)
  ]
proc initMultiHashCodeTable(): Table[MultiCodec, MHash] {.compileTime.} =
  ## Build the codec-id → hash-descriptor lookup table at compile time.
  for hashItem in HashesList:
    result[hashItem.mcodec] = hashItem
2018-12-04 17:53:36 +00:00
const
  CodeHashes = initMultiHashCodeTable()  ## Compile-time codec → MHash map.
2021-12-16 10:05:20 +00:00
proc digestImplWithHash(hash: MHash, data: openArray[byte]): MultiHash =
  ## Serialize ``codec varint | size varint | digest`` into a fresh
  ## MultiHash, running ``hash.coder`` over ``data`` to produce the digest.
  var scratch: array[MaxHashSize, byte]
  result.data = initVBuffer()
  result.mcodec = hash.mcodec
  result.data.write(hash.mcodec)
  if hash.size == 0:
    # Variable-size (identity) hash: the "digest" is the payload itself.
    result.data.writeVarint(uint(data.len))
    result.dpos = result.data.buffer.len
    result.data.writeArray(data)
    result.size = data.len
  else:
    result.data.writeVarint(uint(hash.size))
    result.dpos = result.data.buffer.len
    hash.coder(data, scratch.toOpenArray(0, hash.size - 1))
    result.data.writeArray(scratch.toOpenArray(0, hash.size - 1))
    result.size = hash.size
  result.data.finish()
2018-12-04 17:53:36 +00:00
2021-12-16 10:05:20 +00:00
proc digestImplWithoutHash(hash: MHash, data: openArray[byte]): MultiHash =
  ## Serialize ``codec varint | size varint | data`` into a fresh MultiHash,
  ## treating ``data`` as an already-computed digest (no hashing performed).
  result.data = initVBuffer()
  result.mcodec = hash.mcodec
  result.size = data.len
  result.data.write(hash.mcodec)
  result.data.writeVarint(uint(data.len))
  result.dpos = result.data.buffer.len
  result.data.writeArray(data)
  result.data.finish()
2018-12-04 17:53:36 +00:00
proc digest*(mhtype: typedesc[MultiHash], hashname: string,
             data: openArray[byte]): MhResult[MultiHash] {.inline.} =
  ## Perform digest calculation using hash algorithm with name ``hashname``
  ## on data array ``data``.
  let mc = MultiCodec.codec(hashname)
  if mc == InvalidMultiCodec:
    return err(ErrIncorrectName)
  let hash = CodeHashes.getOrDefault(mc)
  if isNil(hash.coder):
    return err(ErrNotSupported)
  ok(digestImplWithHash(hash, data))
2018-12-04 17:53:36 +00:00
proc digest*(mhtype: typedesc[MultiHash], hashcode: int,
             data: openArray[byte]): MhResult[MultiHash] {.inline.} =
  ## Perform digest calculation using hash algorithm with code ``hashcode``
  ## on data array ``data``.
  ##
  ## Returns ``ErrNotSupported`` when ``hashcode`` is unknown.
  # CodeHashes is keyed by MultiCodec; convert the raw integer code so the
  # lookup type-checks (decode() performs the same MultiCodec(...) conversion).
  let hash = CodeHashes.getOrDefault(MultiCodec(hashcode))
  if isNil(hash.coder):
    err(ErrNotSupported)
  else:
    ok(digestImplWithHash(hash, data))
2018-12-04 17:53:36 +00:00
proc init*[T](mhtype: typedesc[MultiHash], hashname: string,
              mdigest: MDigest[T]): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from nimcrypto's `MDigest` object and hash algorithm
  ## name ``hashname``.
  let mc = MultiCodec.codec(hashname)
  if mc == InvalidMultiCodec:
    return err(ErrIncorrectName)
  let hash = CodeHashes.getOrDefault(mc)
  if isNil(hash.coder):
    return err(ErrNotSupported)
  if hash.size != len(mdigest.data):
    return err(ErrWrongDigestSize)
  ok(digestImplWithoutHash(hash, mdigest.data))
2018-12-04 17:53:36 +00:00
proc init*[T](mhtype: typedesc[MultiHash], hashcode: MultiCodec,
              mdigest: MDigest[T]): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from nimcrypto's `MDigest` and hash algorithm code
  ## ``hashcode``.
  let hash = CodeHashes.getOrDefault(hashcode)
  if isNil(hash.coder):
    return err(ErrNotSupported)
  # size == 0 marks variable-size hashes, which accept any digest length.
  if hash.size != 0 and hash.size != len(mdigest.data):
    return err(ErrWrongDigestSize)
  ok(digestImplWithoutHash(hash, mdigest.data))
2018-12-04 17:53:36 +00:00
2018-12-04 21:11:13 +00:00
proc init*(mhtype: typedesc[MultiHash], hashname: string,
           bdigest: openArray[byte]): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from array of bytes ``bdigest`` and hash algorithm
  ## name ``hashname``.
  let mc = MultiCodec.codec(hashname)
  if mc == InvalidMultiCodec:
    return err(ErrIncorrectName)
  let hash = CodeHashes.getOrDefault(mc)
  if isNil(hash.coder):
    return err(ErrNotSupported)
  # size == 0 marks variable-size hashes, which accept any digest length.
  if hash.size != 0 and hash.size != len(bdigest):
    return err(ErrWrongDigestSize)
  ok(digestImplWithoutHash(hash, bdigest))
2018-12-04 17:53:36 +00:00
proc init*(mhtype: typedesc[MultiHash], hashcode: MultiCodec,
           bdigest: openArray[byte]): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from array of bytes ``bdigest`` and hash algorithm
  ## code ``hashcode``.
  let hash = CodeHashes.getOrDefault(hashcode)
  if isNil(hash.coder):
    return err(ErrNotSupported)
  # size == 0 marks variable-size hashes, which accept any digest length.
  if hash.size != 0 and hash.size != len(bdigest):
    return err(ErrWrongDigestSize)
  ok(digestImplWithoutHash(hash, bdigest))
2018-12-04 17:53:36 +00:00
2021-12-16 10:05:20 +00:00
proc decode*(mhtype: typedesc[MultiHash], data: openArray[byte],
             mhash: var MultiHash): MhResult[int] =
  ## Decode MultiHash value from array of bytes ``data``.
  ##
  ## On success decoded MultiHash will be stored into ``mhash`` and the
  ## number of bytes consumed will be returned.
  ##
  ## On malformed input ``err(ErrDecodeError)`` is returned.
  ## (NOTE(review): the old doc claimed ``-1`` is returned on error, which
  ## was stale — the proc returns a Result.)
  var code, size: uint64
  if len(data) < 2:
    # Minimum encoding is one codec byte plus one size byte.
    return err(ErrDecodeError)
  var vb = initVBuffer(data)
  if vb.isEmpty():
    return err(ErrDecodeError)
  if vb.readVarint(code) == -1:
    return err(ErrDecodeError)
  if vb.readVarint(size) == -1:
    return err(ErrDecodeError)
  if size > 0x7FFF_FFFF'u64:
    # Reject sizes that do not fit a positive 32-bit int.
    return err(ErrDecodeError)
  let hash = CodeHashes.getOrDefault(MultiCodec(code))
  if isNil(hash.coder):
    return err(ErrDecodeError)
  if (hash.size != 0) and (hash.size != int(size)):
    return err(ErrDecodeError)
  if not vb.isEnough(int(size)):
    return err(ErrDecodeError)
  mhash = ? MultiHash.init(MultiCodec(code),
                           vb.buffer.toOpenArray(vb.offset,
                                                 vb.offset + int(size) - 1))
  ok(vb.offset + int(size))
2018-12-04 21:11:13 +00:00
2021-12-16 10:05:20 +00:00
proc validate*(mhtype: typedesc[MultiHash], data: openArray[byte]): bool =
  ## Returns ``true`` if array of bytes ``data`` has correct MultiHash
  ## inside.
  var codecValue, digestSize: uint64
  if data.len < 2:
    return false
  let last = data.high
  var
    pos = 0
    consumed = 0
  var vres = LP.getUVarint(data.toOpenArray(pos, last), consumed, codecValue)
  if vres.isErr():
    return false
  pos += consumed
  if pos >= data.len:
    return false
  vres = LP.getUVarint(data.toOpenArray(pos, last), consumed, digestSize)
  if vres.isErr():
    return false
  pos += consumed
  if digestSize > 0x7FFF_FFFF'u64:
    # Reject sizes that do not fit a positive 32-bit int.
    return false
  let hash = CodeHashes.getOrDefault(cast[MultiCodec](codecValue))
  if isNil(hash.coder):
    return false
  if (hash.size != 0) and (hash.size != int(digestSize)):
    return false
  if pos + int(digestSize) > data.len:
    return false
  true
2018-12-04 21:11:13 +00:00
proc init*(mhtype: typedesc[MultiHash],
           data: openArray[byte]): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from bytes array ``data``.
  var parsed: MultiHash
  # Consumed-byte count is irrelevant here; only the parsed value matters.
  discard ? MultiHash.decode(data, parsed)
  ok(parsed)
2018-12-04 21:11:13 +00:00
proc init*(mhtype: typedesc[MultiHash], data: string): MhResult[MultiHash] {.inline.} =
  ## Create MultiHash from hexadecimal string representation ``data``.
  var parsed: MultiHash
  try:
    discard ? MultiHash.decode(fromHex(data), parsed)
    ok(parsed)
  except ValueError:
    # fromHex raises on malformed hex input.
    err(ErrParseError)
2018-12-04 21:11:13 +00:00
proc init58*(mhtype: typedesc[MultiHash],
             data: string): MultiHash {.inline, raises: [CatchableError].} =
  ## Create MultiHash from BASE58 encoded string representation ``data``.
  ##
  ## Raises a ``CatchableError`` (``ValueError`` for malformed MultiHash
  ## bytes; Base58 decode errors propagate) on invalid input.
  ## NOTE(review): the original compared an ``MhResult`` against ``-1`` and
  ## raised an undefined ``MultihashError`` — neither compiles; this keeps
  ## the raise-on-failure contract with defined types.
  if MultiHash.decode(Base58.decode(data), result).isErr():
    raise newException(ValueError, "Incorrect MultiHash binary format")
2021-12-16 10:05:20 +00:00
proc cmp(a: openArray[byte], b: openArray[byte]): bool {.inline.} =
  ## Constant-time equality comparison of two byte sequences: running time
  ## does not depend on where (or whether) the first mismatch occurs.
  ##
  ## NOTE(review): the previous accumulator ``(res and -not(diff)) or diff``
  ## mistranslated C's ``-!diff`` — Nim's ``not`` is bitwise, so
  ## ``-not(diff) == diff + 1`` and a later zero diff could mask an earlier
  ## mismatch (e.g. [0,2] vs [0,0] compared equal). XOR/OR accumulation is
  ## the standard correct form.
  if len(a) != len(b):
    return false
  var acc = 0
  for i in 0 ..< len(a):
    # Any differing byte leaves a nonzero bit in ``acc``.
    acc = acc or (int(a[i]) xor int(b[i]))
  result = (acc == 0)
proc `==`*[T](mh: MultiHash, mdigest: MDigest[T]): bool =
  ## Compares MultiHash with nimcrypto's MDigest[T], returns ``true`` if
  ## hashes are equal, ``false`` otherwise.
  if mh.dpos == 0:
    # dpos == 0 marks an uninitialized MultiHash.
    return false
  if mh.size != len(mdigest.data):
    return false
  cmp(mh.data.buffer.toOpenArray(mh.dpos, mh.dpos + mh.size - 1),
      mdigest.data.toOpenArray(0, mdigest.data.high))
2018-12-04 21:11:13 +00:00
proc `==`*[T](mdigest: MDigest[T], mh: MultiHash): bool {.inline.} =
  ## Compares MultiHash with nimcrypto's MDigest[T], returns ``true`` if
  ## hashes are equal, ``false`` otherwise.
  # Delegate to the (MultiHash, MDigest) overload.
  mh == mdigest
proc `==`*(a: MultiHash, b: MultiHash): bool =
  ## Compares MultiHashes ``a`` and ``b``, returns ``true`` if hashes are
  ## equal, ``false`` otherwise.
  if a.dpos == 0 and b.dpos == 0:
    # Two uninitialized values compare equal.
    return true
  if a.mcodec != b.mcodec:
    return false
  if a.size != b.size:
    return false
  cmp(a.data.buffer.toOpenArray(a.dpos, a.dpos + a.size - 1),
      b.data.buffer.toOpenArray(b.dpos, b.dpos + b.size - 1))
2018-12-04 17:53:36 +00:00
proc hex*(value: MultiHash): string =
  ## Return hexadecimal string representation of MultiHash ``value``
  ## (delegates to VBuffer's `$`).
  result = $(value.data)
proc base58*(value: MultiHash): string =
  ## Return Base58 encoded string representation of MultiHash ``value``.
  result = Base58.encode(value.data.buffer)
proc `$`*(mh: MultiHash): string =
  ## Return string representation ("codec/hex-digest") of MultiHash ``mh``.
  let digestHex = toHex(mh.data.buffer.toOpenArray(mh.dpos,
                                                   mh.dpos + mh.size - 1))
  $(mh.mcodec) & "/" & digestHex
proc write*(vb: var VBuffer, mh: MultiHash) {.inline.} =
  ## Write MultiHash value ``mh`` (its complete binary encoding) to
  ## buffer ``vb``.
  vb.writeArray(mh.data.buffer)
2018-12-16 02:54:07 +00:00
proc encode*(mbtype: typedesc[MultiBase], encoding: string,
             mh: MultiHash): string {.inline.} =
  ## Get MultiBase encoded representation of ``mh`` using encoding
  ## ``encoding``.
  result = MultiBase.encode(encoding, mh.data.buffer)