# Nimbus
# Copyright (c) 2018 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except according to those terms.

import
  sequtils, algorithm,
  stew/[byteutils], eth/trie/[hexary, db],
  eth/[common, rlp], chronicles,
  ../errors, ../constants, ./storage_types,
  ../utils, ../config

type
  BaseChainDB* = ref object
    db*       : TrieDatabaseRef
    pruneTrie*: bool
    config*   : ChainConfig
    networkId*: PublicNetwork

    # startingBlock, currentBlock, and highestBlock
    # are progress indicators
    startingBlock*: BlockNumber
    currentBlock*: BlockNumber
    highestBlock*: BlockNumber

  TransactionKey = tuple
    blockNumber: BlockNumber
    index: int

proc newBaseChainDB*(db: TrieDatabaseRef, pruneTrie: bool = true, id: PublicNetwork = MainNet): BaseChainDB =
  ## Create a `BaseChainDB` on top of the given trie database,
  ## configured for the selected public network.
  new(result)
  result.db = db
  result.pruneTrie = pruneTrie
  result.config = publicChainConfig(id)
  result.networkId = id

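# Usage sketch (assuming the in-memory `newMemoryDB()` constructor from
# eth/trie/db, e.g. in a test):
#
#   let chainDB = newBaseChainDB(newMemoryDB(), pruneTrie = false)
#   chainDB.populateProgress()
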
proc `$`*(db: BaseChainDB): string =
  result = "BaseChainDB"

proc exists*(self: BaseChainDB, hash: Hash256): bool =
  ## Returns `true` if the given hash is present as a key in the backing database.
  self.db.contains(hash.data)

proc getBlockHeader*(self: BaseChainDB; blockHash: Hash256, output: var BlockHeader): bool =
  ## Looks up the block header for the given hash; returns `false` and
  ## leaves `output` untouched if it is not in the database.
  let data = self.db.get(genericHashKey(blockHash).toOpenArray)
  if data.len != 0:
    output = rlp.decode(data, BlockHeader)
    result = true

proc getBlockHeader*(self: BaseChainDB, blockHash: Hash256): BlockHeader =
  ## Returns the requested block header as specified by block hash.
  ##
  ## Raises BlockNotFound if it is not present in the db.
  if not self.getBlockHeader(blockHash, result):
    raise newException(BlockNotFound, "No block with hash " & blockHash.data.toHex)

proc getHash(self: BaseChainDB, key: DbKey, output: var Hash256): bool {.inline.} =
  ## Reads the `Hash256` stored under `key`; returns `false` if the key
  ## is not present.
  let data = self.db.get(key.toOpenArray)
  if data.len != 0:
    output = rlp.decode(data, Hash256)
    result = true

proc getCanonicalHead*(self: BaseChainDB): BlockHeader =
  ## Returns the header at the tip of the canonical chain.
  ## Raises CanonicalHeadNotFound if no head has been set yet.
  var headHash: Hash256
  if not self.getHash(canonicalHeadHashKey(), headHash) or
      not self.getBlockHeader(headHash, result):
    raise newException(CanonicalHeadNotFound,
                       "No canonical head set for this chain")

proc populateProgress*(self: BaseChainDB) =
  ## Initialises the sync progress indicators from the current canonical
  ## head, falling back to block 0 for an empty database.
  try:
    self.startingBlock = self.getCanonicalHead().blockNumber
  except CanonicalHeadNotFound:
    self.startingBlock = toBlockNumber(0)

  self.currentBlock = self.startingBlock
  self.highestBlock = self.startingBlock

proc getBlockHash*(self: BaseChainDB, n: BlockNumber, output: var Hash256): bool {.inline.} =
  ## Return the block hash for the given block number.
  self.getHash(blockNumberToHashKey(n), output)

proc getBlockHash*(self: BaseChainDB, n: BlockNumber): Hash256 {.inline.} =
  ## Return the block hash for the given block number.
  ## Raises BlockNotFound if no hash is stored for that number.
  if not self.getHash(blockNumberToHashKey(n), result):
    raise newException(BlockNotFound, "No block hash for number " & $n)

proc getBlockHeader*(self: BaseChainDB; n: BlockNumber, output: var BlockHeader): bool =
  ## Returns the block header with the given number in the canonical chain.
  var blockHash: Hash256
  if self.getBlockHash(n, blockHash):
    result = self.getBlockHeader(blockHash, output)

proc getBlockHeader*(self: BaseChainDB; n: BlockNumber): BlockHeader =
  ## Returns the block header with the given number in the canonical chain.
  ## Raises BlockNotFound error if the block is not in the DB.
  self.getBlockHeader(self.getBlockHash(n))

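# Lookup sketch (assuming `chainDB` holds an initialised chain whose genesis
# header has been persisted): headers can be fetched by number or by hash,
# and both variants raise BlockNotFound when the data is missing.
#
#   let genesis = chainDB.getBlockHeader(0.toBlockNumber)
#   let same    = chainDB.getBlockHeader(genesis.blockHash)
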
proc getScore*(self: BaseChainDB; blockHash: Hash256): Uint256 =
  ## Returns the total difficulty ("score") recorded for the given block hash.
  rlp.decode(self.db.get(blockHashToScoreKey(blockHash).toOpenArray), Uint256)

proc getAncestorsHashes*(self: BaseChainDB, limit: Uint256, header: BlockHeader): seq[Hash256] =
  ## Returns the hashes of up to `limit` ancestors of `header`, ordered
  ## from the oldest ancestor up to the immediate parent.
  var ancestorCount = min(header.blockNumber, limit).truncate(int)
  var h = header

  result = newSeq[Hash256](ancestorCount)
  while ancestorCount > 0:
    h = self.getBlockHeader(h.parentHash)
    result[ancestorCount - 1] = h.hash
    dec ancestorCount

iterator findNewAncestors(self: BaseChainDB; header: BlockHeader): BlockHeader =
  ## Returns the chain leading up from the given header until the first
  ## ancestor it has in common with our canonical chain.
  var h = header
  var orig: BlockHeader
  while true:
    if self.getBlockHeader(h.blockNumber, orig) and orig.hash == h.hash:
      break

    yield h

    if h.parentHash == GENESIS_PARENT_HASH:
      break
    else:
      h = self.getBlockHeader(h.parentHash)

proc addBlockNumberToHashLookup*(self: BaseChainDB; header: BlockHeader) =
  ## Records the block-number-to-block-hash mapping for the given header.
  self.db.put(blockNumberToHashKey(header.blockNumber).toOpenArray,
              rlp.encode(header.hash))

proc persistTransactions*(self: BaseChainDB, blockNumber: BlockNumber,
                          transactions: openArray[Transaction]): Hash256 =
  ## Stores the transactions of a block in a hexary trie keyed by index,
  ## records the tx-hash to (block number, index) lookup for each of them,
  ## and returns the resulting transaction trie root.
  var trie = initHexaryTrie(self.db)
  for idx, tx in transactions:
    let
      encodedTx = rlp.encode(tx)
      txHash = keccakHash(encodedTx)
      txKey: TransactionKey = (blockNumber, idx)
    trie.put(rlp.encode(idx), encodedTx)
    self.db.put(transactionHashToBlockKey(txHash).toOpenArray, rlp.encode(txKey))
  trie.rootHash

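# Persistence sketch (`chainDB`, `header` and `body` assumed from the caller):
# the returned root is the transaction trie root, so for a consistent block it
# matches the header's txRoot.
#
#   let txRoot = chainDB.persistTransactions(header.blockNumber, body.transactions)
#   doAssert txRoot == header.txRoot
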
proc getTransaction*(self: BaseChainDB, txRoot: Hash256, txIndex: int, res: var Transaction): bool =
  ## Loads the transaction at `txIndex` from the transaction trie with the
  ## given root; returns `false` if there is no such entry.
  var db = initHexaryTrie(self.db, txRoot)
  let txData = db.get(rlp.encode(txIndex))
  if txData.len > 0:
    res = rlp.decode(txData, Transaction)
    result = true

iterator getBlockTransactionData*(self: BaseChainDB, transactionRoot: Hash256): seq[byte] =
  ## Yields the RLP-encoded transactions stored under the given
  ## transaction trie root, in index order.
  var transactionDb = initHexaryTrie(self.db, transactionRoot)
  var transactionIdx = 0
  while true:
    let transactionKey = rlp.encode(transactionIdx)
    if transactionKey in transactionDb:
      yield transactionDb.get(transactionKey)
    else:
      break
    inc transactionIdx

iterator getBlockTransactions*(self: BaseChainDB, header: BlockHeader): Transaction =
  ## Yields the decoded transactions of the block described by `header`.
  for encodedTx in self.getBlockTransactionData(header.txRoot):
    yield rlp.decode(encodedTx, Transaction)

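# Iteration sketch (`chainDB` and `header` assumed from the caller): decoding
# every transaction of a block straight from the trie referenced by its header.
#
#   for tx in chainDB.getBlockTransactions(header):
#     debug "block transaction", gasLimit = tx.gasLimit
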
iterator getBlockTransactionHashes*(self: BaseChainDB, blockHeader: BlockHeader): Hash256 =
  ## Returns an iterable of the transaction hashes from the block specified
  ## by the given block header.
  for encodedTx in self.getBlockTransactionData(blockHeader.txRoot):
    yield keccakHash(encodedTx)

proc getTransactionCount*(chain: BaseChainDB, txRoot: Hash256): int =
  ## Counts the transactions stored under the given transaction trie root.
  var trie = initHexaryTrie(chain.db, txRoot)
  var txCount = 0
  while true:
    let txKey = rlp.encode(txCount)
    if txKey notin trie:
      break
    inc txCount

  txCount

proc getUnclesCount*(self: BaseChainDB, ommersHash: Hash256): int =
  ## Returns the number of uncles stored under `ommersHash`, or 0 when the
  ## hash is the empty-uncle hash or nothing is stored for it.
  if ommersHash != EMPTY_UNCLE_HASH:
    let encodedUncles = self.db.get(genericHashKey(ommersHash).toOpenArray)
    if encodedUncles.len != 0:
      let r = rlpFromBytes(encodedUncles)
      result = r.listLen

proc getUncles*(self: BaseChainDB, ommersHash: Hash256): seq[BlockHeader] =
  ## Returns the uncle headers stored under `ommersHash`, or an empty
  ## sequence when there are none.
  if ommersHash != EMPTY_UNCLE_HASH:
    let encodedUncles = self.db.get(genericHashKey(ommersHash).toOpenArray)
    if encodedUncles.len != 0:
      result = rlp.decode(encodedUncles, seq[BlockHeader])

proc getBlockBody*(self: BaseChainDB, blockHash: Hash256, output: var BlockBody): bool =
  ## Assembles the block body (transactions and uncles) for the given
  ## block hash; returns `false` if the header or the uncle data is missing.
  var header: BlockHeader
  if self.getBlockHeader(blockHash, header):
    result = true
    output.transactions = @[]
    output.uncles = @[]
    for encodedTx in self.getBlockTransactionData(header.txRoot):
      output.transactions.add(rlp.decode(encodedTx, Transaction))

    if header.ommersHash != EMPTY_UNCLE_HASH:
      let encodedUncles = self.db.get(genericHashKey(header.ommersHash).toOpenArray)
      if encodedUncles.len != 0:
        output.uncles = rlp.decode(encodedUncles, seq[BlockHeader])
      else:
        result = false

proc getBlockBody*(self: BaseChainDB, hash: Hash256): BlockBody =
  ## Returns the block body for the given block hash.
  ## Raises ValueError if it cannot be assembled from the database.
  if not self.getBlockBody(hash, result):
    raise newException(ValueError, "Error when retrieving block body")

proc getUncleHashes*(self: BaseChainDB, blockHashes: openArray[Hash256]): seq[Hash256] =
  ## Collects the uncle hashes of all blocks referenced by `blockHashes`.
  for blockHash in blockHashes:
    var blockBody = self.getBlockBody(blockHash)
    for uncle in blockBody.uncles:
      result.add uncle.hash

proc getUncleHashes*(self: BaseChainDB, header: BlockHeader): seq[Hash256] =
  ## Returns the hashes of the uncles referenced by `header.ommersHash`.
  if header.ommersHash != EMPTY_UNCLE_HASH:
    let encodedUncles = self.db.get(genericHashKey(header.ommersHash).toOpenArray)
    if encodedUncles.len != 0:
      let uncles = rlp.decode(encodedUncles, seq[BlockHeader])
      for x in uncles:
        result.add x.hash

proc getTransactionKey*(self: BaseChainDB, transactionHash: Hash256): tuple[blockNumber: BlockNumber, index: int] {.inline.} =
  ## Looks up the (block number, index) position of a transaction by its
  ## hash; returns index -1 when the transaction is unknown.
  let tx = self.db.get(transactionHashToBlockKey(transactionHash).toOpenArray)

  if tx.len > 0:
    let key = rlp.decode(tx, TransactionKey)
    result = (key.blockNumber, key.index)
  else:
    result = (0.toBlockNumber, -1)

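# Lookup sketch (`chainDB` and `txHash` assumed from the caller): resolving a
# transaction hash back to its position and body; an index of -1 signals an
# unknown transaction.
#
#   let (blockNum, txIndex) = chainDB.getTransactionKey(txHash)
#   if txIndex >= 0:
#     let header = chainDB.getBlockHeader(blockNum)
#     var tx: Transaction
#     if chainDB.getTransaction(header.txRoot, txIndex, tx):
#       debug "transaction found", blockNumber = blockNum, index = txIndex
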
proc removeTransactionFromCanonicalChain(self: BaseChainDB, transactionHash: Hash256) {.inline.} =
  ## Removes the transaction specified by the given hash from the canonical chain.
  self.db.del(transactionHashToBlockKey(transactionHash).toOpenArray)

proc setAsCanonicalChainHead(self: BaseChainDB; headerHash: Hash256): seq[BlockHeader] =
  ## Sets the header as the canonical chain HEAD and returns the headers
  ## that newly became canonical, oldest first.
  let header = self.getBlockHeader(headerHash)

  var newCanonicalHeaders = sequtils.toSeq(findNewAncestors(self, header))
  reverse(newCanonicalHeaders)
  for h in newCanonicalHeaders:
    var oldHash: Hash256
    if not self.getBlockHash(h.blockNumber, oldHash):
      break

    let oldHeader = self.getBlockHeader(oldHash)
    for txHash in self.getBlockTransactionHashes(oldHeader):
      self.removeTransactionFromCanonicalChain(txHash)
      # TODO re-add txn to internal pending pool (only if local sender)

  for h in newCanonicalHeaders:
    self.addBlockNumberToHashLookup(h)

  self.db.put(canonicalHeadHashKey().toOpenArray, rlp.encode(headerHash))

  return newCanonicalHeaders

proc setHead*(self: BaseChainDB, header: BlockHeader, writeHeader = false) =
  ## Makes `header` the canonical head, optionally writing the header
  ## itself to the database first.
  var headerHash = rlpHash(header)
  if writeHeader:
    self.db.put(genericHashKey(headerHash).toOpenArray, rlp.encode(header))
  self.addBlockNumberToHashLookup(header)
  self.db.put(canonicalHeadHashKey().toOpenArray, rlp.encode(headerHash))

proc headerExists*(self: BaseChainDB; blockHash: Hash256): bool =
  ## Returns True if the header with the given block hash is in our DB.
  self.db.contains(genericHashKey(blockHash).toOpenArray)

proc persistReceipts*(self: BaseChainDB, receipts: openArray[Receipt]): Hash256 =
  ## Stores the receipts in a hexary trie keyed by index and returns the
  ## resulting receipt trie root.
  var trie = initHexaryTrie(self.db)
  for idx, rec in receipts:
    trie.put(rlp.encode(idx), rlp.encode(rec))
  trie.rootHash

iterator getReceipts*(self: BaseChainDB; receiptRoot: Hash256): Receipt =
  ## Yields the receipts stored under the given receipt trie root, in
  ## index order.
  var receiptDb = initHexaryTrie(self.db, receiptRoot)
  var receiptIdx = 0
  while true:
    let receiptKey = rlp.encode(receiptIdx)
    if receiptKey in receiptDb:
      let receiptData = receiptDb.get(receiptKey)
      yield rlp.decode(receiptData, Receipt)
    else:
      break
    inc receiptIdx

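# Receipt sketch (`chainDB` and `receipts` assumed from the caller):
# persistReceipts and getReceipts are symmetric, using the same RLP-encoded
# index keys as the transaction trie.
#
#   let receiptRoot = chainDB.persistReceipts(receipts)
#   for rec in chainDB.getReceipts(receiptRoot):
#     debug "receipt", cumulativeGasUsed = rec.cumulativeGasUsed
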
proc persistHeaderToDb*(self: BaseChainDB; header: BlockHeader): seq[BlockHeader] =
  ## Persists the header, updates its total-difficulty score and the
  ## number-to-hash lookup, and promotes it to canonical head when its
  ## score exceeds the current head's (or when no head exists yet).
  ## Returns the headers that became canonical as a result.
  let isGenesis = header.parentHash == GENESIS_PARENT_HASH
  let headerHash = header.blockHash
  if not isGenesis and not self.headerExists(header.parentHash):
    raise newException(ParentNotFound, "Cannot persist block header " &
      $headerHash & " with unknown parent " & $header.parentHash)
  self.db.put(genericHashKey(headerHash).toOpenArray, rlp.encode(header))

  let score = if isGenesis: header.difficulty
              else: self.getScore(header.parentHash) + header.difficulty
  self.db.put(blockHashToScoreKey(headerHash).toOpenArray, rlp.encode(score))

  self.addBlockNumberToHashLookup(header)

  var headScore: Uint256
  try:
    headScore = self.getScore(self.getCanonicalHead().hash)
  except CanonicalHeadNotFound:
    return self.setAsCanonicalChainHead(headerHash)

  if score > headScore:
    result = self.setAsCanonicalChainHead(headerHash)

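# Import sketch (`chainDB` and `header` assumed from the caller): the canonical
# head only moves when the new header's accumulated score exceeds the current
# head's; the returned sequence lists the headers that became canonical.
#
#   let newCanonical = chainDB.persistHeaderToDb(header)
#   for h in newCanonical:
#     info "new canonical header", blockNumber = h.blockNumber
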
proc persistUncles*(self: BaseChainDB, uncles: openArray[BlockHeader]): Hash256 =
  ## Persists the list of uncles to the database.
  ## Returns the uncles hash.
  let enc = rlp.encode(uncles)
  result = keccakHash(enc)
  self.db.put(genericHashKey(result).toOpenArray, enc)
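
# Uncle sketch (`chainDB` and `uncles` assumed from the caller): the hash
# returned by persistUncles is the value a header carries in ommersHash, so
# the stored uncles can be read back via getUncles.
#
#   let ommersHash = chainDB.persistUncles(uncles)
#   doAssert chainDB.getUncles(ommersHash).len == uncles.len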