nimbus-eth1/nimbus/db/geth_db.nim
Jordan Hrycaj 221e6c9e2f
Unified database frontend integration (#1670)
* Nimbus folder environment update

details:
* Integrated `CoreDbRef` for the sources in the `nimbus` sub-folder.
* The `nimbus` program does not compile yet as it needs the updates
  in the parallel `stateless` sub-folder.

* Stateless environment update

details:
* Integrated `CoreDbRef` for the sources in the `stateless` sub-folder.
* The `nimbus` program compiles now.

* Premix environment update

details:
* Integrated `CoreDbRef` for the sources in the `premix` sub-folder.

* Fluffy environment update

details:
* Integrated `CoreDbRef` for the sources in the `fluffy` sub-folder.

* Tools environment update

details:
* Integrated `CoreDbRef` for the sources in the `tools` sub-folder.

* Nodocker environment update

details:
* Integrated `CoreDbRef` for the sources in the
  `hive_integration/nodocker` sub-folder.

* Tests environment update

details:
* Integrated `CoreDbRef` for the sources in the `tests` sub-folder.
* The unit tests compile and run cleanly now.

* Generalise `CoreDbRef` to any `select_backend` supported database

why:
  Generalisation was just missed due to overcoming some compiler oddity
  which was tied to rocksdb for testing.

* Suppress compiler warning for `newChainDB()`

why:
  Warning was added to this function which must be wrapped so that
  any `CatchableError` is re-raised as `Defect`.

* Split off persistent `CoreDbRef` constructor into separate file

why:
  This allows to compile a memory only database version without linking
  the backend library.

* Use memory `CoreDbRef` database by default

detail:
 Persistent DB constructor needs to import `db/core_db/persistent`

why:
 Most tests use memory DB anyway. This avoids linking `-lrocksdb` or
 any other backend by default.

* fix `toLegacyBackend()` availability check

why:
  got garbled after memory/persistent split.

* Clarify raw access to MPT for snap sync handler

why:
  Logically, `kvt` is not the raw access for the hexary trie (although
  this holds for the legacy database)
2023-08-04 12:10:09 +01:00

36 lines
1.2 KiB
Nim

import eth/[rlp, common], core_db
const
  # Key-schema constants matching go-ethereum's `rawdb` layout, so this
  # module can read a database produced by geth directly.
  headerPrefix = 'h'.byte # headerPrefix + num (uint64 big endian) + hash -> header
  headerHashSuffix = 'n'.byte # headerPrefix + num (uint64 big endian) + headerHashSuffix -> hash
  blockBodyPrefix = 'b'.byte # blockBodyPrefix + num (uint64 big endian) + hash -> block body
proc headerHash*(db: CoreDbRef, number: uint64): Hash256 =
  ## Return the canonical block hash stored for block `number`.
  ##
  ## Uses geth's canonical-hash key layout:
  ## `headerPrefix (1) + number as uint64 big-endian (8) + headerHashSuffix (1)`
  ## for a 10-byte key. Aborts via `doAssert` if the stored value is not
  ## exactly 32 bytes (i.e. the entry is missing or corrupt).
  var key: array[10, byte]
  key[0] = headerPrefix
  key[1..8] = toBytesBE(number)[0..^1]
  key[^1] = headerHashSuffix
  let res = db.kvt.get(key)
  doAssert(res.len == 32)
  result.data[0..31] = res[0..31]
proc blockHeader*(db: CoreDbRef, hash: Hash256, number: uint64): BlockHeader =
  ## Fetch and RLP-decode the block header for (`number`, `hash`).
  ##
  ## Key layout (41 bytes):
  ## `headerPrefix (1) + number as uint64 big-endian (8) + hash (32)`.
  ## Raises an RLP decoding error if the stored value is absent or malformed.
  var key: array[41, byte]
  key[0] = headerPrefix
  key[1..8] = toBytesBE(number)[0..^1]
  key[9..40] = hash.data[0..^1]
  let res = db.kvt.get(key)
  result = rlp.decode(res, BlockHeader)
proc blockHeader*(db: CoreDbRef, number: uint64): BlockHeader =
  ## Convenience overload: resolve the canonical hash for `number` first,
  ## then fetch the header for that (hash, number) pair.
  let hash = db.headerHash(number)
  db.blockHeader(hash, number)
proc blockBody*(db: CoreDbRef, hash: Hash256, number: uint64): BlockBody =
  ## Fetch and RLP-decode the block body (transactions + uncles) for
  ## (`number`, `hash`).
  ##
  ## Key layout (41 bytes):
  ## `blockBodyPrefix (1) + number as uint64 big-endian (8) + hash (32)`.
  ## Raises an RLP decoding error if the stored value is absent or malformed.
  var key: array[41, byte]
  key[0] = blockBodyPrefix
  key[1..8] = toBytesBE(number)[0..^1]
  key[9..40] = hash.data[0..^1]
  let res = db.kvt.get(key)
  result = rlp.decode(res, BlockBody)