Mirror of https://github.com/status-im/nimbus-eth1.git
Remove hunter (#2697)

* Remove `hunter`
  why: Neither functional anymore, nor used
* Remove obsolete premix
* Remove obsolete launcher

Co-authored-by: jangko <jangko128@gmail.com>

parent 6565544d35
commit 3822c57ddc
Makefile (5 lines changed)
@ -60,13 +60,10 @@ EXCLUDED_NIM_PACKAGES := \
 # debugging tools + testing tools
 TOOLS := \
   test_tools_build \
   persist \
-  hunter \
   nrpc
 TOOLS_DIRS := \
   nrpc \
-  tests \
-  premix
+  tests
 # comma-separated values for the "clean" target
 TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(TOOLS))
@ -21,7 +21,7 @@ NIM_FLAGS="c -d:release"
 
 echo "## ${1}" > simulators.md
 
-# more suites: engine, graphql, rpc
+# more suites: graphql, rpc
 suites=(consensus pyspec engine)
 for suite in "${suites[@]}"
 do
@ -9,9 +9,11 @@
 # according to those terms.
 
 import
-  eth/common/eth_types,
+  eth/common,
+  eth/common/hashes,
   stint,
   kzg4844/kzg,
   kzg4844/kzg_abi,
   stew/endians2,
   nimcrypto/sha2,
   results,
@ -138,9 +140,9 @@ proc generateBlob(blobid: BlobID): BlobCommitment =
     doAssert(false, res.error)
   result.commitment = res.get
 
-proc getVersionedHash*(blobid: BlobID, commitmentVersion: byte): Hash256 =
+proc getVersionedHash*(blobid: BlobID, commitmentVersion: byte): Hash32 =
   let res = blobid.generateBlob()
-  result = sha256.digest(res.commitment.bytes)
+  result = Hash32 sha256.digest(res.commitment.bytes).data
   result.data[0] = commitmentVersion
 
 proc blobDataGenerator*(startBlobId: BlobID, blobCount: int): BlobTxWrapData =
@ -126,7 +126,7 @@ method getPayloadAttributes(cust: BasePayloadAttributesCustomizer, basePayloadAt
     customPayloadAttributes.timestamp = w3Qty cust.timestamp.get
 
   if cust.prevRandao.isSome:
-    customPayloadAttributes.prevRandao = w3Hash cust.prevRandao.get
+    customPayloadAttributes.prevRandao = cust.prevRandao.get
 
   if cust.suggestedFeeRecipient.isSome:
     customPayloadAttributes.suggestedFeeRecipient = w3Addr cust.suggestedFeeRecipient.get
@ -338,7 +338,7 @@ func getTimestamp*(cust: CustomPayloadData, basePayload: ExecutionPayload): uint
 proc customizePayload*(cust: CustomPayloadData, data: ExecutableData): ExecutableData {.gcsafe.} =
   var customHeader = blockHeader(data.basePayload, beaconRoot = data.beaconRoot)
   if cust.transactions.isSome:
-    customHeader.txRoot = calcTxRoot(cust.transactions.get)
+    customHeader.transactionsRoot = calcTxRoot(cust.transactions.get)
 
   # Overwrite custom information
   if cust.parentHash.isSome:
@ -533,13 +533,13 @@ type
     ExtraVersionedHashes
     InvalidWithdrawals
 
-func scramble(data: Web3Hash): Opt[common.Hash256] =
+func scramble(data: Web3Hash): Opt[Hash32] =
   var h = ethHash data
   h.data[^1] = byte(255 - h.data[^1])
   Opt.some(h)
 
-func scramble(data: common.Hash256): Opt[common.Hash256] =
-  var h = data
+func scramble(data: Bytes32): Opt[Hash32] =
+  var h = Hash32 data
   h.data[0] = byte(255 - h.data[0])
   Opt.some(h)
 
@ -585,9 +585,9 @@ proc generateInvalidPayload*(sender: TxSender, data: ExecutableData, payloadFiel
   of InvalidPrevRandao:
     # This option potentially requires a transaction that uses the PREVRANDAO opcode.
     # Otherwise the payload will still be valid.
-    let randomHash = common.Hash256.randomBytes()
+    let randomHash = common.Hash32.randomBytes()
     customPayloadMod = CustomPayloadData(
-      prevRandao: Opt.some(randomHash),
+      prevRandao: Opt.some(Bytes32 randomHash.data),
     )
   of InvalidParentBeaconBlockRoot:
     doAssert(data.beaconRoot.isSome,
@ -654,10 +654,10 @@ proc produceSingleBlock*(cl: CLMocker, cb: BlockProcessCallbacks): bool {.gcsafe
     return false
 
   # mixHash == prevRandao
-  if newHeader.mixHash != cl.prevRandaoHistory[cl.latestHeadNumber]:
+  if newHeader.mixHash != Bytes32 cl.prevRandaoHistory[cl.latestHeadNumber]:
     error "CLMocker: Client produced a new header with incorrect mixHash",
-      get = newHeader.mixHash.data.toHex,
-      expect = cl.prevRandaoHistory[cl.latestHeadNumber].data.toHex
+      get = newHeader.mixHash,
+      expect = cl.prevRandaoHistory[cl.latestHeadNumber]
     return false
 
   # nonce == 0x0000000000000000
@ -23,6 +23,7 @@ import
  core/tx_pool/tx_item,
  core/block_import,
  rpc,
  sync/handlers,
  beacon/beacon_engine,
  beacon/web3_eth_conv,
  common
@ -198,8 +198,8 @@ proc makeTxOfType(params: MakeTxParams, tc: BaseTx): PooledTransaction =
       ),
       networkPayload: NetworkPayload(
         blobs: blobData.blobs.mapIt(it.bytes),
-        commitments: blobData.commitments.mapIt(it.bytes),
-        proofs: blobData.proofs.mapIt(it.bytes),
+        commitments: blobData.commitments.mapIt(KzgCommitment it.bytes),
+        proofs: blobData.proofs.mapIt(KzgProof it.bytes),
       )
     )
   else:
@ -342,8 +342,8 @@ proc makeTx*(params: MakeTxParams, tc: BlobTx): PooledTransaction =
     tx: signTransaction(unsignedTx, params.key),
     networkPayload: NetworkPayload(
       blobs : data.blobs.mapIt(it.bytes),
-      commitments: data.commitments.mapIt(it.bytes),
-      proofs : data.proofs.mapIt(it.bytes),
+      commitments: data.commitments.mapIt(KzgCommitment it.bytes),
+      proofs : data.proofs.mapIt(KzgProof it.bytes),
     ),
   )
 
@ -79,10 +79,10 @@ const ZeroAddr* = default(EthAddress)
 func toHash*(x: UInt256): common.Hash256 =
   common.Hash32(x.toByteArrayBE)
 
-func timestampToBeaconRoot*(timestamp: Quantity): Web3FixedBytes[32] =
+func timestampToBeaconRoot*(timestamp: Quantity): Hash32 =
   # Generates a deterministic hash from the timestamp
   let h = sha2.sha256.digest(timestamp.uint64.toBytesBE)
-  Web3FixedBytes[32](h.data)
+  Hash32(h.data)
 
 proc randomBytes*(_: type common.Hash256): common.Hash256 =
   doAssert randomBytes(result.data) == 32
@ -158,12 +158,6 @@ proc persistBlocksImpl(
       skipUncles = NoPersistUncles in flags,
     )
 
-    # when defined(nimbusDumpDebuggingMetaData):
-    #   if validationResult == ValidationResult.Error and
-    #      body.transactions.calcTxRoot == header.txRoot:
-    #     vmState.dumpDebuggingMetaData(header, body)
-    #     warn "Validation error. Debugging metadata dumped."
-
     let blockHash = header.blockHash()
     if NoPersistHeader notin flags:
       if not c.db.persistHeader(
@ -1,57 +0,0 @@
# Nimbus
# Copyright (c) 2019-2023 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import os, osproc, json

when defined(windows):
  const
    premixExecutable = "premix.exe"
    browserLauncher = "cmd /c start"
elif defined(macos):
  const
    premixExecutable = "premix"
    browserLauncher = "open"
else:
  const
    premixExecutable = "premix"
    browserLauncher = "xdg-open"

proc getFileDir*(file: string): string =
  var searchDirs = [
    "." ,
    "." / "build" ,
    "." / "premix"
  ]

  for dir in searchDirs:
    if fileExists(dir / file):
      return dir

  result = ""

proc getFilePath(file: string): string =
  let dir = getFileDir(file)
  if dir.len > 0:
    return dir / file
  else:
    return ""

proc launchPremix*(fileName: string, metaData: JsonNode) =
  let premixExe = getFilePath(premixExecutable)

  writeFile(fileName, metaData.pretty)

  if premixExe.len > 0:
    if execCmd(premixExe & " " & fileName) == 0:
      if execCmd(browserLauncher & " " & getFilePath("index.html")) != 0:
        echo "failed to launch default browser"
    else:
      echo "failed to execute the premix debugging tool"
@ -23,7 +23,6 @@ import
   ./db/[core_db, ledger],
   ./evm/[code_bytes, state, types],
   ./evm/tracer/legacy_tracer,
-  ./launcher,
   ./transaction,
   ./utils/utils
 
@ -352,44 +351,6 @@ proc traceTransactionsImpl(
     result.add traceTransactionImpl(
       com, header, transactions, i.uint64, {DisableState})
 
-
-proc dumpDebuggingMetaDataImpl(
-    vmState: BaseVMState;
-    blk: EthBlock;
-    launchDebugger = true;
-      ) {.raises: [CatchableError].} =
-  template header: Header = blk.header
-
-  let
-    cc = activate CaptCtxRef.init(vmState.com, header)
-    blockNumber = header.number
-    bloom = createBloom(vmState.receipts)
-
-  defer: cc.release()
-
-  let blockSummary = %{
-    "receiptsRoot": %("0x" & toHex(calcReceiptsRoot(vmState.receipts).data)),
-    "stateRoot": %("0x" & toHex(vmState.stateDB.rootHash.data)),
-    "logsBloom": %("0x" & toHex(bloom))
-  }
-
-  var metaData = %{
-    "blockNumber": %blockNumber.toHex,
-    "txTraces": traceTransactionsImpl(vmState.com, header, blk.transactions),
-    "stateDump": dumpBlockStateImpl(vmState.com, blk),
-    "blockTrace": traceBlockImpl(vmState.com, blk, {DisableState}),
-    "receipts": toJson(vmState.receipts),
-    "block": blockSummary
-  }
-
-  metaData.dumpMemoryDB(cc.cpt)
-
-  let jsonFileName = "debug" & $blockNumber & ".json"
-  if launchDebugger:
-    launchPremix(jsonFileName, metaData)
-  else:
-    writeFile(jsonFileName, metaData.pretty())
-
 # ------------------------------------------------------------------------------
 # Public functions
 # ------------------------------------------------------------------------------
@ -443,14 +404,6 @@ proc traceTransactions*(
   "traceTransactions".safeTracer:
     result = com.traceTransactionsImpl(header, transactions)
 
-proc dumpDebuggingMetaData*(
-    vmState: BaseVMState;
-    blk: EthBlock;
-    launchDebugger = true;
-      ) =
-  "dumpDebuggingMetaData".safeTracer:
-    vmState.dumpDebuggingMetaDataImpl(blk, launchDebugger)
-
 # ------------------------------------------------------------------------------
 # End
 # ------------------------------------------------------------------------------
@ -105,6 +105,12 @@ proc short*(h: Hash256): string =
   bytes[^3..^1] = h.data[^3..^1]
   bytes.toHex
 
+proc short*(h: Bytes32): string =
+  var bytes: array[6, byte]
+  bytes[0..2] = h.data[0..2]
+  bytes[^3..^1] = h.data[^3..^1]
+  bytes.toHex
+
 func short*(x: Duration): string =
   let parts = x.toParts
   if parts[Hours] > 0:
premix/.gitignore (vendored, 8 lines, deleted)
@ -1,8 +0,0 @@
*.db
*.db-lock
/output
/temp
/data
/nimcache
*.json
premixData.js
premix/assets/css/uikit.min.css (vendored, deleted)
File diff suppressed because one or more lines are too long

Binary image file deleted (not shown; 189 KiB)
@ -1,520 +0,0 @@
|
||||
var premix = function() {
|
||||
function chunkSubstr(str, size) {
|
||||
const numChunks = Math.ceil(str.length / size)
|
||||
const chunks = new Array(numChunks)
|
||||
|
||||
for (let i = 0, o = 0; i < numChunks; ++i, o += size) {
|
||||
chunks[i] = str.substr(o, size)
|
||||
}
|
||||
|
||||
return chunks
|
||||
}
|
||||
|
||||
function split32(text) {
|
||||
if(text.length > 32) {
|
||||
let chunks = chunkSubstr(text, 32);
|
||||
let result = "";
|
||||
for(var x of chunks) {
|
||||
result += '<div>'+x+'</div>';
|
||||
}
|
||||
return result;
|
||||
} else {
|
||||
return text;
|
||||
}
|
||||
}
|
||||
|
||||
function renderCells(row, cls, cells) {
|
||||
for(var text of cells) {
|
||||
let cell = $(`<td ${cls}>${split32(text)}</td>`).appendTo(row);
|
||||
if(text.length > 32) cell.addClass('tm-monospace-cell');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
fields: ['op', 'pc', 'gas', 'gasCost', 'depth'],
|
||||
|
||||
newTable: function(container) {
|
||||
let table = $('<table class="uk-table uk-table-divider"/>').appendTo(container);
|
||||
$('<thead><tr><th>Field</th><th>Nimbus</th><th>Geth</th></tr></thead>').appendTo(table);
|
||||
return $('<tbody></tbody>').appendTo(table);
|
||||
},
|
||||
|
||||
renderRow: function(body, nimbus, geth, x) {
|
||||
let row = $('<tr/>').appendTo(body);
|
||||
let ncr = nimbus instanceof Object ? nimbus[x].toString().toLowerCase() : nimbus;
|
||||
let gcr = geth instanceof Object ? geth[x].toString().toLowerCase() : geth;
|
||||
let cls = ncr == gcr ? '' : 'class="uk-text-danger"';
|
||||
renderCells(row, cls, [x, ncr, gcr]);
|
||||
},
|
||||
|
||||
newSection: function(container, title, colored) {
|
||||
let section = $('<div class="uk-section uk-section-xsmall tm-horizontal-overflow"></div>').appendTo(container);
|
||||
section.addClass(colored ? "uk-section-secondary uk-light" : "uk-section-muted");
|
||||
let contentDiv = $('<div class="uk-container uk-margin-small-left uk-margin-small-right"></div>').appendTo(section);
|
||||
$(`<h4>${title}</h4>`).appendTo(contentDiv);
|
||||
return contentDiv;
|
||||
}
|
||||
|
||||
};
|
||||
}();
|
||||
|
||||
function deepCopy(src) {
|
||||
return JSON.parse(JSON.stringify(src));
|
||||
}
|
||||
|
||||
function windowResize() {
|
||||
let bodyHeight = $(window).height();
|
||||
$('#opCodeSideBar').css('height', parseInt(bodyHeight) - 80);
|
||||
}
|
||||
|
||||
function renderTrace(title, nimbus, geth) {
|
||||
let container = $('#opCodeContainer').empty();
|
||||
let body = premix.newTable(container);
|
||||
for(var x of premix.fields) {
|
||||
premix.renderRow(body, nimbus, geth, x);
|
||||
}
|
||||
|
||||
if(nimbus.error) {
|
||||
geth.error = '';
|
||||
premix.renderRow(body, nimbus, geth, 'error');
|
||||
}
|
||||
|
||||
function renderExtra(name) {
|
||||
let nk = Object.keys(nimbus[name]);
|
||||
let gk = Object.keys(geth[name]);
|
||||
let keys = new Set(nk.concat(gk));
|
||||
|
||||
if(keys.size > 0) {
|
||||
let section = premix.newSection(container, name);
|
||||
let body = premix.newTable(section);
|
||||
for(var key of keys) {
|
||||
premix.renderRow(body, nimbus[name], geth[name], key);
|
||||
}
|
||||
$('<hr class="uk-divider-icon">').appendTo(container);
|
||||
}
|
||||
}
|
||||
|
||||
renderExtra("memory");
|
||||
renderExtra("stack");
|
||||
renderExtra("storage");
|
||||
}
|
||||
|
||||
function opCodeRenderer(txId, nimbus, geth) {
|
||||
function analyzeList(nimbus, geth) {
|
||||
for(var i in nimbus) {
|
||||
if(nimbus[i].toString().toLowerCase() != geth[i].toString().toLowerCase()) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function fillEmptyList(a, b) {
|
||||
if(a.length > b.length) {
|
||||
for(var i in a) {
|
||||
if(b[i] === undefined) {
|
||||
b[i] = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function fillEmptyMap(a, b) {
|
||||
if(Object.keys(a).length > Object.keys(b).length) {
|
||||
for(var i in a) {
|
||||
if(b[i] === undefined) {
|
||||
b[i] = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function fillEmptyField(nimbus, geth) {
|
||||
if(nimbus.memory === undefined) {
|
||||
nimbus.memory = [];
|
||||
}
|
||||
if(geth.memory === undefined) {
|
||||
geth.memory = [];
|
||||
}
|
||||
if(nimbus.stack === undefined) {
|
||||
nimbus.stack = [];
|
||||
}
|
||||
if(geth.stack === undefined) {
|
||||
geth.stack = [];
|
||||
}
|
||||
if(nimbus.storage === undefined) {
|
||||
nimbus.storage = {};
|
||||
}
|
||||
if(geth.storage === undefined) {
|
||||
geth.storage = {};
|
||||
}
|
||||
fillEmptyList(nimbus.memory, geth.memory);
|
||||
fillEmptyList(geth.memory, nimbus.memory);
|
||||
|
||||
fillEmptyList(nimbus.stack, geth.stack);
|
||||
fillEmptyList(geth.stack, nimbus.stack);
|
||||
|
||||
fillEmptyMap(nimbus.storage, geth.storage);
|
||||
fillEmptyMap(geth.storage, nimbus.storage);
|
||||
}
|
||||
|
||||
function moveStack(ncs, gcs, i) {
|
||||
let idx = parseInt(i);
|
||||
ncs[idx-1].stack = deepCopy(ncs[idx].stack);
|
||||
gcs[idx-1].stack = deepCopy(gcs[idx].stack);
|
||||
}
|
||||
|
||||
function analyze(nimbus, geth) {
|
||||
for(var x of premix.fields) {
|
||||
if(nimbus[x] === undefined) nimbus[x] = '';
|
||||
if(geth[x] === undefined) geth[x] = '';
|
||||
if(nimbus[x].toString().toLowerCase() != geth[x].toString().toLowerCase()) return false;
|
||||
}
|
||||
|
||||
let result = analyzeList(nimbus.memory, geth.memory);
|
||||
result = result && analyzeList(nimbus.stack, geth.stack);
|
||||
result = result && analyzeList(nimbus.storage, geth.storage);
|
||||
return result;
|
||||
}
|
||||
|
||||
txId = parseInt(txId);
|
||||
$('#opCodeTitle').text(`Tx #${(txId+1)}`);
|
||||
const numRows = Math.max(nimbus.txTraces[txId].structLogs.length, geth.txTraces[txId].structLogs.length);
|
||||
|
||||
if(numRows == 0) {
|
||||
$('#opCodeContainer').empty();
|
||||
$('#paging').empty();
|
||||
$('#opCodeSideBar').empty();
|
||||
return;
|
||||
}
|
||||
|
||||
const rowsPerPage = 500;
|
||||
var numPages = numRows / rowsPerPage;
|
||||
if(numRows % rowsPerPage != 0) numPages++;
|
||||
|
||||
$("#paging").paging(numRows, {
|
||||
format: numPages < 10 ? "n".repeat(numPages) : '[< (qq -) nnncnnn (- pp) >]',
|
||||
perpage: rowsPerPage,
|
||||
lapping: 1,
|
||||
page: 1,
|
||||
onSelect: function (page) {
|
||||
const data = this.slice;
|
||||
const start = data[0];
|
||||
const stop = data[1];
|
||||
|
||||
var ncs = deepCopy(nimbus.txTraces[txId].structLogs.slice(start, stop));
|
||||
var gcs = deepCopy(geth.txTraces[txId].structLogs.slice(start, stop));
|
||||
var sideBar = $('#opCodeSideBar').empty();
|
||||
|
||||
function fillEmptyOp(a, b) {
|
||||
function emptyOp() {
|
||||
return {op: '', pc: '', gas: '', gasCost: '', depth: '',
|
||||
storage:{}, memory: [], stack: []};
|
||||
}
|
||||
|
||||
if(a.length > b.length) {
|
||||
for(var i in a) {
|
||||
if(b[i] === undefined) {
|
||||
b[i] = emptyOp();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fillEmptyOp(ncs, gcs);
|
||||
fillEmptyOp(gcs, ncs);
|
||||
|
||||
for(var i in ncs) {
|
||||
fillEmptyField(ncs[i], gcs[i]);
|
||||
if(parseInt(i) > 0) {
|
||||
moveStack(ncs, gcs, i);
|
||||
}
|
||||
}
|
||||
|
||||
for(var i in ncs) {
|
||||
let pc = ncs[i].pc == '' ? gcs[i].pc : ncs[i].pc;
|
||||
let op = ncs[i].op == '' ? gcs[i].op : ncs[i].op;
|
||||
if(!analyze(ncs[i], gcs[i])) {
|
||||
var nav = $(`<li><a class="tm-text-danger" rel="${i}" href="#">${pc + ' ' + op}</a></li>`).appendTo(sideBar);
|
||||
} else {
|
||||
var nav = $(`<li><a rel="${i}" href="#">${pc + ' ' + op}</a></li>`).appendTo(sideBar);
|
||||
}
|
||||
nav.children('a').click(function(ev) {
|
||||
let idx = this.rel;
|
||||
$('#opCodeSideBar li').removeClass('uk-active');
|
||||
$(this).parent().addClass('uk-active');
|
||||
renderTrace('tx', ncs[idx], gcs[idx]);
|
||||
});
|
||||
}
|
||||
|
||||
if(ncs.length > 0) {
|
||||
renderTrace("tx", ncs[0], gcs[0]);
|
||||
} else {
|
||||
$('#opCodeContainer').empty();
|
||||
}
|
||||
|
||||
},
|
||||
onFormat: function (type) {
|
||||
switch (type) {
|
||||
case 'block': // n and c
|
||||
if (this.value == this.page) {
|
||||
return '<li class="uk-active"><span>' + this.value + '</span></li>';
|
||||
} else {
|
||||
return '<li><a href="#">' + this.value + '</a></li>';
|
||||
}
|
||||
case 'next': // >
|
||||
return '<li><a href="#"><span uk-pagination-next></span></a></li>';
|
||||
case 'prev': // <
|
||||
return '<li><a href="#"><span uk-pagination-previous></span></a></li>';
|
||||
case 'first': // [
|
||||
return '<li><a href="#">first</a></li>';
|
||||
case 'last': // ]
|
||||
return '<li><a href="#">last</a></li>';
|
||||
case "leap":
|
||||
return " ";
|
||||
case 'fill':
|
||||
return '<li class="uk-disabled"><span>...</span></li>';
|
||||
case 'left':
|
||||
if(this.value >= this.page) return '';
|
||||
return '<li><a href="#">' + this.value + '</a></li>';
|
||||
case 'right':
|
||||
if(this.value <= this.page) return '';
|
||||
return '<li><a href="#">' + this.value + '</a></li>';
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
windowResize();
|
||||
}
|
||||
|
||||
function transactionsRenderer(txId, nimbus, geth) {
|
||||
txId = parseInt(txId);
|
||||
$('#transactionsTitle').text(`Tx #${(txId+1)}`);
|
||||
let container = $('#transactionsContainer').empty();
|
||||
|
||||
function renderTx(nimbus, geth) {
|
||||
let body = premix.newTable(container);
|
||||
const fields = ["gas", "returnValue", "cumulativeGasUsed", "bloom"];
|
||||
for(var x of fields) {
|
||||
premix.renderRow(body, nimbus, geth, x);
|
||||
}
|
||||
$('<hr class="uk-divider-icon">').appendTo(container);
|
||||
|
||||
if(nimbus.root || geth.root) {
|
||||
if(geth.root === undefined) geth.root = '';
|
||||
if(nimbus.root == undefined) nimbus.root = '';
|
||||
premix.renderRow(body, nimbus, geth, 'root');
|
||||
}
|
||||
|
||||
if(nimbus.status || geth.status) {
|
||||
if(geth.status === undefined) geth.status = '';
|
||||
if(nimbus.status == undefined) nimbus.status = '';
|
||||
premix.renderRow(body, nimbus, geth, 'status');
|
||||
}
|
||||
|
||||
function fillEmptyLogs(a, b) {
|
||||
function emptyLog() {
|
||||
return {address: '', topics: [], data: ''};
|
||||
}
|
||||
|
||||
if(a.logs.length > b.logs.length) {
|
||||
for(var i in a.logs) {
|
||||
if(b.logs[i] === undefined) {
|
||||
b.logs[i] = emptyLog();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fillEmptyLogs(geth, nimbus);
|
||||
fillEmptyLogs(nimbus, geth);
|
||||
|
||||
for(var i in nimbus.logs) {
|
||||
$(`<h4>Receipt Log #${i}</h4>`).appendTo(container);
|
||||
let a = nimbus.logs[i];
|
||||
let b = geth.logs[i];
|
||||
//console.log(a.topics);
|
||||
a.topics = a.topics.join(',');
|
||||
b.topics = b.topics.join(',');
|
||||
let body = premix.newTable(container);
|
||||
premix.renderRow(body, a, b, 'address');
|
||||
premix.renderRow(body, a, b, 'data');
|
||||
premix.renderRow(body, a, b, 'topics');
|
||||
$('<hr class="uk-divider-icon">').appendTo(container);
|
||||
}
|
||||
}
|
||||
|
||||
let tx = geth.block.transactions[txId];
|
||||
let ntx = nimbus.txTraces[txId];
|
||||
let gtx = geth.txTraces[txId];
|
||||
|
||||
if(ntx.returnValue.length == 0) {
|
||||
ntx.returnValue = "0x";
|
||||
}
|
||||
|
||||
let ncr = $.extend({
|
||||
gas: ntx.gas,
|
||||
returnValue: ntx.returnValue
|
||||
},
|
||||
deepCopy(nimbus.receipts[txId])
|
||||
);
|
||||
|
||||
let gcr = $.extend({
|
||||
gas: gtx.gas,
|
||||
returnValue: "0x" + gtx.returnValue
|
||||
},
|
||||
deepCopy(geth.receipts[txId])
|
||||
);
|
||||
|
||||
$(`<h4>Transaction Kind: ${tx.txKind}</h4>`).appendTo(container);
|
||||
renderTx(ncr, gcr);
|
||||
}
|
||||
|
||||
function accountsRenderer(nimbus, geth) {
|
||||
function emptyAccount() {
|
||||
return {
|
||||
address: '',
|
||||
nonce: '',
|
||||
balance: '',
|
||||
codeHash: '',
|
||||
code: '',
|
||||
storageRoot: '',
|
||||
storage: {}
|
||||
};
|
||||
}
|
||||
|
||||
function precompiledContractsName(address) {
|
||||
switch(address) {
|
||||
case "0x0000000000000000000000000000000000000001": return "ecRecover";
|
||||
case "0x0000000000000000000000000000000000000002": return "Sha256";
|
||||
case "0x0000000000000000000000000000000000000003": return "RipeMd160";
|
||||
case "0x0000000000000000000000000000000000000004": return "Identity";
|
||||
case "0x0000000000000000000000000000000000000005": return "ModExp";
|
||||
case "0x0000000000000000000000000000000000000006": return "bn256ecAdd";
|
||||
case "0x0000000000000000000000000000000000000007": return "bn256ecMul";
|
||||
case "0x0000000000000000000000000000000000000008": return "bn256ecPairing";
|
||||
default: return "";
|
||||
}
|
||||
}
|
||||
|
||||
let container = $('#accountsContainer').empty();
|
||||
$('#accountsTitle').text('Block #' + parseInt(geth.block.number, 16));
|
||||
|
||||
let ncs = deepCopy(nimbus.stateDump.after);
|
||||
let gcs = deepCopy(geth.accounts);
|
||||
let accounts = [];
|
||||
|
||||
for(var address in ncs) {
|
||||
let n = ncs[address];
|
||||
n.address = address;
|
||||
if(gcs[address]) {
|
||||
let geth = gcs[address];
|
||||
geth.address = address;
|
||||
accounts.push({name: n.name, nimbus: n, geth: geth});
|
||||
delete gcs[address];
|
||||
} else {
|
||||
accounts.push({name: n.name, nimbus: n, geth: emptyAccount()});
|
||||
}
|
||||
}
|
||||
|
||||
for(var address in gcs) {
|
||||
let geth = gcs[address];
|
||||
geth.address = address;
|
||||
accounts.push({name: "unknown", nimbus: emptyAccount(), geth: geth});
|
||||
}
|
||||
|
||||
for(var acc of accounts) {
|
||||
let pa = precompiledContractsName(acc.nimbus.address);
|
||||
let precompiledContract = pa == '' ? '' : ` or Precompiled Contract(${pa})`;
|
||||
$(`<h4>Account Name: ${acc.name}${precompiledContract}</h4>`).appendTo(container);
|
||||
let body = premix.newTable(container);
|
||||
const fields = ['address', 'nonce', 'balance', 'codeHash', 'code', 'storageRoot'];
|
||||
for(var x of fields) {
|
||||
premix.renderRow(body, acc.nimbus, acc.geth, x);
|
||||
}
|
||||
|
||||
let storage = [];
|
||||
let nss = acc.nimbus.storage;
|
||||
let gss = acc.geth.storage;
|
||||
|
||||
for(var idx in nss) {
|
||||
if(gss[idx]) {
|
||||
storage.push({idx: idx, nimbus: nss[idx], geth: gss[idx]});
|
||||
delete gss[idx];
|
||||
} else {
|
||||
if(nss[idx] != "0x0000000000000000000000000000000000000000000000000000000000000000") {
|
||||
storage.push({idx: idx, nimbus: nss[idx], geth: ''});
|
||||
}
|
||||
}
|
||||
}
|
||||
for(var idx in gss) {
|
||||
if(gss[idx] != "0x0000000000000000000000000000000000000000000000000000000000000000") {
|
||||
storage.push({idx: idx, nimbus: '', geth: gss[idx]});
|
||||
}
|
||||
}
|
||||
|
||||
if(storage.length > 0) {
|
||||
$(`<h4>${acc.name} Storage</h4>`).appendTo(container);
|
||||
let body = premix.newTable(container);
|
||||
for(var s of storage) {
|
||||
premix.renderRow(body, s.nimbus, s.geth, s.idx);
|
||||
}
|
||||
}
|
||||
|
||||
$('<hr class="uk-divider-icon">').appendTo(container);
|
||||
}
|
||||
}
|
||||
|
||||
function headerRenderer(nimbus, geth) {
|
||||
let container = $('#headerContainer').empty();
|
||||
$('#headerTitle').text('Block #' + parseInt(geth.block.number, 16));
|
||||
|
||||
let body = premix.newTable(container);
|
||||
const blockSummary = ['stateRoot', 'receiptsRoot', 'logsBloom'];
|
||||
for(var idx of blockSummary) {
|
||||
premix.renderRow(body, nimbus.block, geth.block, idx);
|
||||
}
|
||||
}
|
||||
|
||||
function generateNavigation(txs, nimbus, geth) {
|
||||
function navAux(menuId, renderer) {
|
||||
let menu = $(menuId).click(function(ev) {
|
||||
renderer(0, nimbus, geth);
|
||||
});
|
||||
|
||||
if(txs.length == 0) {
|
||||
menu.parent().addClass('uk-disabled');
|
||||
} else if(txs.length > 1) {
|
||||
$('<span uk-icon="icon: triangle-down"></span>').appendTo(menu);
|
||||
let div = $('<div uk-dropdown="mode: hover;"/>').appendTo(menu.parent());
|
||||
let list = $('<ul class="uk-nav uk-dropdown-nav"/>').appendTo(div);
|
||||
|
||||
for(var i in txs) {
|
||||
let id = parseInt(i) + 1;
|
||||
$(`<li class="uk-active"><a rel="${i}" href="#">TX #${id}</a></li>`).appendTo(list);
|
||||
}
|
||||
|
||||
list.find('li a').click(function(ev) {
|
||||
renderer(this.rel, nimbus, geth);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
navAux('#opCodeMenu', opCodeRenderer);
|
||||
navAux('#transactionsMenu', transactionsRenderer);
|
||||
|
||||
$('#accountsMenu').click(function(ev) {
|
||||
accountsRenderer(nimbus, geth);
|
||||
});
|
||||
|
||||
$('#headerMenu').click(function(ev) {
|
||||
headerRenderer(nimbus, geth);
|
||||
});
|
||||
}
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
var nimbus = premixData.nimbus;
|
||||
var geth = premixData.geth;
|
||||
var transactions = geth.block.transactions;
|
||||
|
||||
generateNavigation(transactions, nimbus, geth);
|
||||
});
|
premix/assets/js/jquery.min.js (vendored, deleted)
File diff suppressed because one or more lines are too long

premix/assets/js/jquery.paging.min.js (vendored, 14 lines, deleted)
@ -1,14 +0,0 @@
|
||||
/*
|
||||
jQuery paging plugin v1.3.0 23/06/2014
|
||||
http://www.xarg.org/2011/09/jquery-pagination-revised/
|
||||
|
||||
Copyright (c) 2011, Robert Eisele (robert@xarg.org)
|
||||
Dual licensed under the MIT or GPL Version 2 licenses.
|
||||
*/
|
||||
(function(n,v,r){n.fn.paging=function(z,A){var t=this,b={setOptions:function(a){b.a=n.extend(b.a||{lapping:0,perpage:10,page:1,refresh:{interval:10,url:null},format:"",lock:!1,circular:!1,onClick:null,onFormat:function(){},onSelect:function(){return!0},onRefresh:function(){}},a||{});b.a.lapping|=0;b.a.perpage|=0;null!==b.a.page&&(b.a.page|=0);1>b.a.perpage&&(b.a.perpage=10);b.interval&&v.clearInterval(b.interval);b.a.refresh.url&&(b.interval=v.setInterval(function(){n.ajax({url:b.a.refresh.url,success:function(a){if("string"===
|
||||
typeof a)try{a=n.parseJSON(a)}catch(m){return}b.a.onRefresh(a)}})},1E3*b.a.refresh.interval));b.format=function(a){for(var b=0,f=0,h=1,g={g:[],i:0,h:0,b:5,current:3,l:0,m:0},c,p=/[*<>pq\[\]().-]|[nc]+!?/g,n={"[":"first","]":"last","<":"prev",">":"next",q:"left",p:"right","-":"fill",".":"leap"},e={};c=p.exec(a);)c=""+c,r===n[c]?"("===c?f=++b:")"===c?f=0:h&&("*"===c?(g.i=1,g.h=0):(g.i=0,g.h="!"===c.charAt(c.length-1),g.b=c.length-g.h,(g.current=1+c.indexOf("c"))||(g.current=1+g.b>>1)),g.g.push({f:"block",
|
||||
j:0,c:0}),h=0):(g.g.push({f:n[c],j:f,c:r===e[c]?e[c]=1:++e[c]}),"q"===c?++g.m:"p"===c&&++g.l);return g}(b.a.format);return b},setNumber:function(a){b.s=r===a||0>a?-1:a;return b},setPage:function(a){function w(a,b,c){c=""+a.onFormat.call(b,c);p=b.value?p+c.replace(/<a/i,'<a data-page="'+b.value+'"'):p+c}if(b.a.lock)return b.a.onSelect(0,t),b;if(r===a){if(a=b.a.page,null===a)return b}else if(b.a.page==a)return b;b.a.page=a|=0;var m=b.s,f=b.a,h,g,c,p,x=1,e=b.format,d,k,l,q,y=e.g.length,u=y;f.perpage<=
|
||||
f.lapping&&(f.lapping=f.perpage-1);q=m<=f.lapping?0:f.lapping|0;0>m?(c=m=-1,h=Math.max(1,a-e.current+1-q),g=h+e.b):(c=1+Math.ceil((m-f.perpage)/(f.perpage-q)),a=Math.max(1,Math.min(0>a?1+c+a:a,c)),e.i?(h=1,g=1+c,e.current=a,e.b=c):(h=Math.max(1,Math.min(a-e.current,c-e.b)+1),g=e.h?h+e.b:Math.min(h+e.b,1+c)));for(;u--;){k=0;l=e.g[u];switch(l.f){case "left":k=l.c<h;break;case "right":k=g<=c-e.l+l.c;break;case "first":k=e.current<a;break;case "last":k=e.b<e.current+c-a;break;case "prev":k=1<a;break;
|
||||
case "next":k=a<c}x|=k<<l.j}d={number:m,lapping:q,pages:c,perpage:f.perpage,page:a,slice:[(k=a*(f.perpage-q)+q)-f.perpage,Math.min(k,m)]};for(p="";++u<y;){l=e.g[u];k=x>>l.j&1;switch(l.f){case "block":for(;h<g;++h)d.value=h,d.pos=1+e.b-g+h,d.active=h<=c||0>m,d.first=1===h,d.last=h===c&&0<m,w(f,d,l.f);continue;case "left":d.value=l.c;d.active=l.c<h;break;case "right":d.value=c-e.l+l.c;d.active=g<=d.value;break;case "first":d.value=1;d.active=k&&1<a;break;case "prev":(d.active=f.circular)?d.value=1===
|
||||
a?c:a-1:(d.value=Math.max(1,a-1),d.active=k&&1<a);break;case "last":(d.active=0>m)?d.value=1+a:(d.value=c,d.active=k&&a<c);break;case "next":(d.active=f.circular)?d.value=1+a%c:(d.active=0>m)?d.value=1+a:(d.value=Math.min(1+a,c),d.active=k&&a<c);break;case "leap":case "fill":d.pos=l.c;d.active=k;w(f,d,l.f);continue}d.pos=l.c;d.last=d.first=r;w(f,d,l.f)}t.length&&(n("a",t.html(p)).click(f.onClick||function(a){a.preventDefault();a=this;do if("a"===a.nodeName.toLowerCase())break;while(a=a.parentNode);
|
||||
b.setPage(n(a).data("page"));b.o&&(v.location=a.href)}),b.o=f.onSelect.call({number:m,lapping:q,pages:c,slice:d.slice},a,t));return b}};return b.setNumber(z).setOptions(A).setPage()}})(jQuery,this);
|
premix/assets/js/uikit-icons.min.js (vendored, deleted)
File diff suppressed because one or more lines are too long

premix/assets/js/uikit.min.js (vendored, deleted)
File diff suppressed because one or more lines are too long
@ -1,134 +0,0 @@
|
||||
# Nimbus
|
||||
# Copyright (c) 2020-2024 Status Research & Development GmbH
|
||||
# Licensed under either of
|
||||
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
# http://www.apache.org/licenses/LICENSE-2.0)
|
||||
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
# http://opensource.org/licenses/MIT)
|
||||
# at your option. This file may not be copied, modified, or distributed except
|
||||
# according to those terms.
|
||||
|
||||
import
|
||||
std/[os, parseopt, strutils],
|
||||
eth/common,
|
||||
stint,
|
||||
chronicles,
|
||||
../nimbus/config
|
||||
|
||||
from ../nimbus/common/chain_config import
|
||||
MainNet,
|
||||
SepoliaNet,
|
||||
HoleskyNet
|
||||
|
||||
type
|
||||
ConfigStatus* = enum
|
||||
## Configuration status flags
|
||||
Success, ## Success
|
||||
EmptyOption, ## No options in category
|
||||
ErrorUnknownOption, ## Unknown option in command line found
|
||||
ErrorParseOption, ## Error in parsing command line option
|
||||
ErrorIncorrectOption, ## Option has incorrect value
|
||||
Error ## Unspecified error
|
||||
|
||||
PremixConfiguration* = ref object
|
||||
dataDir*: string
|
||||
head*: BlockNumber
|
||||
maxBlocks*: int
|
||||
numCommits*: int
|
||||
netId*: NetworkId
|
||||
|
||||
var premixConfig {.threadvar.}: PremixConfiguration
|
||||
|
||||
proc getConfiguration*(): PremixConfiguration {.gcsafe.}
|
||||
|
||||
proc processInteger(v: string, o: var int): ConfigStatus =
|
||||
## Convert string to integer.
|
||||
try:
|
||||
o = parseInt(v)
|
||||
result = Success
|
||||
except ValueError:
|
||||
result = ErrorParseOption
|
||||
|
||||
proc initConfiguration(): PremixConfiguration =
|
||||
result = new PremixConfiguration
|
||||
|
||||
const dataDir = defaultDataDir()
|
||||
|
||||
result.dataDir = dataDir
|
||||
result.head = 0'u64
|
||||
result.maxBlocks = 0
|
||||
result.numCommits = 128
|
||||
result.netId = MainNet
|
||||
|
||||
proc getConfiguration*(): PremixConfiguration =
|
||||
if isNil(premixConfig):
|
||||
premixConfig = initConfiguration()
|
||||
result = premixConfig
|
||||
|
||||
proc processBlockNumber(val: string, o: var BlockNumber): ConfigStatus =
|
||||
if val.len > 2 and val[0] == '0' and val[1] == 'x':
|
||||
o = UInt256.fromHex(val).truncate(BlockNumber)
|
||||
else:
|
||||
o = parse(val, UInt256).truncate(BlockNumber)
|
||||
result = Success
|
||||
|
||||
func processNetId(val: string, o: var NetworkId): ConfigStatus =
|
||||
case val.toLowerAscii()
|
||||
of "main": o = MainNet
|
||||
of "sepolia": o = SepoliaNet
|
||||
of "holesky": o = HoleskyNet
|
||||
|
||||
template checkArgument(fun, o, value: untyped) =
|
||||
## Checks if arguments got processed successfully
|
||||
let res = fun(value, o)
|
||||
if res == Success:
|
||||
continue
|
||||
elif res == ErrorParseOption:
|
||||
msg = "Error processing option [" & key & "] with value [" & value & "]"
|
||||
result = res
|
||||
break
|
||||
elif res == ErrorIncorrectOption:
|
||||
msg = "Incorrect value for option [" & key & "] value [" & value & "]"
|
||||
result = res
|
||||
break
|
||||
|
||||
proc processArguments*(msg: var string): ConfigStatus =
|
||||
var
|
||||
opt = initOptParser()
|
||||
length = 0
|
||||
config = getConfiguration()
|
||||
|
||||
result = Success
|
||||
for kind, key, value in opt.getopt():
|
||||
case kind
|
||||
of cmdArgument:
|
||||
return EmptyOption
|
||||
of cmdLongOption, cmdShortOption:
|
||||
inc(length)
|
||||
case key.toLowerAscii()
|
||||
of "help":
|
||||
return EmptyOption
|
||||
of "datadir": config.dataDir = value
|
||||
of "maxblocks":
|
||||
checkArgument(processInteger, config.maxBlocks, value)
|
||||
of "head":
|
||||
checkArgument(processBlockNumber, config.head, value)
|
||||
of "numcommits":
|
||||
checkArgument(processInteger, config.numCommits, value)
|
||||
config.numCommits = max(config.numCommits, 512)
|
||||
of "netid":
|
||||
checkArgument(processNetId, config.netId, value)
|
||||
else:
|
||||
msg = "Unknown option " & key
|
||||
if value.len > 0: msg = msg & " : " & value
|
||||
return ErrorUnknownOption
|
||||
of cmdEnd:
|
||||
msg = "Error processing option [" & key & "]"
|
||||
return ErrorParseOption
|
||||
|
||||
info "Using configuration parameters: ",
|
||||
datadir = config.dataDir,
|
||||
maxblocks = config.maxBlocks,
|
||||
head = config.head,
|
||||
numcommits = config.numCommits,
|
||||
netid = config.netId
|
@ -1,79 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  std/[json, os, strutils],
  stew/byteutils,
  chronicles,
  results,
  ../nimbus/[evm/state, evm/types],
  ../nimbus/core/executor,
  ./premixcore, ./prestate,
  ../nimbus/tracer,
  ../nimbus/common/common

proc prepareBlockEnv(node: JsonNode, memoryDB: CoreDbRef) =
  let state = node["state"]
  let kvt = memoryDB.ctx.getKvt()
  for k, v in state:
    let key = hexToSeqByte(k)
    let value = hexToSeqByte(v.getStr())
    kvt.put(key, value).isOkOr:
      raiseAssert "prepareBlockEnv(): put() (loop) failed " & $$error

proc executeBlock(blockEnv: JsonNode, memoryDB: CoreDbRef, blockNumber: BlockNumber) =
  var
    parentNumber = blockNumber - 1
    com = CommonRef.new(memoryDB)
    parent = com.db.getBlockHeader(parentNumber)
    blk = com.db.getEthBlock(blockNumber)
  let transaction = memoryDB.ctx.newTransaction()
  defer: transaction.dispose()

  let
    vmState = BaseVMState.new(parent, blk.header, com)
    validationResult = vmState.processBlock(blk)

  if validationResult.isErr:
    error "block validation error", err = validationResult.error()
  else:
    info "block validation success", blockNumber

  transaction.rollback()
  vmState.dumpDebuggingMetaData(blk, false)
  let
    fileName = "debug" & $blockNumber & ".json"
    nimbus = json.parseFile(fileName)
    geth = blockEnv["geth"]

  processNimbusData(nimbus)

  # premix data goes to report page
  generatePremixData(nimbus, geth)

  # prestate data goes to debug tool and contains data
  # needed to execute single block
  generatePrestate(nimbus, geth, blockNumber, parent, blk)

proc main() =
  if paramCount() == 0:
    echo "usage: debug blockxxx.json"
    quit(QuitFailure)

  let
    blockEnv = json.parseFile(paramStr(1))
    memoryDB = newCoreDbRef(DefaultDbMemory)
    blockNumberHex = blockEnv["blockNumber"].getStr()
    blockNumber = parseHexInt(blockNumberHex).uint64

  prepareBlockEnv(blockEnv, memoryDB)
  executeBlock(blockEnv, memoryDB, blockNumber)

main()
@ -1,70 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

#
# helper tool to dump debugging data for persisted block
# usage: dumper [--datadir:your_path] --head:blockNumber
#

import
  results,
  ../nimbus/common/common,
  ../nimbus/db/opts,
  ../nimbus/db/core_db/persistent,
  ../nimbus/core/executor,
  ../nimbus/[evm/state, evm/types],
  ../nimbus/tracer,
  ./configuration # must be late (compilation annoyance)

proc dumpDebug(com: CommonRef, blockNumber: BlockNumber) =
  var
    capture = com.db.newCapture.value
    captureCom = com.clone(capture.recorder)

  let transaction = capture.recorder.ctx.newTransaction()
  defer: transaction.dispose()


  var
    parentNumber = blockNumber - 1
    parent = captureCom.db.getBlockHeader(parentNumber)
    blk = captureCom.db.getEthBlock(blockNumber)
    vmState = BaseVMState.new(parent, blk.header, captureCom)

  discard captureCom.db.setHead(parent, true)
  discard vmState.processBlock(blk)

  transaction.rollback()
  vmState.dumpDebuggingMetaData(blk, false)

proc main() {.used.} =
  let conf = getConfiguration()
  let com = CommonRef.new(
    newCoreDbRef(DefaultDbPersistent, conf.dataDir, DbOptions.init()))

  if conf.head != 0'u64:
    dumpDebug(com, conf.head)

when isMainModule:
  var message: string

  ## Processing command line arguments
  if processArguments(message) != Success:
    echo message
    quit(QuitFailure)
  else:
    if len(message) > 0:
      echo message
      quit(QuitSuccess)

  try:
    main()
  except:
    echo getCurrentExceptionMsg()
@ -1,118 +0,0 @@
|
||||
# Nimbus
|
||||
# Copyright (c) 2021-2024 Status Research & Development GmbH
|
||||
# Licensed under either of
|
||||
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
# http://www.apache.org/licenses/LICENSE-2.0)
|
||||
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
# http://opensource.org/licenses/MIT)
|
||||
# at your option. This file may not be copied, modified, or distributed except
|
||||
# according to those terms.
|
||||
|
||||
import
|
||||
std/json,
|
||||
chronos, results, eth/common,
|
||||
graphql/httpclient,
|
||||
./parser
|
||||
|
||||
const ethQuery = """
|
||||
fragment headerFields on Block {
|
||||
parentHash: parent { value: hash }
|
||||
sha3Uncles: ommerHash
|
||||
miner { value: address }
|
||||
stateRoot
|
||||
transactionsRoot
|
||||
receiptsRoot
|
||||
logsBloom
|
||||
difficulty
|
||||
number
|
||||
gasLimit
|
||||
gasUsed
|
||||
timestamp
|
||||
extraData
|
||||
mixHash
|
||||
nonce
|
||||
baseFeePerGas # EIP-1559
|
||||
}
|
||||
|
||||
query getBlock($blockNumber: Long!) {
|
||||
chainID # EIP-1559
|
||||
block(number: $blockNumber) {
|
||||
... headerFields
|
||||
ommerCount
|
||||
ommers {
|
||||
... headerFields
|
||||
}
|
||||
transactionCount
|
||||
transactions {
|
||||
nonce
|
||||
gasPrice
|
||||
gas
|
||||
to {value: address}
|
||||
value
|
||||
input: inputData
|
||||
v
|
||||
r
|
||||
s
|
||||
maxFeePerGas # EIP-1559
|
||||
maxPriorityFeePerGas # EIP-1559
|
||||
effectiveGasPrice # EIP-1559
|
||||
type
|
||||
hash
|
||||
from {value: address}
|
||||
accessList {
|
||||
address
|
||||
storageKeys
|
||||
}
|
||||
index
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
type
|
||||
Block* = object
|
||||
header*: BlockHeader
|
||||
body*: BlockBody
|
||||
|
||||
proc fromJson(_: type ChainId, n: JsonNode, name: string): ChainId =
|
||||
var chainId: int
|
||||
fromJson(n, name, chainId)
|
||||
ChainId(chainId)
|
||||
|
||||
proc requestBlock*(blockNumber: BlockNumber, parseTx = true): Block =
|
||||
let address = initTAddress("127.0.0.1:8545")
|
||||
let clientRes = GraphqlHttpClientRef.new(address)
|
||||
if clientRes.isErr:
|
||||
raise newException(ValueError, clientRes.error)
|
||||
|
||||
let client = clientRes.get()
|
||||
client.addVar("blockNumber", $blockNumber)
|
||||
|
||||
let res = waitFor client.sendRequest(ethQuery)
|
||||
if res.isErr:
|
||||
raise newException(ValueError, res.error)
|
||||
|
||||
let resp = res.get()
|
||||
|
||||
let n = json.parseJson(resp.response)
|
||||
if n.hasKey("errors"):
|
||||
debugEcho n.pretty
|
||||
quit(1)
|
||||
|
||||
let nh = n["data"]["block"]
|
||||
let chainId = ChainId.fromJson(n["data"], "chainID")
|
||||
result.header = parseBlockHeader(nh)
|
||||
|
||||
if parseTx:
|
||||
let txs = nh["transactions"]
|
||||
for txn in txs:
|
||||
var tx = parseTransaction(txn)
|
||||
tx.chainId = chainId
|
||||
validateTxSenderAndHash(txn, tx)
|
||||
result.body.transactions.add tx
|
||||
|
||||
let uncles = nh["ommers"]
|
||||
for un in uncles:
|
||||
result.body.uncles.add parseBlockHeader(un)
|
||||
|
||||
waitFor client.closeWait()
|
@ -1,15 +0,0 @@
# Nimbus
# Copyright (c) 2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

# Currently disabled (moved to no-hunter.nim)

# Would never haved worked on Aristo. Needs to maintain the account with
# the state db, i.e. using `CoreDbAccount` for `Account` (see
# `state_db/base.nim`.)
@ -1,181 +0,0 @@
|
||||
<html class="uk-height-1-1">
|
||||
<head>
|
||||
<title id="windowTitle">Premix Report Page</title>
|
||||
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
|
||||
<script src="assets/js/jquery.min.js"></script>
|
||||
<script src="assets/js/jquery.paging.min.js"></script>
|
||||
<script src="assets/js/uikit.min.js"></script>
|
||||
<script src="assets/js/uikit-icons.min.js"></script>
|
||||
<script src="assets/js/index.js"></script>
|
||||
<script src="premixData.js"></script>
|
||||
<link rel="stylesheet" href="assets/css/uikit.min.css" />
|
||||
<style>
|
||||
body {
|
||||
font: 12px normal Arial, Helvetica, sans-serif;
|
||||
}
|
||||
.tm-horizontal-overflow {
|
||||
overflow-x: auto;
|
||||
}
|
||||
.tm-sidebar {
|
||||
position: fixed;
|
||||
overflow-y: auto;
|
||||
padding-left:40px;
|
||||
}
|
||||
.tm-monospace-cell {
|
||||
font-family: "Courier New", Courier, monospace;
|
||||
font-weight: 500;
|
||||
}
|
||||
.uk-nav-default > li > a.tm-text-danger {
|
||||
color: #f0506e;
|
||||
}
|
||||
.uk-nav-default > li > a.tm-text-danger:hover {
|
||||
color: red;
|
||||
}
|
||||
.uk-nav-default > li.uk-active > a.tm-text-danger {
|
||||
color: red;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body onresize="windowResize()" class="uk-height-1-1">
|
||||
|
||||
<div class="uk-section-small uk-background-primary uk-light" uk-sticky="bottom: #offset">
|
||||
<!-- Navigation -->
|
||||
<div class="uk-overlay uk-position-left uk-flex uk-flex-middle">
|
||||
<h1>Premix Report Page</h1>
|
||||
</div>
|
||||
|
||||
<div class="uk-position-right uk-overlay">
|
||||
<ul class="uk-subnav uk-subnav-divider" uk-switcher="connect:#switcherSection">
|
||||
<li><a id="opCodeMenu" href="#">OpCode</a></li>
|
||||
<li><a id="transactionsMenu" href="#">Transactions</a></li>
|
||||
<li><a id="accountsMenu" href="#">Accounts</a></li>
|
||||
<li><a id="headerMenu" href="#">Header</a></li>
|
||||
<li class="uk-active"><a href="#">Help</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<!-- Navigation -->
|
||||
</div>
|
||||
|
||||
<ul id="switcherSection" class="uk-switcher">
|
||||
|
||||
<li>
|
||||
<!-- Opcode Page -->
|
||||
<div class="uk-grid-collapse" uk-grid>
|
||||
|
||||
<div class="uk-width-1-5@m">
|
||||
<ul id="opCodeSideBar" class="tm-sidebar uk-nav uk-nav-default uk-height-1-1 uk-width-1-5@m">
|
||||
<!-- op code traces navigation sidebar -->
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="uk-width-4-5@m">
|
||||
<div class="uk-section uk-section-small uk-section-secondary uk-light">
|
||||
<ul id="paging" class="uk-pagination uk-flex-center" uk-margin>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="uk-section-small uk-section-default">
|
||||
<div class="uk-container uk-container-expand">
|
||||
<h3>Opcode Trace <span id="opCodeTitle" class="uk-text-primary uk-text-small">Tx #</span></h3>
|
||||
<div id="opCodeContainer">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="uk-section uk-section-small uk-section-secondary uk-light">
|
||||
<div class="uk-container uk-text-center">
|
||||
<h2>Have You Found The Bug?</h2>
|
||||
</div>
|
||||
|
||||
<ul class="uk-subnav uk-subnav-divider uk-flex uk-flex-center" uk-margin>
|
||||
<li><a href="https://github.com/status-im/nimbus"><span uk-icon="github" class="uk-margin-small-right"></span>Github</a></li>
|
||||
<li><a href="https://gitter.im/status-im/nimbus"><span uk-icon="gitter" class="uk-margin-small-right"></span>Gitter</a></li>
|
||||
</ul>
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
<!-- Opcode Page -->
|
||||
</li>
|
||||
|
||||
<li>
|
||||
<!-- Transactions Page -->
|
||||
<div class="uk-section-small uk-section-default">
|
||||
<div class="uk-container uk-container-medium">
|
||||
<h3>Transaction's Receipts <span id="transactionsTitle" class="uk-text-primary uk-text-small">Tx #</span></h3>
|
||||
<div id="transactionsContainer">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Transactions Page -->
|
||||
</li>
|
||||
|
||||
<li>
|
||||
<!-- Accounts Page -->
|
||||
<div class="uk-section-small uk-section-default">
|
||||
<div class="uk-container uk-container-medium">
|
||||
<h3>Post State Accounts <span id="accountsTitle" class="uk-text-primary uk-text-small">Block #</span></h3>
|
||||
<div id="accountsContainer">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Accounts Page -->
|
||||
</li>
|
||||
|
||||
<li>
|
||||
<!-- Header Page -->
|
||||
<div class="uk-section-small uk-section-default">
|
||||
<div class="uk-container uk-container-medium">
|
||||
<h3>Header Summary <span id="headerTitle" class="uk-text-primary uk-text-small">Block #</span></h3>
|
||||
<div id="headerContainer">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Header Page -->
|
||||
</li>
|
||||
|
||||
<li>
|
||||
<!-- Help Page -->
|
||||
<div class="uk-section-small uk-section-default">
|
||||
<div class="uk-container uk-container-xsmall">
|
||||
<h2>Help</h2>
|
||||
|
||||
<p>
|
||||
Work your way through the top-right menu, left to right, to find out where the bug might be located.
|
||||
If you see <span class="uk-text-danger">red colored text</span>, it means you already found the difference between Nimbus and the other Ethereum
|
||||
client's tracing result.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
If there is no red colored text in the <strong>OPCODE</strong> section, it means the bug might be located in the <strong>TRANSACTIONS</strong> section, or in the <strong>HEADER</strong> section.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Once you locate the bug, you can use the <span class="uk-text-primary">./build/debug</span> tool
|
||||
to sort things out until there are no more errors and the block passes validation.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Blocks with multiple transactions will have submenus in the navigation bar.
|
||||
Usually, only the first transaction with red colored text is problematic, but it might affect the
|
||||
other transactions. In the <strong>OPCODE</strong> section, the same thing happens. Perhaps only the first red-colored instruction
|
||||
is problematic, but it will affect the other instructions.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Transactions in the <strong>TRANSACTIONS</strong> section are marked as: <span class="uk-text-warning">Regular, ContractCreation, or ContractCall</span>.
|
||||
Each kind is processed separately by Nimbus, in different procedures.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Help Page -->
|
||||
</li>
|
||||
|
||||
</ul>
|
||||
|
||||
</body>
|
||||
</html>
|
@ -1,78 +0,0 @@
|
||||
# Nimbus
|
||||
# Copyright (c) 2020-2023 Status Research & Development GmbH
|
||||
# Licensed under either of
|
||||
# * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
# http://www.apache.org/licenses/LICENSE-2.0)
|
||||
# * MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
# http://opensource.org/licenses/MIT)
|
||||
# at your option. This file may not be copied, modified, or distributed except
|
||||
# according to those terms.
|
||||
|
||||
const postStateTracer* = """{
|
||||
postState: {},
|
||||
|
||||
// lookupAccount injects the specified account into the postState object.
|
||||
lookupAccount: function(addr, db){
|
||||
var acc = toHex(addr);
|
||||
if (this.postState[acc] === undefined) {
|
||||
this.postState[acc] = {
|
||||
code: toHex(db.getCode(addr)),
|
||||
storage: {}
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
// lookupStorage injects the specified storage entry of the given account into
|
||||
// the postState object.
|
||||
lookupStorage: function(addr, key, db){
|
||||
var acc = toHex(addr);
|
||||
var idx = toHex(key);
|
||||
this.lookupAccount(addr, db);
|
||||
if (this.postState[acc].storage[idx] === undefined) {
|
||||
// bug in geth js tracer
|
||||
// we will use eth_getProof to fill the storage later
|
||||
this.postState[acc].storage[idx] = "";
|
||||
}
|
||||
},
|
||||
|
||||
// result is invoked when all the opcodes have been iterated over and returns
|
||||
// the final result of the tracing.
|
||||
result: function(ctx, db) {
|
||||
this.lookupAccount(ctx.from, db);
|
||||
this.lookupAccount(ctx.to, db);
|
||||
|
||||
// Return the assembled allocations (postState)
|
||||
return this.postState;
|
||||
},
|
||||
|
||||
// step is invoked for every opcode that the VM executes.
|
||||
step: function(log, db) {
|
||||
// Add the current account if we just started tracing
|
||||
if (this.postState === null){
|
||||
this.postState = {};
|
||||
// Balance will potentially be wrong here, since this will include the value
|
||||
// sent along with the message. We fix that in 'result()'.
|
||||
this.lookupAccount(log.contract.getAddress(), db);
|
||||
}
|
||||
// Whenever new state is accessed, add it to the postState
|
||||
switch (log.op.toString()) {
|
||||
case "EXTCODECOPY": case "EXTCODESIZE": case "BALANCE":
|
||||
this.lookupAccount(toAddress(log.stack.peek(0).toString(16)), db);
|
||||
break;
|
||||
case "CREATE":
|
||||
var from = log.contract.getAddress();
|
||||
this.lookupAccount(toContract(from, db.getNonce(from)), db);
|
||||
break;
|
||||
case "CALL": case "CALLCODE": case "DELEGATECALL": case "STATICCALL":
|
||||
this.lookupAccount(toAddress(log.stack.peek(1).toString(16)), db);
|
||||
break;
|
||||
case 'SSTORE':case 'SLOAD':
|
||||
this.lookupStorage(log.contract.getAddress(), toWord(log.stack.peek(0).toString(16)), db);
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
// fault is invoked when the actual execution of an opcode fails.
|
||||
fault: function(log, db) {}
|
||||
}
|
||||
"""
|
@ -1,164 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  std/[json, tables, hashes],
  eth/trie/trie_defs,
  stint, stew/byteutils, chronicles,
  ../nimbus/[evm/state, evm/types],
  ../nimbus/utils/utils,
  ../nimbus/tracer,
  ../nimbus/db/[core_db, state_db/read_write],
  ../nimbus/core/executor,
  ../nimbus/common/common,
  "."/[configuration, downloader, parser, premixcore]

const
  emptyCodeHash = blankStringHash

proc store(memoryDB: CoreDbRef, branch: JsonNode) =
  for p in branch:
    let rlp = hexToSeqByte(p.getStr)
    let hash = keccakHash(rlp)
    memoryDB.kvt.put(hash.data, rlp)

proc parseAddress(address: string): EthAddress =
  hexToByteArray(address, result)

proc parseU256(val: string): UInt256 =
  UInt256.fromHex(val)

proc prepareBlockEnv(parent: BlockHeader, thisBlock: Block): CoreDbRef =
  var
    accounts = requestPostState(thisBlock)
    memoryDB = newCoreDbRef DefaultDbMemory
    accountDB = newAccountStateDB(memoryDB, parent.stateRoot)
    parentNumber = %(parent.number.prefixHex)

  for address, account in accounts:
    updateAccount(address, account, parent.number)
    let
      accountProof = account["accountProof"]
      storageProof = account["storageProof"]
      address = parseAddress(address)
      acc = parseAccount(account)

    memoryDB.store(accountProof)
    accountDB.setAccount(address, acc)

    for storage in storageProof:
      let
        key = parseU256(storage["key"].getStr)
        val = parseU256(storage["value"].getStr)
        proof = storage["proof"]
      memoryDB.store(proof)
      accountDB.setStorage(address, key, val)

    if acc.codeHash != emptyCodeHash:
      let codeStr = request("eth_getCode", %[%address.prefixHex, parentNumber])
      let code = hexToSeqByte(codeStr.getStr)
      accountDB.setCode(address, code)

    accountDB.setAccount(address, acc)

  result = memoryDB

type
  HunterVMState = ref object of BaseVMState
    headers: Table[BlockNumber, BlockHeader]

proc hash*(x: UInt256): Hash =
  result = hash(x.toBytesBE)

proc new(T: type HunterVMState; parent, header: BlockHeader, com: CommonRef): T =
  new result
  result.init(parent, header, com)
  result.headers = Table[BlockNumber, BlockHeader]()

method getAncestorHash*(vmState: HunterVMState, blockNumber: BlockNumber): Hash256 =
  if blockNumber in vmState.headers:
    result = vmState.headers[blockNumber].hash
  else:
    let data = requestHeader(blockNumber)
    let header = parseBlockHeader(data)
    result = header.hash
    vmState.headers[blockNumber] = header

proc putAncestorsIntoDB(vmState: HunterVMState, db: CoreDbRef) =
  for header in vmState.headers.values:
    db.addBlockNumberToHashLookup(header)

proc huntProblematicBlock(blockNumber: UInt256): Result[void, string] =
  let
    # prepare needed state from previous block
    parentNumber = blockNumber - 1
    thisBlock = requestBlock(blockNumber)
    parentBlock = requestBlock(parentNumber)
    memoryDB = prepareBlockEnv(parentBlock.header, thisBlock)

    # try to execute current block
    com = CommonRef.new(memoryDB)

  discard com.db.setHead(parentBlock.header, true)

  let transaction = memoryDB.beginTransaction()
  defer: transaction.dispose()
  let
    vmState = HunterVMState.new(parentBlock.header, thisBlock.header, com)
    validationResult = vmState.processBlock(thisBlock.header, thisBlock.body)

  if validationResult.isErr():
    transaction.rollback()
    putAncestorsIntoDB(vmState, com.db)
    vmState.dumpDebuggingMetaData(thisBlock.header, thisBlock.body, false)

  validationResult

proc main() {.used.} =
  let conf = getConfiguration()

  if conf.head == 0.u256:
    echo "please specify the starting block with `--head:blockNumber`"
    quit(QuitFailure)

  if conf.maxBlocks == 0:
    echo "please specify the number of problematic blocks you want to hunt with `--maxBlocks:number`"
    quit(QuitFailure)

  var
    problematicBlocks = newSeq[UInt256]()
    blockNumber = conf.head

  while true:
    echo blockNumber
    if huntProblematicBlock(blockNumber).isErr:
      echo "shot down problematic block: ", blockNumber
      problematicBlocks.add blockNumber
    blockNumber = blockNumber + 1
    if problematicBlocks.len >= conf.maxBlocks:
      echo "Problematic blocks: ", problematicBlocks
      break

when isMainModule:
  var message: string

  ## Processing command line arguments
  if processArguments(message) != Success:
    echo message
    quit(QuitFailure)
  else:
    if len(message) > 0:
      echo message
      quit(QuitSuccess)

  try:
    main()
  except:
    echo getCurrentExceptionMsg()
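A note on the `hash*(x: UInt256)` overload above: `std/tables` needs a `hash` proc for its key type, and stint's `UInt256` does not come with one from `std/hashes`, which is presumably why hunter defined its own before using `Table[BlockNumber, BlockHeader]`. A self-contained sketch of the same requirement with a made-up wide key type (illustrative names only, not part of this repository):

```nim
import std/[tables, hashes]

type BigKey = object
  hi, lo: uint64   # stand-in for a wide integer key such as UInt256

# Wide or custom key types may need an explicit `hash` overload; this mixes
# the two halves the same way hunter's hash*(x: UInt256) hashes the raw bytes.
proc hash(k: BigKey): Hash =
  var h: Hash = 0
  h = h !& hash(k.hi)
  h = h !& hash(k.lo)
  !$h

var headers = initTable[BigKey, string]()
headers[BigKey(hi: 0, lo: 42)] = "header 42"
echo headers[BigKey(hi: 0, lo: 42)]
```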
@ -1,125 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

# use this module to quickly populate db with data from geth/parity

import
  std/os,
  chronicles,
  ../nimbus/errors,
  ../nimbus/core/chain,
  ../nimbus/common,
  ../nimbus/db/opts,
  ../nimbus/db/core_db/persistent,
  configuration # must be late (compilation annoyance)

when defined(graphql):
  import graphql_downloader
else:
  import downloader

template persistToDb(db: CoreDbRef, body: untyped) =
  block: body

proc contains(kvt: CoreDbKvtRef; key: openArray[byte]): bool =
  kvt.hasKeyRc(key).expect "valid bool"

proc main() {.used.} =
  # 97 block with uncles
  # 46147 block with first transaction
  # 46400 block with transaction
  # 46402 block with first contract: failed
  # 47205 block with first success contract
  # 48712 block with 5 transactions
  # 48915 block with contract
  # 49018 first problematic block
  # 49439 first block with contract call
  # 52029 first block with receipts logs
  # 66407 failed transaction

  let conf = configuration.getConfiguration()
  let com = CommonRef.new(
    newCoreDbRef(DefaultDbPersistent, conf.dataDir, DbOptions.init()),
    conf.netId, networkParams(conf.netId))

  # move head to block number ...
  if conf.head != 0'u64:
    var parentBlock = requestBlock(conf.head, { DownloadAndValidate })
    discard com.db.setHead(parentBlock.header)

  let kvt = com.db.ctx.getKvt()
  var head = com.db.getCanonicalHead()
  var blockNumber = head.number + 1
  var chain = newChain(com)

  let numBlocksToCommit = conf.numCommits

  var blocks = newSeqOfCap[EthBlock](numBlocksToCommit)
  var one = 1'u64

  var numBlocks = 0
  var counter = 0
  var retryCount = 0

  while true:
    var thisBlock: downloader.Block
    try:
      thisBlock = requestBlock(blockNumber, { DownloadAndValidate })
    except CatchableError as e:
      if retryCount < 3:
        warn "Unable to get block data via JSON-RPC API", error = e.msg
        inc retryCount
        sleep(1000)
        continue
      else:
        raise e

    blocks.add EthBlock.init(thisBlock.header, thisBlock.body)
    info "REQUEST HEADER", blockNumber=blockNumber, txs=thisBlock.body.transactions.len

    inc numBlocks
    blockNumber += one

    if numBlocks == numBlocksToCommit:
      persistToDb(com.db):
        let res = chain.persistBlocks(blocks)
        res.isOkOr:
          raise newException(ValidationError, "Error when validating blocks: " & res.error)
      numBlocks = 0
      blocks.setLen(0)

    inc counter
    if conf.maxBlocks != 0 and counter >= conf.maxBlocks:
      break

  if numBlocks > 0:
    persistToDb(com.db):
      let res = chain.persistBlocks(blocks)
      res.isOkOr:
        raise newException(ValidationError, "Error when validating blocks: " & res.error)

when isMainModule:
  var message: string

  ## Processing command line arguments
  if configuration.processArguments(message) != Success:
    if len(message) > 0:
      echo message
      echo "Usage: persist --datadir=<DATA_DIR> --maxblocks=<MAX_BLOCKS> --head=<HEAD> --numcommits=<NUM_COMMITS> --netid=<NETWORK_ID>"
    quit(QuitFailure)
  else:
    if len(message) > 0:
      echo message
      quit(QuitSuccess)

  try:
    main()
  except CatchableError:
    echo getCurrentExceptionMsg()
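The main loop above accumulates `numCommits` blocks before each `persistBlocks` call and then flushes the remainder after the loop. The same batching pattern in isolation, with illustrative names and no Nimbus types:

```nim
proc processInBatches[T](items: seq[T], batchSize: int,
                         commit: proc(batch: seq[T])) =
  # Accumulate items and flush every `batchSize`, then flush the remainder,
  # mirroring the numBlocks / numBlocksToCommit bookkeeping in the loop above.
  var batch = newSeqOfCap[T](batchSize)
  for it in items:
    batch.add it
    if batch.len == batchSize:
      commit(batch)
      batch.setLen(0)
  if batch.len > 0:
    commit(batch)

when isMainModule:
  processInBatches(@[1, 2, 3, 4, 5], 2, proc(b: seq[int]) = echo b)
```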
@ -1,74 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  std/[json, strutils, os],
  downloader,
  ../nimbus/tracer, prestate,
  eth/common, premixcore

proc generateGethData(thisBlock: Block, blockNumber: BlockNumber, accounts: JsonNode): JsonNode =
  let
    receipts = toJson(thisBlock.receipts)

  let geth = %{
    "blockNumber": %blockNumber.toHex,
    "txTraces": thisBlock.traces,
    "receipts": receipts,
    "block": thisBlock.jsonData,
    "accounts": accounts
  }

  result = geth

proc printDebugInstruction(blockNumber: BlockNumber) =
  var text = """

Successfully created debugging environment for block $1.
You can continue to find nimbus EVM bug by viewing premix report page `./index.html`.
After that you can try to debug that single block using `nim c -r debug block$1.json` command.

Happy bug hunting
""" % [$blockNumber]

  echo text

proc main() =
  if paramCount() == 0:
    echo "usage: premix debugxxx.json"
    quit(QuitFailure)

  try:
    let
      nimbus = json.parseFile(paramStr(1))
      blockNumberHex = nimbus["blockNumber"].getStr()
      blockNumber = parseHexInt(blockNumberHex).uint64
      thisBlock = requestBlock(blockNumber, {DownloadReceipts, DownloadTxTrace})
      accounts = requestPostState(thisBlock)
      geth = generateGethData(thisBlock, blockNumber, accounts)
      parentNumber = blockNumber - 1
      parentBlock = requestBlock(parentNumber)

    processNimbusData(nimbus)

    # premix data goes to report page
    generatePremixData(nimbus, geth)

    # prestate data goes to debug tool and contains data
    # needed to execute single block
    generatePrestate(
      nimbus, geth, blockNumber, parentBlock.header,
      EthBlock.init(thisBlock.header, thisBlock.body))

    printDebugInstruction(blockNumber)
  except CatchableError:
    echo getCurrentExceptionMsg()

main()
@ -1,184 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  json, strutils, os,
  chronicles, eth/common, eth/common/transaction_utils,
  ../nimbus/transaction, ../nimbus/launcher,
  ./js_tracer, ./parser, ./downloader

proc fakeAlloc(n: JsonNode) =
  const
    chunk = repeat('0', 64)

  for i in 1 ..< n.len:
    if not n[i].hasKey("memory"): return
    let
      prevMem = n[i-1]["memory"]
      currMem = n[i]["memory"]
      prevPc = n[i-1]["pc"].getInt()
      currPc = n[i]["pc"].getInt()

    if currMem.len > prevMem.len and prevPc == currPc - 1:
      let diff = currMem.len - prevMem.len
      for _ in 0 ..< diff:
        prevMem.add %chunk

proc updateAccount*(a, b: JsonNode) =
  if b.hasKey("name"):
    a["name"] = newJString(b["name"].getStr)
  a["balance"] = newJString(b["balance"].getStr)
  a["nonce"] = newJString(b["nonce"].getStr)
  a["code"] = newJString(b["code"].getStr)
  var storage = a["storage"]
  for k, v in b["storage"]:
    storage[k] = newJString(v.getStr)
  a["storageRoot"] = newJString(b["storageRoot"].getStr)
  a["codeHash"] = newJString(b["codeHash"].getStr)

proc copyAccount*(acc: JsonNode): JsonNode =
  result = newJObject()
  result["storage"] = newJObject()
  updateAccount(result, acc)

proc removePostStateDup*(postState: JsonNode): JsonNode =
  var accounts = newJObject()
  for acc in postState:
    let address = acc["address"].getStr
    if accounts.hasKey(address):
      updateAccount(accounts[address], acc)
    else:
      accounts[address] = copyAccount(acc)
  accounts

proc processNimbusData*(nimbus: JsonNode) =
  # remove duplicate accounts with same address
  # and only take newest one
  let postState = nimbus["stateDump"]["after"]
  nimbus["stateDump"]["after"] = removePostStateDup(postState)

  let txTraces = nimbus["txTraces"]

  for trace in txTraces:
    trace["structLogs"].fakeAlloc()

proc generatePremixData*(nimbus, geth: JsonNode) =
  var premixData = %{
    "nimbus": nimbus,
    "geth": geth
  }

  var data = "var premixData = " & premixData.pretty & "\n"
  writeFile(getFileDir("index.html") / "premixData.js", data)

proc hasInternalTx(tx: Transaction, blockNumber: BlockNumber, sender: EthAddress): bool =
  let
    number = %(blockNumber.prefixHex)
    recipient = tx.getRecipient(sender)
    code = request("eth_getCode", %[%recipient.prefixHex, number])
    recipientHasCode = code.getStr.len > 2 # "0x"

  if tx.contractCreation:
    return recipientHasCode or tx.payload.len > 0

  recipientHasCode

proc jsonTracer(tracer: string): JsonNode =
  result = %{ "tracer": %tracer }

proc requestInternalTx(txHash, tracer: JsonNode): JsonNode =
  let txTrace = request("debug_traceTransaction", %[txHash, tracer])
  if txTrace.kind == JNull:
    error "requested postState not available", txHash=txHash
    raise newException(ValueError, "Error when retrieving transaction postState")
  result = txTrace

proc requestAccount*(premix: JsonNode, blockNumber: BlockNumber, address: EthAddress) =
  let
    number = %(blockNumber.prefixHex)
    address = address.prefixHex
    proof = request("eth_getProof", %[%address, %[], number])

  let account = %{
    "address": %address,
    "codeHash": proof["codeHash"],
    "storageRoot": proof["storageHash"],
    "balance": proof["balance"],
    "nonce": proof["nonce"],
    "code": newJString("0x"),
    "storage": newJObject(),
    "accountProof": proof["accountProof"],
    "storageProof": proof["storageProof"]
  }
  premix.add account

proc padding(x: string): JsonNode =
  let val = x.substr(2)
  let pad = repeat('0', 64 - val.len)
  result = newJString("0x" & pad & val)

proc updateAccount*(address: string, account: JsonNode, blockNumber: BlockNumber) =
  let number = %(blockNumber.prefixHex)

  var storage = newJArray()
  for k, _ in account["storage"]:
    storage.add %k

  let proof = request("eth_getProof", %[%address, storage, number])
  account["address"] = %address
  account["codeHash"] = proof["codeHash"]
  account["storageRoot"] = proof["storageHash"]
  account["nonce"] = proof["nonce"]
  account["balance"] = proof["balance"]
  account["accountProof"] = proof["accountProof"]
  account["storageProof"] = proof["storageProof"]
  for x in proof["storageProof"]:
    x["value"] = padding(x["value"].getStr())
    account["storage"][x["key"].getStr] = x["value"]

proc requestPostState*(premix, n: JsonNode, blockNumber: BlockNumber) =
  type
    TxKind {.pure.} = enum
      Regular
      ContractCreation
      ContractCall

  let txs = n["transactions"]
  if txs.len == 0: return

  let tracer = jsonTracer(postStateTracer)
  for t in txs:
    var txKind = TxKind.Regular
    let tx = parseTransaction(t)
    let sender = tx.recoverSender().valueOr:
      raise (ref ValueError)(msg: "Invalid tx signature")
    if tx.contractCreation: txKind = TxKind.ContractCreation
    if hasInternalTx(tx, blockNumber, sender):
      let txTrace = requestInternalTx(t["hash"], tracer)
      for address, account in txTrace:
        updateAccount(address, account, blockNumber)
        premix.add account
      if not tx.contractCreation: txKind = TxKind.ContractCall
    else:
      premix.requestAccount(blockNumber, tx.getRecipient(sender))
      premix.requestAccount(blockNumber, sender)

    t["txKind"] = %($txKind)

proc requestPostState*(thisBlock: Block): JsonNode =
  let blockNumber = thisBlock.header.number
  var premix = newJArray()

  premix.requestPostState(thisBlock.jsonData, blockNumber)
  premix.requestAccount(blockNumber, thisBlock.header.coinbase)
  for uncle in thisBlock.body.uncles:
    premix.requestAccount(blockNumber, uncle.coinbase)

  removePostStateDup(premix)
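`requestAccount` and `updateAccount` above lean on geth's `eth_getProof` (EIP-1186, also referenced in the premix readme below), which returns the account's balance, nonce, codeHash and storageHash together with Merkle proofs for the account and any requested storage slots; those are exactly the fields copied into the `account` JSON above. A hedged sketch of the raw call, with a stand-in for the `request` helper used above; the endpoint and the address are placeholders:

```nim
import std/[json, httpclient]

# Hypothetical helper mirroring the `request` proc from the downloader module;
# the endpoint address is an assumption, not something defined in this repository.
proc rpc(meth: string, params: JsonNode): JsonNode =
  let client = newHttpClient()
  client.headers = newHttpHeaders({"Content-Type": "application/json"})
  let payload = %*{"jsonrpc": "2.0", "id": 1, "method": meth, "params": params}
  parseJson(client.postContent("http://127.0.0.1:8545", $payload))["result"]

when isMainModule:
  # eth_getProof(address, storageKeys, blockTag) returns balance, nonce,
  # codeHash, storageHash plus proofs for the account and each requested slot.
  let proof = rpc("eth_getProof", %*[
    "0x0000000000000000000000000000000000000000",  # placeholder address
    [],                                             # no storage slots requested
    "latest"
  ])
  echo proof["storageHash"].getStr
```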
@ -1,46 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  std/strutils,
  json, stew/byteutils,
  results,
  ../nimbus/db/[core_db, storage_types], eth/[rlp, common],
  ../nimbus/tracer

proc generatePrestate*(nimbus, geth: JsonNode, blockNumber: BlockNumber, parent: BlockHeader, blk: EthBlock) =
  template header: BlockHeader = blk.header
  let
    state = nimbus["state"]
    headerHash = rlpHash(header)
    chainDB = newCoreDbRef(DefaultDbMemory)
    kvt = chainDB.ctx.getKvt()

  discard chainDB.setHead(parent, true)
  chainDB.persistTransactions(blockNumber, header.txRoot, blk.transactions)
  discard chainDB.persistUncles(blk.uncles)

  kvt.put(genericHashKey(headerHash).toOpenArray, rlp.encode(header)).isOkOr:
    raiseAssert "generatePrestate(): put() failed " & $$error
  chainDB.addBlockNumberToHashLookup(header.number, headerHash)

  for k, v in state:
    let key = hexToSeqByte(k)
    let value = hexToSeqByte(v.getStr())
    kvt.put(key, value).isOkOr:
      raiseAssert "generatePrestate(): put() (loop) failed " & $$error

  var metaData = %{
    "blockNumber": %blockNumber.toHex,
    "geth": geth
  }

  metaData.dumpMemoryDB(chainDB)
  writeFile("block" & $blockNumber & ".json", metaData.pretty())
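The `isOkOr:` blocks above come from the `results` package: the body runs only when the `Result` holds an error, with `error` injected into scope. A small self-contained illustration of the idiom, assuming only `results` and the standard library (the proc and its messages are illustrative):

```nim
import std/strutils
import results

proc parsePositive(s: string): Result[int, string] =
  var v: int
  try:
    v = parseInt(s)
  except ValueError:
    return err("not a number: " & s)
  if v <= 0:
    return err("not positive: " & s)
  ok(v)

when isMainModule:
  let res = parsePositive("42")
  # `isOkOr` evaluates its body only on the error branch, with `error`
  # injected, which is the same idiom the kvt.put checks above rely on.
  res.isOkOr:
    raiseAssert "parse failed: " & error
  echo res.get()
```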
153
premix/readme.md
153
premix/readme.md
@ -1,153 +0,0 @@
# Premix

> Premix is **pre**mium gasoline **mix**ed with lubricant oil and it is
used in two-stroke internal combustion engines. It tends to produce a lot of
smoke.

This Premix is a block validation debugging tool for the Nimbus Ethereum
client. Premix will query transaction execution steps from other Ethereum
clients and compare them with those generated by Nimbus. It will then produce a
web page to present comparison results that can be inspected by the developer
to pinpoint the faulty instruction.

Premix will also produce a test case for the specific problematic transaction,
complete with a database snapshot to execute transaction validation in
isolation. This test case can then be integrated with the Nimbus project's test
suite.

![screenshot](assets/images/premix_screenshot.png)

## Requirements

Before you can use the Premix debugging tool there are several things you need
to prepare. The first requirement is a recent version of `geth` installed from
[source](https://github.com/ethereum/go-ethereum/releases) or
[binary](https://ethereum.github.io/go-ethereum/downloads/). The minimum
required version is 1.8.18. Beware that the 1.8.x series contains bugs in the
transaction tracer; upgrade to 1.9.x soon after it is released.
Afterwards, you can run it with this command:

```bash
geth --rpc --rpcapi eth,debug --syncmode full --gcmode=archive
```

You need to run it until it fully syncs past the problematic block you want to
debug (you might need to do it on an empty db, because some geth versions will
keep on doing a fast sync if that's what was done before). After that, you can
stop it by pressing `CTRL-C` and rerun it with the additional flag
`--maxpeers 0` if you want it to stop syncing, or just let it run as is if you
want to keep syncing.

The next requirement is building Nimbus and Premix:

```bash
# in the top-level directory:
make
```

After that, you can run Nimbus with this command:

```bash
./build/nimbus --prune:archive --port:30304
```

Nimbus will try to sync up to the problematic block, then stop and execute
Premix which will then load a report page in your default browser. If it fails
to do that, you can see the report page by manually opening
`premix/index.html`.

In your browser, you can explore the tracing result and find where the problem is.

## Tools

### Premix

Premix is the main debugging tool. It produces reports that can be viewed in
a browser and serialised debug data that can be consumed by the `debug` tool.
Premix consumes data produced by either `nimbus`, `persist`, or `dumper`.

You can run it manually using this command:

```bash
./build/premix debug*.json
```

### Persist

Because the Nimbus P2P layer still contains bugs, you may become impatient when
trying to sync blocks. In the `./premix` directory, you can find a `persist`
tool. It will help you sync relatively quickly because it bypasses the P2P
layer and downloads blocks from `geth` via the RPC API.

When it encounters a problematic block during syncing, it will stop and produce
debugging data just like Nimbus does.

```bash
./build/persist [--dataDir:your_database_directory] [--head: blockNumber] [--maxBlocks: number] [--numCommits: number]
```

### Debug

In the same `./premix` directory you'll find the `debug` tool that you can use
to process previously generated debugging info in order to work with one block
and one transaction at a time instead of multiple confusing blocks and
transactions.

```bash
./build/debug block*.json
```

where `block*.json` contains the database snapshot needed to debug a single
block produced by the Premix tool.

### Dumper

Dumper was designed specifically to produce debugging data that can be further
processed by Premix from information already stored in the database. It will
create tracing information for a single block if that block has already been
persisted.

If you want to generate debugging data, it's better to use the Persist tool.
The data generated by Dumper is usually used to debug Premix features in
general and the report page logic in particular.

```bash
# usage:
./build/dumper [--datadir:your_path] --head:blockNumber
```

### Hunter

Hunter's purpose is to track down problematic blocks and create debugging info
associated with them. It will not access your on-disk database, because it has
its own prestate construction code.

Hunter will download all it needs from geth; just make sure your geth version
is at least 1.8.18.

Hunter depends on
`eth_getProof` [(EIP-1186)](https://github.com/ethereum/EIPs/issues/1186). Make
sure your installed `geth` supports this functionality (older versions don't
have it implemented).

```bash
# usage:
./build/hunter --head:blockNumber --maxBlocks:number
```

`blockNumber` is the starting block where the hunt begins.

`maxBlocks` is the number of problematic blocks you want to capture before
stopping the hunt.

### Regress

Regress is an offline block validation tool. Unlike the Persist tool, it will
not download block information from anywhere. Regress validates blocks already
persisted in your database, trying to find any regression introduced either by
bug fixing or refactoring.

```bash
# usage:
./build/regress [--dataDir:your_db_path] --head:blockNumber
```
@ -1,87 +0,0 @@
# Nimbus
# Copyright (c) 2020-2024 Status Research & Development GmbH
# Licensed under either of
#  * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
#    http://www.apache.org/licenses/LICENSE-2.0)
#  * MIT license ([LICENSE-MIT](LICENSE-MIT) or
#    http://opensource.org/licenses/MIT)
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.

import
  chronicles,
  ../nimbus/[evm/state, evm/types],
  ../nimbus/core/executor,
  ../nimbus/common/common,
  ../nimbus/db/opts,
  ../nimbus/db/core_db/persistent,
  configuration # must be late (compilation annoyance)

const
  numBlocks = 256

proc validateBlock(com: CommonRef, blockNumber: BlockNumber): BlockNumber =
  var
    parentNumber = blockNumber - 1
    parent = com.db.getBlockHeader(parentNumber)
    blocks = newSeq[EthBlock](numBlocks)

  for i in 0 ..< numBlocks:
    blocks[i] = com.db.getEthBlock(blockNumber + i.BlockNumber)

  let transaction = com.db.ctx.newTransaction()
  defer: transaction.dispose()

  for i in 0 ..< numBlocks:
    stdout.write blockNumber + i.BlockNumber
    stdout.write "\r"

    let
      vmState = BaseVMState.new(parent, blocks[i].header, com)
      validationResult = vmState.processBlock(blocks[i])

    if validationResult.isErr:
      error "block validation error",
        err = validationResult.error(), blockNumber = blockNumber + i.BlockNumber

    parent = blocks[i].header

  transaction.rollback()
  result = blockNumber + numBlocks.BlockNumber

proc main() {.used.} =
  let
    conf = getConfiguration()
    com = CommonRef.new(newCoreDbRef(
      DefaultDbPersistent, conf.dataDir, DbOptions.init()))

  # move head to block number ...
  if conf.head == 0'u64:
    raise newException(ValueError, "please set block number with --head: blockNumber")

  var counter = 0
  var blockNumber = conf.head

  while true:
    blockNumber = com.validateBlock(blockNumber)

    inc counter
    if conf.maxBlocks != 0 and counter >= conf.maxBlocks:
      break

when isMainModule:
  var message: string

  ## Processing command line arguments
  if processArguments(message) != Success:
    echo message
    quit(QuitFailure)
  else:
    if len(message) > 0:
      echo message
      quit(QuitSuccess)

  try:
    main()
  except:
    echo getCurrentExceptionMsg()
@ -12,18 +12,12 @@
{.warning[UnusedImport]: off.}

import
  #../premix/premix, # -- currently disabled (no tracer at the moment)
  #../premix/persist, # -- ditto
  #../premix/debug, # -- ditto
  #../premix/dumper, # -- ditto
  #../premix/hunter, # -- ditto
  #../premix/regress, # -- ditto
  #./tracerTestGen, # -- ditto
  #./persistBlockTestGen, # -- ditto
  ../hive_integration/nodocker/rpc/rpc_sim,
  ../hive_integration/nodocker/consensus/consensus_sim,
  #../hive_integration/nodocker/graphql/graphql_sim, # -- does not compile
  #../hive_integration/nodocker/engine/engine_sim, # -- does not compile
  ../hive_integration/nodocker/engine/engine_sim,
  ../hive_integration/nodocker/pyspec/pyspec_sim,
  ../tools/t8n/t8n,
  ../tools/t8n/t8n_test,