Merge stable into unstable

Zahary Karadjov 2021-05-19 09:38:13 +03:00
parent d8bb91d9a9
commit b7aa30adfd
25 changed files with 733 additions and 142 deletions


@ -233,6 +233,9 @@ jobs:
shell: bash
working-directory: nim-beacon-chain
run: |
if [[ "${{ runner.os }}" == "macOS" ]]; then
ulimit -n 1024
fi
make -j$ncpu ARCH_OVERRIDE=$PLATFORM CI_CACHE=NimBinaries QUICK_AND_DIRTY_COMPILER=1 update
- name: Get latest fixtures commit hash
@ -268,6 +271,9 @@ jobs:
shell: bash
working-directory: nim-beacon-chain
run: |
if [[ "${{ runner.os }}" == "macOS" ]]; then
ulimit -n 1024
fi
make -j$ncpu ARCH_OVERRIDE=$PLATFORM LOG_LEVEL=TRACE NIMFLAGS="-d:testnet_servers_image" nimbus_beacon_node nimbus_validator_client
- name: Run nim-beacon-chain tests
@ -275,6 +281,9 @@ jobs:
shell: bash
working-directory: nim-beacon-chain
run: |
if [[ "${{ runner.os }}" == "macOS" ]]; then
ulimit -n 1024
fi
make -j$ncpu ARCH_OVERRIDE=$PLATFORM DISABLE_TEST_FIXTURES_SCRIPT=1 test
# The upload creates a combined report that gets posted as a comment on the PR

.github/workflows/cron.yml

@ -0,0 +1,190 @@
name: Daily
on:
schedule:
- cron: "45 3 * * *"
jobs:
build:
strategy:
fail-fast: false
max-parallel: 20
matrix:
target:
- os: linux
cpu: amd64
- os: linux
cpu: i386
- os: macos
cpu: amd64
- os: windows
cpu: amd64
- os: windows
cpu: i386
branch: [version-1-2, version-1-4, devel]
include:
- target:
os: linux
builder: ubuntu-18.04
shell: bash
- target:
os: macos
builder: macos-10.15
shell: bash
- target:
os: windows
builder: windows-2019
shell: msys2 {0}
defaults:
run:
shell: ${{ matrix.shell }}
name: '${{ matrix.target.os }}-${{ matrix.target.cpu }} (Nim ${{ matrix.branch }})'
runs-on: ${{ matrix.builder }}
continue-on-error: ${{ matrix.branch == 'devel' }}
steps:
- name: Checkout nimbus-eth2
uses: actions/checkout@v2
with:
ref: unstable
- name: Derive environment variables
shell: bash
run: |
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
PLATFORM=x64
else
PLATFORM=x86
fi
echo "PLATFORM=${PLATFORM}" >> $GITHUB_ENV
# libminiupnp / natpmp
if [[ '${{ runner.os }}' == 'Linux' && '${{ matrix.target.cpu }}' == 'i386' ]]; then
export CFLAGS="${CFLAGS} -m32 -mno-adx"
echo "CFLAGS=${CFLAGS}" >> $GITHUB_ENV
fi
ncpu=""
case '${{ runner.os }}' in
'Linux')
ncpu=$(nproc)
;;
'macOS')
ncpu=$(sysctl -n hw.ncpu)
;;
'Windows')
ncpu=${NUMBER_OF_PROCESSORS}
;;
esac
[[ -z "$ncpu" || $ncpu -le 0 ]] && ncpu=1
echo "ncpu=${ncpu}" >> $GITHUB_ENV
- name: Install build dependencies (Linux i386)
if: runner.os == 'Linux' && matrix.target.cpu == 'i386'
run: |
sudo dpkg --add-architecture i386
sudo apt-fast update -qq
sudo DEBIAN_FRONTEND='noninteractive' apt-fast install \
--no-install-recommends -yq gcc-multilib g++-multilib
mkdir -p external/bin
cat << EOF > external/bin/gcc
#!/bin/bash
exec $(which gcc) -m32 -mno-adx "\$@"
EOF
cat << EOF > external/bin/g++
#!/bin/bash
exec $(which g++) -m32 -mno-adx "\$@"
EOF
chmod 755 external/bin/gcc external/bin/g++
echo "${{ github.workspace }}/external/bin" >> $GITHUB_PATH
- name: MSYS2 (Windows i386)
if: runner.os == 'Windows' && matrix.target.cpu == 'i386'
uses: msys2/setup-msys2@v2
with:
path-type: inherit
msystem: MINGW32
install: >-
base-devel
git
mingw-w64-i686-toolchain
- name: MSYS2 (Windows amd64)
if: runner.os == 'Windows' && matrix.target.cpu == 'amd64'
uses: msys2/setup-msys2@v2
with:
path-type: inherit
install: >-
base-devel
git
mingw-w64-x86_64-toolchain
- name: Restore Nim DLLs dependencies (Windows) from cache
if: runner.os == 'Windows'
id: windows-dlls-cache
uses: actions/cache@v2
with:
path: external/dlls-${{ matrix.target.cpu }}
key: 'dlls-${{ matrix.target.cpu }}'
- name: Install DLLs dependencies (Windows)
if: >
steps.windows-dlls-cache.outputs.cache-hit != 'true' &&
runner.os == 'Windows'
shell: bash
run: |
mkdir -p external
curl -L "https://nim-lang.org/download/windeps.zip" -o external/windeps.zip
7z x -y external/windeps.zip -oexternal/dlls-${{ matrix.target.cpu }}
- name: Path to cached dependencies (Windows)
if: >
runner.os == 'Windows'
shell: bash
run: |
echo "${{ github.workspace }}/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH
- name: Install build dependencies (macOS)
if: runner.os == 'macOS'
shell: bash
run: |
brew install gnu-getopt
brew link --force gnu-getopt
- name: Get latest fixtures commit hash
id: fixtures_version
shell: bash
run: |
getHash() {
git ls-remote "https://github.com/$1" "${2:-HEAD}" | cut -f 1
}
fixturesHash=$(getHash status-im/nim-eth2-scenarios)
echo "::set-output name=fixtures::${fixturesHash}"
- name: Restore Ethereum Foundation fixtures from cache
id: fixtures-cache
uses: actions/cache@v2
with:
path: fixturesCache
key: 'eth2-scenarios-${{ steps.fixtures_version.outputs.fixtures }}'
- name: Get the Ethereum Foundation fixtures
shell: bash
run: |
scripts/setup_official_tests.sh fixturesCache
- name: Build Nim and Nimbus dependencies
shell: bash
run: |
make -j ${ncpu} NIM_COMMIT=${{ matrix.branch }} ARCH_OVERRIDE=${PLATFORM} QUICK_AND_DIRTY_COMPILER=1 update
- name: Smoke test the Beacon Node and Validator Client with all tracing enabled
shell: bash
run: |
make -j ${ncpu} NIM_COMMIT=${{ matrix.branch }} LOG_LEVEL=TRACE NIMFLAGS="-d:testnet_servers_image" nimbus_beacon_node nimbus_validator_client
- name: Run nimbus-eth2 tests
shell: bash
run: |
make -j ${ncpu} NIM_COMMIT=${{ matrix.branch }} DISABLE_TEST_FIXTURES_SCRIPT=1 test


@ -178,9 +178,63 @@ jobs:
name: Windows_amd64_checksum
path: ./dist/${{ steps.make_dist.outputs.archive_dir }}/build/nimbus_beacon_node.sha512sum
retention-days: 2
build-macos-amd64:
name: macOS AMD64 release asset
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Build project
id: make_dist
run: |
make dist-macos
cd dist
ARCHIVE=$(echo nimbus-eth2_macOS_amd64_*.tar.gz)
echo "::set-output name=archive::"${ARCHIVE}
echo "::set-output name=archive_dir::"${ARCHIVE%.tar.gz}
tar -xzf ${ARCHIVE} ${ARCHIVE%.tar.gz}/build/nimbus_beacon_node.sha512sum
- name: Upload archive artefact
uses: actions/upload-artifact@v2
with:
name: macOS_amd64_archive
path: ./dist/${{ steps.make_dist.outputs.archive }}
retention-days: 2
- name: Upload checksum artefact
uses: actions/upload-artifact@v2
with:
name: macOS_amd64_checksum
path: ./dist/${{ steps.make_dist.outputs.archive_dir }}/build/nimbus_beacon_node.sha512sum
retention-days: 2
build-macos-arm64:
name: macOS ARM64 release asset
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Build project
id: make_dist
run: |
make dist-macos-arm64
cd dist
ARCHIVE=$(echo nimbus-eth2_macOS_arm64_*.tar.gz)
echo "::set-output name=archive::"${ARCHIVE}
echo "::set-output name=archive_dir::"${ARCHIVE%.tar.gz}
tar -xzf ${ARCHIVE} ${ARCHIVE%.tar.gz}/build/nimbus_beacon_node.sha512sum
- name: Upload archive artefact
uses: actions/upload-artifact@v2
with:
name: macOS_arm64_archive
path: ./dist/${{ steps.make_dist.outputs.archive }}
retention-days: 2
- name: Upload checksum artefact
uses: actions/upload-artifact@v2
with:
name: macOS_arm64_checksum
path: ./dist/${{ steps.make_dist.outputs.archive_dir }}/build/nimbus_beacon_node.sha512sum
retention-days: 2
prepare-release:
name: Prepare release draft
needs: [build-amd64, build-arm64, build-arm, build-win64]
needs: [build-amd64, build-arm64, build-arm, build-win64, build-macos-amd64, build-macos-arm64]
runs-on: ubuntu-latest
steps:
- name: Download artefacts
@ -202,6 +256,10 @@ jobs:
cat Linux_arm_checksum/* >> release_notes.md
echo '# Windows AMD64' >> release_notes.md
cat Windows_amd64_checksum/* >> release_notes.md
echo '# macOS AMD64' >> release_notes.md
cat macOS_amd64_checksum/* >> release_notes.md
echo '# macOS ARM64' >> release_notes.md
cat macOS_arm64_checksum/* >> release_notes.md
echo '```' >> release_notes.md
- name: Create release
id: create_release
@ -217,6 +275,8 @@ jobs:
Linux_arm64_archive/*
Linux_arm_archive/*
Windows_amd64_archive/*
macOS_amd64_archive/*
macOS_arm64_archive/*
- name: Delete artefacts
uses: geekyeggo/delete-artifact@v1
with:
@ -230,4 +290,8 @@ jobs:
Linux_arm_checksum
Windows_amd64_archive
Windows_amd64_checksum
macOS_amd64_archive
macOS_amd64_checksum
macOS_arm64_archive
macOS_arm64_checksum


@ -1,3 +1,39 @@
2021-05-17 v1.3.0
=================
This release offers safer and easier options to migrate to Nimbus from other clients.
It also brings further performance optimizations.
**We've added:**
* A new `slashingdb` sub-command with `import` and `export` options. This allows for
safely migrating to Nimbus from another client (as per the [EIP-3076](https://eips.ethereum.org/EIPS/eip-3076)
slashing protection interchange format).
Please see the newly prepared [migration guides](https://nimbus.guide/migration.html) for the details; a short usage sketch follows at the end of this entry.
* A new `ncli_db validatorPerf` command. This generates a textual report on
the attestation performance of a particular validator
(please note that `ncli_db` is available only when compiling from source).
* Official binaries for macOS (AMD64 and ARM64).
* Pruning of the slashing protection database and a transition to more optimal
queries. This results in a significant reduction in both disk and CPU usage
on nodes running a large number of validators.
* A more consistent level of validation for attestations received from
third-party sources and from the JSON-RPC and REST APIs. This prevents invalid
attestations from being broadcast to the network.
* Performance tuning of attestation subnet transition timings and state
snapshotting intervals. This results in improved CPU and bandwidth usage.
**We've fixed:**
* Problems in the GossipSub subnet walking logic leading to unnecessary bandwidth
and CPU costs.
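For illustration, a migration using the new sub-command might look like the
commands below (a sketch only: the binary path, data directories and the
interchange file name are placeholders; the `slashingdb` options are the ones
introduced by this release).

    # Export the local slashing protection history to an EIP-3076 interchange
    # file; --validator can optionally limit the export to specific keys.
    build/nimbus_beacon_node slashingdb export interchange.json --data-dir=/path/to/old/data-dir

    # Import the same file on the node that will take over the validator keys,
    # before it starts validating.
    build/nimbus_beacon_node slashingdb import interchange.json --data-dir=/path/to/new/data-dir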
2021-05-03 v1.2.2
=================


@ -85,6 +85,8 @@ TOOLS_CSV := $(subst $(SPACE),$(COMMA),$(TOOLS))
dist-arm64 \
dist-arm \
dist-win64 \
dist-macos \
dist-macos-arm64 \
dist \
benchmarks
@ -582,11 +584,21 @@ dist-win64:
+ MAKE="$(MAKE)" \
scripts/make_dist.sh win64
dist-macos:
+ MAKE="$(MAKE)" \
scripts/make_dist.sh macos
dist-macos-arm64:
+ MAKE="$(MAKE)" \
scripts/make_dist.sh macos-arm64
dist:
+ $(MAKE) dist-amd64
+ $(MAKE) dist-arm64
+ $(MAKE) dist-arm
+ $(MAKE) dist-win64
+ $(MAKE) dist-macos
+ $(MAKE) dist-macos-arm64
#- this simple test will show any missing dynamically-linked Glibc symbols in the target distro
dist-test:


@ -9,15 +9,18 @@
import
strutils, os, options, unicode, uri,
chronicles, chronicles/options as chroniclesOptions,
confutils, confutils/defs, confutils/std/net, stew/shims/net as stewNet,
stew/io2, unicodedb/properties, normalize,
stew/[io2, byteutils], unicodedb/properties, normalize,
eth/common/eth_types as commonEthTypes, eth/net/nat,
eth/p2p/discoveryv5/enr,
json_serialization, web3/[ethtypes, confutils_defs],
spec/[crypto, keystore, digest, datatypes, network],
./spec/[crypto, keystore, digest, datatypes, network],
./networking/network_metadata,
filepath
./validators/slashing_protection_common,
./filepath
export
uri,
@ -38,6 +41,7 @@ type
wallets
record
web3
slashingdb
WalletsCmd* {.pure.} = enum
create = "Creates a new EIP-2386 wallet"
@ -70,10 +74,16 @@ type
v2
both
SlashProtCmd* = enum
`import` = "Import a EIP-3076 slashing protection interchange file"
`export` = "Export a EIP-3076 slashing protection interchange file"
# migrateAll = "Export and remove the whole validator slashing protection DB."
# migrate = "Export and remove specified validators from Nimbus."
BeaconNodeConf* = object
logLevel* {.
desc: "Sets the log level for process and topics (e.g. \"DEBUG; TRACE:discv5,libp2p; REQUIRED:none; DISABLED:none\")"
defaultValue: "INFO"
desc: "Sets the log level for process and topics (e.g. \"DEBUG; TRACE:discv5,libp2p; REQUIRED:none; DISABLED:none\") [=INFO]"
name: "log-level" }: string
logFile* {.
@ -81,12 +91,14 @@ type
name: "log-file" }: Option[OutFile]
eth2Network* {.
desc: "The Eth2 network to join [=mainnet]"
desc: "The Eth2 network to join"
defaultValueDesc: "mainnet"
name: "network" }: Option[string]
dataDir* {.
defaultValue: config.defaultDataDir()
desc: "The directory where nimbus will store all blockchain data"
defaultValue: config.defaultDataDir()
defaultValueDesc: ""
abbr: "d"
name: "data-dir" }: OutDir
@ -117,15 +129,14 @@ type
name: "non-interactive" }: bool
netKeyFile* {.
defaultValue: "random",
desc: "Source of network (secp256k1) private key file " &
"(random|<path>) [=random]"
"(random|<path>)"
defaultValue: "random",
name: "netkey-file" }: string
netKeyInsecurePassword* {.
desc: "Use pre-generated INSECURE password for network private key file"
defaultValue: false,
desc: "Use pre-generated INSECURE password for network private key " &
"file [=false]"
name: "insecure-netkey-password" }: bool
agentString* {.
@ -141,7 +152,7 @@ type
slashingDbKind* {.
hidden
defaultValue: SlashingDbKind.v2
desc: "The slashing DB flavour to use (v2) [=v2]"
desc: "The slashing DB flavour to use"
name: "slashing-db-kind" }: SlashingDbKind
case cmd* {.
@ -155,42 +166,45 @@ type
name: "bootstrap-node" }: seq[string]
bootstrapNodesFile* {.
defaultValue: ""
desc: "Specifies a line-delimited file of bootstrap Ethereum network addresses"
defaultValue: ""
name: "bootstrap-file" }: InputFile
listenAddress* {.
desc: "Listening address for the Ethereum LibP2P and Discovery v5 traffic"
defaultValue: defaultListenAddress
desc: "Listening address for the Ethereum LibP2P and Discovery v5 " &
"traffic [=0.0.0.0]"
defaultValueDesc: "0.0.0.0"
name: "listen-address" }: ValidIpAddress
tcpPort* {.
desc: "Listening TCP port for Ethereum LibP2P traffic"
defaultValue: defaultEth2TcpPort
desc: "Listening TCP port for Ethereum LibP2P traffic [=9000]"
defaultValueDesc: "9000"
name: "tcp-port" }: Port
udpPort* {.
desc: "Listening UDP port for node discovery"
defaultValue: defaultEth2TcpPort
desc: "Listening UDP port for node discovery [=9000]"
# defaultValueDesc: 9000
name: "udp-port" }: Port
maxPeers* {.
desc: "The maximum number of peers to connect to"
defaultValue: 160 # 5 (fanout) * 64 (subnets) / 2 (subs) for a healthy mesh
desc: "The maximum number of peers to connect to [=160]"
name: "max-peers" }: int
nat* {.
desc: "Specify method to use for determining public address. " &
"Must be one of: any, none, upnp, pmp, extip:<IP>"
defaultValue: NatConfig(hasExtIp: false, nat: NatAny)
defaultValueDesc: "any"
name: "nat" .}: NatConfig
enrAutoUpdate* {.
defaultValue: false
desc: "Discovery can automatically update its ENR with the IP address " &
"and UDP port as seen by other nodes it communicates with. " &
"This option allows to enable/disable this functionality"
defaultValue: false
name: "enr-auto-update" .}: bool
weakSubjectivityCheckpoint* {.
@ -206,9 +220,9 @@ type
name: "finalized-checkpoint-block" }: Option[InputFile]
nodeName* {.
defaultValue: ""
desc: "A name for this node that will appear in the logs. " &
"If you set this to 'auto', a persistent automatically generated ID will be selected for each --data-dir folder"
defaultValue: ""
name: "node-name" }: string
graffiti* {.
@ -217,88 +231,94 @@ type
name: "graffiti" }: Option[GraffitiBytes]
verifyFinalization* {.
defaultValue: false
desc: "Specify whether to verify finalization occurs on schedule, for testing"
defaultValue: false
name: "verify-finalization" }: bool
stopAtEpoch* {.
defaultValue: 0
desc: "A positive epoch selects the epoch at which to stop"
defaultValue: 0
name: "stop-at-epoch" }: uint64
metricsEnabled* {.
desc: "Enable the metrics server"
defaultValue: false
desc: "Enable the metrics server [=false]"
name: "metrics" }: bool
metricsAddress* {.
desc: "Listening address of the metrics server"
defaultValue: defaultAdminListenAddress
desc: "Listening address of the metrics server [=127.0.0.1]"
defaultValueDesc: "127.0.0.1"
name: "metrics-address" }: ValidIpAddress
metricsPort* {.
desc: "Listening HTTP port of the metrics server"
defaultValue: 8008
desc: "Listening HTTP port of the metrics server [=8008]"
name: "metrics-port" }: Port
statusBarEnabled* {.
defaultValue: true
desc: "Display a status bar at the bottom of the terminal screen"
defaultValue: true
name: "status-bar" }: bool
statusBarContents* {.
desc: "Textual template for the contents of the status bar"
defaultValue: "peers: $connected_peers;" &
"finalized: $finalized_root:$finalized_epoch;" &
"head: $head_root:$head_epoch:$head_epoch_slot;" &
"time: $epoch:$epoch_slot ($slot);" &
"sync: $sync_status|" &
"ETH: $attached_validators_balance"
desc: "Textual template for the contents of the status bar"
defaultValueDesc: ""
name: "status-bar-contents" }: string
rpcEnabled* {.
desc: "Enable the JSON-RPC server"
defaultValue: false
desc: "Enable the JSON-RPC server [=false]"
name: "rpc" }: bool
rpcPort* {.
desc: "HTTP port for the JSON-RPC service"
defaultValue: defaultEth2RpcPort
desc: "HTTP port for the JSON-RPC service [=9190]"
defaultValueDesc: "9190"
name: "rpc-port" }: Port
rpcAddress* {.
desc: "Listening address of the RPC server"
defaultValue: defaultAdminListenAddress
desc: "Listening address of the RPC server [=127.0.0.1]"
defaultValueDesc: "127.0.0.1"
name: "rpc-address" }: ValidIpAddress
restEnabled* {.
desc: "Enable the REST (BETA version) server"
defaultValue: false
desc: "Enable the REST (BETA version) server [=false]"
name: "rest" }: bool
restPort* {.
desc: "Port for the REST (BETA version) server"
defaultValue: DefaultEth2RestPort
desc: "Port for the REST (BETA version) server [=5052]"
defaultValueDesc: "5052"
name: "rest-port" }: Port
restAddress* {.
desc: "Listening address of the REST (BETA version) server"
defaultValue: defaultAdminListenAddress
desc: "Listening address of the REST (BETA version) server [=127.0.0.1]"
defaultValueDesc: "127.0.0.1"
name: "rest-address" }: ValidIpAddress
inProcessValidators* {.
defaultValue: true # the use of the nimbus_signing_process binary by default will be delayed until async I/O over stdin/stdout is developed for the child process.
desc: "Disable the push model (the beacon node tells a signing process with the private keys of the validators what to sign and when) and load the validators in the beacon node itself"
defaultValue: true # the use of the nimbus_signing_process binary by default will be delayed until async I/O over stdin/stdout is developed for the child process.
name: "in-process-validators" }: bool
discv5Enabled* {.
desc: "Enable Discovery v5"
defaultValue: true
desc: "Enable Discovery v5 [=true]"
name: "discv5" }: bool
dumpEnabled* {.
desc: "Write SSZ dumps of blocks, attestations and states to data dir"
defaultValue: false
desc: "Write SSZ dumps of blocks, attestations and states to data dir [=false]"
name: "dump" }: bool
directPeers* {.
@ -306,8 +326,8 @@ type
name: "direct-peer" .}: seq[string]
doppelgangerDetection* {.
desc: "Whether to detect whether another validator is be running the same validator keys"
defaultValue: true
desc: "Whether to detect whether another validator is be running the same validator keys [=true]"
name: "doppelganger-detection"
}: bool
@ -321,18 +341,20 @@ type
name: "total-validators" }: uint64
bootstrapAddress* {.
defaultValue: init(ValidIpAddress, "127.0.0.1")
desc: "The public IP address that will be advertised as a bootstrap node for the testnet"
defaultValue: init(ValidIpAddress, "127.0.0.1")
defaultValueDesc: "127.0.0.1"
name: "bootstrap-address" }: ValidIpAddress
bootstrapPort* {.
defaultValue: defaultEth2TcpPort
desc: "The TCP/UDP port that will be used by the bootstrap node"
defaultValue: defaultEth2TcpPort
defaultValueDesc: "9000"
name: "bootstrap-port" }: Port
genesisOffset* {.
defaultValue: 5
desc: "Seconds from now to add to genesis time"
defaultValue: 5
name: "genesis-offset" }: int
outputGenesis* {.
@ -340,8 +362,8 @@ type
name: "output-genesis" }: OutFile
withGenesisRoot* {.
defaultValue: false
desc: "Include a genesis root in 'network.json'"
defaultValue: false
name: "with-genesis-root" }: bool
outputBootstrapFile* {.
@ -385,8 +407,8 @@ type
case depositsCmd* {.command.}: DepositsCmd
of DepositsCmd.createTestnetDeposits:
totalDeposits* {.
defaultValue: 1
desc: "Number of deposits to generate"
defaultValue: 1
name: "count" }: int
existingWalletId* {.
@ -394,13 +416,13 @@ type
name: "wallet" }: Option[WalletName]
outValidatorsDir* {.
defaultValue: "validators"
desc: "Output folder for validator keystores"
defaultValue: "validators"
name: "out-validators-dir" }: string
outSecretsDir* {.
defaultValue: "secrets"
desc: "Output folder for randomly generated keystore passphrases"
defaultValue: "secrets"
name: "out-secrets-dir" }: string
outDepositsFile* {.
@ -431,9 +453,10 @@ type
desc: "Validator index or a public key of the exited validator" }: string
rpcUrlForExit* {.
name: "rpc-url"
desc: "URL of the beacon node JSON-RPC service"
defaultValue: parseUri("http://localhost:" & $defaultEth2RpcPort)
desc: "URL of the beacon node JSON-RPC service" }: Uri
defaultValueDesc: "http://localhost:9190"
name: "rpc-url" }: Uri
exitAtEpoch* {.
name: "epoch"
@ -455,8 +478,8 @@ type
name: "udp-port" .}: Port
seqNumber* {.
defaultValue: 1,
desc: "Record sequence number"
defaultValue: 1,
name: "seq-number" .}: uint
fields* {.
@ -477,10 +500,26 @@ type
desc: "The web3 provider URL to test"
name: "url" }: Uri
of slashingdb:
case slashingdbCmd* {.command.}: SlashProtCmd
of SlashProtCmd.`import`:
importedInterchangeFile* {.
desc: "EIP-3076 slashing protection interchange file to import"
argument .}: InputFile
of SlashProtCmd.`export`:
exportedValidators* {.
desc: "Limit the export to specific validators " &
"(specified as numeric indices or public keys)"
abbr: "v"
name: "validator" }: seq[PubKey0x]
exportedInterchangeFile* {.
desc: "EIP-3076 slashing protection interchange file to export"
argument }: OutFile
ValidatorClientConf* = object
logLevel* {.
desc: "Sets the log level"
defaultValue: "INFO"
desc: "Sets the log level [=INFO]"
name: "log-level" }: string
logFile* {.
@ -488,8 +527,9 @@ type
name: "log-file" }: Option[OutFile]
dataDir* {.
defaultValue: config.defaultDataDir()
desc: "The directory where nimbus will store all blockchain data"
defaultValue: config.defaultDataDir()
defaultValueDesc: ""
abbr: "d"
name: "data-dir" }: OutDir
@ -516,23 +556,25 @@ type
name: "graffiti" }: Option[GraffitiBytes]
stopAtEpoch* {.
defaultValue: 0
desc: "A positive epoch selects the epoch at which to stop"
defaultValue: 0
name: "stop-at-epoch" }: uint64
rpcPort* {.
desc: "HTTP port of the server to connect to for RPC"
defaultValue: defaultEth2RpcPort
desc: "HTTP port of the server to connect to for RPC [=9190]"
defaultValueDesc: "9190"
name: "rpc-port" }: Port
rpcAddress* {.
defaultValue: defaultAdminListenAddress
desc: "Address of the server to connect to for RPC [=127.0.0.1]"
desc: "Address of the server to connect to for RPC"
defaultValue: init(ValidIpAddress, "127.0.0.1")
defaultValueDesc: "127.0.0.1"
name: "rpc-address" }: ValidIpAddress
retryDelay* {.
defaultValue: 10
desc: "Delay in seconds between retries after unsuccessful attempts to connect to a beacon node [=10]"
defaultValue: 10
name: "retry-delay" }: int
proc defaultDataDir*(config: BeaconNodeConf|ValidatorClientConf): string =
@ -590,6 +632,13 @@ func parseCmdArg*(T: type Uri, input: TaintedString): T
func completeCmdArg*(T: type Uri, input: TaintedString): seq[string] =
return @[]
func parseCmdArg*(T: type PubKey0x, input: TaintedString): T
{.raises: [ValueError, Defect].} =
PubKey0x(hexToPaddedByteArray[RawPubKeySize](input.string))
func completeCmdArg*(T: type PubKey0x, input: TaintedString): seq[string] =
return @[]
func parseCmdArg*(T: type Checkpoint, input: TaintedString): T
{.raises: [ValueError, Defect].} =
let sepIdx = find(input.string, ':')


@ -92,6 +92,9 @@ declareGauge next_action_wait,
logScope: topics = "beacnde"
const SlashingDbName = "slashing_protection"
# changing this requires physical file rename as well or history is lost.
proc init*(T: type BeaconNode,
runtimePreset: RuntimePreset,
rng: ref BrHmacDrbgContext,
@ -330,8 +333,7 @@ proc init*(T: type BeaconNode,
slashingProtectionDB =
SlashingProtectionDB.init(
getStateField(chainDag.headState, genesis_validators_root),
config.validatorsDir(), "slashing_protection"
)
config.validatorsDir(), SlashingDbName)
validatorPool = newClone(ValidatorPool.init(slashingProtectionDB))
consensusManager = ConsensusManager.new(
@ -1929,6 +1931,59 @@ proc doWeb3Cmd(config: BeaconNodeConf) {.raises: [Defect, CatchableError].} =
waitFor testWeb3Provider(config.web3TestUrl,
metadata.depositContractAddress)
proc doSlashingExport(conf: BeaconNodeConf) {.raises: [IOError, Defect].}=
let
dir = conf.validatorsDir()
filetrunc = SlashingDbName
# TODO: Make it read-only https://github.com/status-im/nim-eth/issues/312
let db = SlashingProtectionDB.loadUnchecked(dir, filetrunc, readOnly = false)
let interchange = conf.exportedInterchangeFile.string
db.exportSlashingInterchange(interchange, conf.exportedValidators)
echo "Export finished: '", dir/filetrunc & ".sqlite3" , "' into '", interchange, "'"
proc doSlashingImport(conf: BeaconNodeConf) {.raises: [SerializationError, IOError, Defect].} =
let
dir = conf.validatorsDir()
filetrunc = SlashingDbName
# TODO: Make it read-only https://github.com/status-im/nim-eth/issues/312
let interchange = conf.importedInterchangeFile.string
var spdir: SPDIR
try:
spdir = JSON.loadFile(interchange, SPDIR)
except SerializationError as err:
writeStackTrace()
stderr.write $JSON & " load issue for file \"", interchange, "\"\n"
stderr.write err.formatMsg(interchange), "\n"
quit 1
# Open DB and handle migration from v1 to v2 if needed
let db = SlashingProtectionDB.init(
genesis_validators_root = Eth2Digest spdir.metadata.genesis_validators_root,
basePath = dir,
dbname = filetrunc,
modes = {kCompleteArchive}
)
# Now import the slashing interchange file
# Failure modes:
# - siError can only happen with invalid genesis_validators_root which would be caught above
# - siPartial can happen for invalid public keys, slashable blocks, slashable votes
let status = db.inclSPDIR(spdir)
doAssert status in {siSuccess, siPartial}
echo "Import finished: '", interchange, "' into '", dir/filetrunc & ".sqlite3", "'"
proc doSlashingInterchange(conf: BeaconNodeConf) {.raises: [Defect, CatchableError].} =
doAssert conf.cmd == slashingdb
case conf.slashingdbCmd
of SlashProtCmd.`export`:
conf.doSlashingExport()
of SlashProtCmd.`import`:
conf.doSlashingImport()
{.pop.} # TODO moduletests exceptions
programMain:
var
@ -1977,3 +2032,4 @@ programMain:
of wallets: doWallets(config, rng[])
of record: doRecord(config, rng[])
of web3: doWeb3Cmd(config)
of slashingdb: doSlashingInterchange(config)


@ -9,10 +9,11 @@
import
# stdlib
std/os,
std/[os, algorithm, sequtils],
# Status
eth/db/[kvstore, kvstore_sqlite3],
stew/results, chronicles,
stew/[results, byteutils],
chronicles, chronicles/timings,
# Internal
../spec/[datatypes, digest, crypto],
./slashing_protection_common,
@ -44,9 +45,8 @@ export chronicles
type
SlashProtDBMode* = enum
kCompleteArchiveV1 # Complete Format V1 backend (saves all attestations)
kCompleteArchiveV2 # Complete Format V2 backend (saves all attestations)
kLowWatermarkV2 # Low-Watermark Format V2 backend (prunes attestations)
kCompleteArchive # Complete Format V2 backend (saves all attestations)
kLowWatermark # Low-Watermark Format V2 backend (prunes attestations)
SlashingProtectionDB* = ref object
## Database storing the blocks attested
@ -70,15 +70,15 @@ proc init*(
genesis_validators_root: Eth2Digest,
basePath, dbname: string,
modes: set[SlashProtDBMode]
): T =
): T =
## Initialize or load a slashing protection DB
## This is for Beacon Node usage
## Handles DB version migration
doAssert modes.card >= 1, "No slashing protection mode chosen. Choose a v1, a v2 or v1 and v2 slashing DB mode."
doAssert not(
kCompleteArchiveV2 in modes and
kLowWatermarkV2 in modes), "Mode(s): " & $modes & ". Choose only one of V2 DB modes."
kCompleteArchive in modes and
kLowWatermark in modes), "Mode(s): " & $modes & ". Choose only one of V2 DB modes."
new result
result.modes = modes
@ -97,6 +97,7 @@ proc init*(
fatal "The slashing database refers to another chain/mainnet/testnet",
path = basePath/dbname,
genesis_validators_root = genesis_validators_root
quit 1
db_v1.fromRawDB(rawdb)
info "Migrating local validators slashing DB from v1 to v2"
@ -133,7 +134,7 @@ proc init*(
## Does not handle migration
init(
T, genesis_validators_root, basePath, dbname,
modes = {kLowWatermarkV2},
modes = {kLowWatermark}
)
proc loadUnchecked*(
@ -145,14 +146,17 @@ proc loadUnchecked*(
## this doesn't check the genesis validator root
##
## Does not handle migration
new result
result.modes = {kCompleteArchiveV1, kCompleteArchiveV2}
result.db_v2 = SlashingProtectionDB_v2.loadUnchecked(
basePath, dbname, readOnly
)
result.db_v1.fromRawDB(kvstore result.db_v2.getRawDBHandle().openKvStore())
result.modes = {}
try:
result.db_v2 = SlashingProtectionDB_v2.loadUnchecked(
basePath, dbname, readOnly
)
result.modes.incl(kCompleteArchive)
except CatchableError as err:
error "Failed to load the Slashing protection database", err = err.msg
quit 1
proc close*(db: SlashingProtectionDB) =
## Close a slashing protection database
@ -266,11 +270,12 @@ proc pruneAfterFinalization*(
## This ensures that even if pruning is called with an incorrect epoch
## slashing protection can fall back to the minimal / high-watermark protection mode.
##
## Pruning is only done if pruning is enabled (DB in kLowWatermarkV2 mode)
## Pruning is only done if pruning is enabled (DB in kLowWatermark mode)
## Pruning is only triggered on v2 database.
if kLowWatermarkV2 in db.modes:
db.db_v2.pruneAfterFinalization(finalizedEpoch)
if kLowWatermark in db.modes:
debug.logTime "Pruning slashing DB":
db.db_v2.pruneAfterFinalization(finalizedEpoch)
# The high-level import/export functions are
# - importSlashingInterchange
@ -287,3 +292,31 @@ proc inclSPDIR*(db: SlashingProtectionDB, spdir: SPDIR): SlashingImportStatus
proc toSPDIR*(db: SlashingProtectionDB): SPDIR
{.raises: [IOError, Defect].} =
db.db_v2.toSPDIR()
proc exportSlashingInterchange*(
db: SlashingProtectionDB,
path: string,
validatorsWhiteList: seq[PubKey0x] = @[],
prettify = true) {.raises: [Defect, IOError].} =
## Export a database to the Slashing Protection Database Interchange Format
# We could modify toSPDIR to do the filtering directly
# but this is not a performance sensitive operation.
# so it's better to keep it simple.
var spdir = db.toSPDIR()
if validatorsWhiteList.len > 0:
# O(a log b) with b the number of validators to keep
# and a the total number of validators in DB
let validators = validatorsWhiteList.sorted()
spdir.data.keepItIf(validators.binarySearch(it.pubkey) != -1)
if spdir.data.len != validatorsWhiteList.len:
let exportedKeys = spdir.data.mapIt(it.pubkey).sorted()
for v in validators:
if exportedKeys.binarySearch(v) == -1:
warn "Specified validator key not found in the slashing database",
key = v.PubKeyBytes.toHex
Json.saveFile(path, spdir, prettify)
echo "Exported slashing protection DB to '", path, "'"


@ -170,6 +170,15 @@ func `==`*(a, b: BadVote): bool =
of BadVoteKind.DatabaseError:
true
template `==`*(a, b: PubKey0x): bool =
PubKeyBytes(a) == PubKeyBytes(b)
template `<`*(a, b: PubKey0x): bool =
PubKeyBytes(a) < PubKeyBytes(b)
template cmp*(a, b: PubKey0x): int =
cmp(PubKeyBytes(a), PubKeyBytes(b))
func `==`*(a, b: BadProposal): bool =
## Comparison operator.
## Used implicitly by Result when comparing the
@ -196,7 +205,7 @@ proc writeValue*(writer: var JsonWriter, value: PubKey0x)
proc readValue*(reader: var JsonReader, value: var PubKey0x)
{.raises: [SerializationError, IOError, Defect].} =
try:
value = PubKey0x reader.readValue(string).hexToByteArray(RawPubKeySize)
value = PubKey0x hexToByteArray(reader.readValue(string), RawPubKeySize)
except ValueError:
raiseUnexpectedValue(reader, "Hex string expected")
@ -222,14 +231,6 @@ proc readValue*(r: var JsonReader, a: var (SlotString or EpochString))
except ValueError:
raiseUnexpectedValue(r, "Integer in a string expected")
proc exportSlashingInterchange*(
db: auto,
path: string, prettify = true) {.raises: [Defect, IOError].} =
## Export a database to the Slashing Protection Database Interchange Format
let spdir = db.toSPDIR()
Json.saveFile(path, spdir, prettify)
echo "Exported slashing protection DB to '", path, "'"
proc importSlashingInterchange*(
db: auto,
path: string): SlashingImportStatus {.raises: [Defect, IOError, SerializationError].} =


@ -400,7 +400,7 @@ proc loadUnchecked*(
subkey(kGenesisValidatorsRoot)
).get(), "The Slashing DB is missing genesis information"
result = T(db: db, backend: backend)
T(db: db, backend: backend)
proc close*(db: SlashingProtectionDB_v1) =
if db.db != nil:


@ -687,7 +687,7 @@ proc loadUnchecked*(
let alreadyExists = fileExists(path)
if not alreadyExists:
raise newException(IOError, "DB '" & path & "' does not exist.")
result = T(backend: SqStoreRef.init(basePath, dbname, readOnly = readOnly, keyspaces = []).get())
result = T(backend: SqStoreRef.init(basePath, dbname, readOnly = readOnly).get())
# Cached queries
result.setupCachedQueries()


@ -15,8 +15,8 @@ when not defined(nimscript):
const
versionMajor* = 1
versionMinor* = 2
versionBuild* = 2
versionMinor* = 3
versionBuild* = 0
versionBlob* = "stateofus" # Single word - ends up in the default graffitti


@ -1,3 +1,5 @@
import strutils
const nimCachePathOverride {.strdefine.} = ""
when nimCachePathOverride == "":
when defined(release):
@ -79,8 +81,7 @@ switch("define", "withoutPCRE")
switch("import", "testutils/moduletests")
const useLibStackTrace = not defined(macosx) and
not defined(windows) and
const useLibStackTrace = not defined(windows) and
not defined(disable_libbacktrace)
when useLibStackTrace:
@ -90,9 +91,25 @@ else:
--stacktrace:on
--linetrace:on
# the default open files limit is too low on macOS (512), breaking the
# "--debugger:native" build. It can be increased with `ulimit -n 1024`.
if not defined(macosx):
var canEnableDebuggingSymbols = true
if defined(macosx):
# The default open files limit is too low on macOS (512), breaking the
# "--debugger:native" build. It can be increased with `ulimit -n 1024`.
let openFilesLimitTarget = 1024
var openFilesLimit = 0
try:
openFilesLimit = staticExec("ulimit -n").strip(chars = Whitespace + Newlines).parseInt()
if openFilesLimit < openFilesLimitTarget:
echo "Open files limit too low to enable debugging symbols and lightweight stack traces."
echo "Increase it with \"ulimit -n " & $openFilesLimitTarget & "\""
canEnableDebuggingSymbols = false
except:
echo "ulimit error"
# We ignore this resource limit on Windows, where a default `ulimit -n` of 256
# in Git Bash is apparently ignored by the OS, and on Linux where the default of
# 1024 is good enough for us.
if canEnableDebuggingSymbols:
# add debugging symbols and original files and line numbers
--debugger:native

docker/dist/Dockerfile.macos

@ -0,0 +1,18 @@
# The build is reproducible only if this base image stays the same.
FROM statusteam/nimbus_beacon_node:dist_base_20210513160553_macos@sha256:eef4aff594307c0ff615160aa7184b3660648ce929bb670a409428fc32bd04ed
SHELL ["/bin/bash", "-c"]
ARG USER_ID
ARG GROUP_ID
RUN addgroup --gid ${GROUP_ID} user; \
adduser --disabled-password --gecos '' --uid ${USER_ID} --gid ${GROUP_ID} user;
USER user
STOPSIGNAL SIGINT
COPY "entry_point.sh" "/home/user/"
ENTRYPOINT ["/home/user/entry_point.sh", "macOS_amd64"]

docker/dist/Dockerfile.macos-arm64

@ -0,0 +1,18 @@
# The build is reproducible only if this base image stays the same.
FROM statusteam/nimbus_beacon_node:dist_base_20210513160553_macos@sha256:eef4aff594307c0ff615160aa7184b3660648ce929bb670a409428fc32bd04ed
SHELL ["/bin/bash", "-c"]
ARG USER_ID
ARG GROUP_ID
RUN addgroup --gid ${GROUP_ID} user; \
adduser --disabled-password --gecos '' --uid ${USER_ID} --gid ${GROUP_ID} user;
USER user
STOPSIGNAL SIGINT
COPY "entry_point.sh" "/home/user/"
ENTRYPOINT ["/home/user/entry_point.sh", "macOS_arm64"]


@ -1,7 +1,7 @@
# Binary Nimbus beacon node distribution
This binary distribution of the Nimbus eth2 package is compiled
in [reproducible way](https://reproducible-builds.org/) from source files
in a [reproducible way](https://reproducible-builds.org/) from source files
hosted at https://github.com/status-im/nimbus-eth2.
The tarball containing this README uses the following naming scheme:

docker/dist/base_image/Dockerfile.macos

@ -0,0 +1,24 @@
# This Docker image can change from one build to another, because the upstream
# Debian/Ubuntu package index is continuously updated and we have to run
# `apt-get update` in here.
#
# The only way to make this a part of our reproducible build system is to build
# it once, upload it to Docker Hub and make sure it's being pulled regularly so
# it's not deleted after 6 months of inactivity.
FROM ubuntu:20.04
SHELL ["/bin/bash", "-c"]
ENV DEBIAN_FRONTEND=noninteractive TZ="Etc/UTC"
RUN apt-get -qq update \
&& apt-get -qq -y install build-essential git clang-11 llvm-11-dev cmake curl libssl-dev lzma-dev libxml2-dev &>/dev/null \
&& update-alternatives --install /usr/bin/clang clang /usr/bin/clang-11 100 \
&& update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-11 100 \
&& apt-get -qq clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY "build_osxcross.sh" "/root/"
RUN cd /root \
&& ./build_osxcross.sh


@ -8,10 +8,12 @@ IMAGE_NAME := statusteam/nimbus_beacon_node:$(IMAGE_TAG)
build-arm64 \
build-arm \
build-win64 \
build-macos \
push-amd64 \
push-arm64 \
push-arm \
push-win64
push-win64 \
push-macos
build-amd64:
$(CURDIR)/make_base_image.sh amd64 "$(IMAGE_NAME)"
@ -25,6 +27,9 @@ build-arm:
build-win64:
$(CURDIR)/make_base_image.sh win64 "$(IMAGE_NAME)_win64"
build-macos:
$(CURDIR)/make_base_image.sh macos "$(IMAGE_NAME)_macos"
# You probably don't want to recreate and push these base images to Docker Hub,
# because when older images expire and get deleted, it will no longer be possible
# to reproduce old releases.
@ -41,3 +46,6 @@ build-win64:
#push-win64: build-win64
# docker push $(IMAGE_NAME)_win64
#push-macos: build-macos
#docker push $(IMAGE_NAME)_macos

docker/dist/base_image/build_osxcross.sh

@ -0,0 +1,27 @@
#!/bin/bash
set -e
git clone https://github.com/tpoechtrager/osxcross.git
# macOS SDK
cd osxcross/tarballs
MACOS_SDK_VER="11.3"
MACOS_SDK_TARBALL="MacOSX${MACOS_SDK_VER}.sdk.tar.xz"
curl -OLsS https://github.com/phracker/MacOSX-SDKs/releases/download/${MACOS_SDK_VER}/${MACOS_SDK_TARBALL}
cd ..
# build OSXCross toolchain
export TARGET_DIR="/opt/osxcross"
UNATTENDED=1 ./build.sh
# "tools/osxcross_conf.sh" ignores TARGET_DIR and uses "target" instead, so do a symlink
ln -s ${TARGET_DIR} target
./build_llvm_dsymutil.sh
# ridiculous amount of uncompressed man pages
rm -rf ${TARGET_DIR}/SDK/MacOSX${MACOS_SDK_VER}.sdk/usr/share
# cleanup
cd ..
rm -rf osxcross


@ -90,6 +90,66 @@ elif [[ "${PLATFORM}" == "Linux_arm64v8" ]]; then
NIMFLAGS="-d:disableMarchNative -d:chronicles_sinks=textlines -d:chronicles_colors=none --cpu:arm64 --gcc.exe=${CC} --gcc.linkerexe=${CC}" \
PARTIAL_STATIC_LINKING=1 \
${BINARIES}
elif [[ "${PLATFORM}" == "macOS_amd64" ]]; then
export PATH="/opt/osxcross/bin:${PATH}"
export OSXCROSS_MP_INC=1 # sets up include and library paths
export ZERO_AR_DATE=1 # avoid timestamps in binaries
DARWIN_VER="20.4"
CC="o64-clang"
make \
-j$(nproc) \
USE_LIBBACKTRACE=0 \
QUICK_AND_DIRTY_COMPILER=1 \
deps-common build/generate_makefile
make \
-j$(nproc) \
CC="${CC}" \
LIBTOOL="x86_64-apple-darwin${DARWIN_VER}-libtool" \
OS="darwin" \
NIMFLAGS="-d:disableMarchNative -d:chronicles_sinks=textlines -d:chronicles_colors=none --os:macosx --clang.exe=${CC}" \
nat-libs
make \
-j$(nproc) \
LOG_LEVEL="TRACE" \
CC="${CC}" \
AR="x86_64-apple-darwin${DARWIN_VER}-ar" \
RANLIB="x86_64-apple-darwin${DARWIN_VER}-ranlib" \
CMAKE="x86_64-apple-darwin${DARWIN_VER}-cmake" \
DSYMUTIL="x86_64-apple-darwin${DARWIN_VER}-dsymutil" \
FORCE_DSYMUTIL=1 \
USE_VENDORED_LIBUNWIND=1 \
NIMFLAGS="-d:disableMarchNative -d:chronicles_sinks=textlines -d:chronicles_colors=none --os:macosx --clang.exe=${CC} --clang.linkerexe=${CC}" \
${BINARIES}
elif [[ "${PLATFORM}" == "macOS_arm64" ]]; then
export PATH="/opt/osxcross/bin:${PATH}"
export OSXCROSS_MP_INC=1 # sets up include and library paths
export ZERO_AR_DATE=1 # avoid timestamps in binaries
DARWIN_VER="20.4"
CC="oa64-clang"
make \
-j$(nproc) \
USE_LIBBACKTRACE=0 \
QUICK_AND_DIRTY_COMPILER=1 \
deps-common build/generate_makefile
make \
-j$(nproc) \
CC="${CC}" \
LIBTOOL="arm64-apple-darwin${DARWIN_VER}-libtool" \
OS="darwin" \
NIMFLAGS="-d:disableMarchNative -d:chronicles_sinks=textlines -d:chronicles_colors=none --os:macosx --cpu:arm64 --clang.exe=${CC}" \
nat-libs
make \
-j$(nproc) \
LOG_LEVEL="TRACE" \
CC="${CC}" \
AR="arm64-apple-darwin${DARWIN_VER}-ar" \
RANLIB="arm64-apple-darwin${DARWIN_VER}-ranlib" \
CMAKE="arm64-apple-darwin${DARWIN_VER}-cmake" \
DSYMUTIL="arm64-apple-darwin${DARWIN_VER}-dsymutil" \
FORCE_DSYMUTIL=1 \
USE_VENDORED_LIBUNWIND=1 \
NIMFLAGS="-d:disableMarchNative -d:chronicles_sinks=textlines -d:chronicles_colors=none --os:macosx --cpu:arm64 --clang.exe=${CC} --clang.linkerexe=${CC}" \
${BINARIES}
else
make \
-j$(nproc) \
@ -119,6 +179,10 @@ mkdir "${DIST_PATH}/build"
# copy and checksum binaries, copy scripts and docs
for BINARY in ${BINARIES}; do
cp -a "./build/${BINARY}" "${DIST_PATH}/build/"
if [[ "${PLATFORM}" =~ macOS ]]; then
# debug info
cp -a "./build/${BINARY}.dSYM" "${DIST_PATH}/build/"
fi
cd "${DIST_PATH}/build"
sha512sum "${BINARY}" > "${BINARY}.sha512sum"
if [[ "${PLATFORM}" == "Windows_amd64" ]]; then
@ -137,6 +201,10 @@ elif [[ "${PLATFORM}" == "Linux_arm64v8" ]]; then
elif [[ "${PLATFORM}" == "Windows_amd64" ]]; then
sed -i -e 's/^make dist$/make dist-win64/' "${DIST_PATH}/README.md"
cp -a docker/dist/README-Windows.md "${DIST_PATH}/"
elif [[ "${PLATFORM}" == "macOS_amd64" ]]; then
sed -i -e 's/^make dist$/make dist-macos/' "${DIST_PATH}/README.md"
elif [[ "${PLATFORM}" == "macOS_arm64" ]]; then
sed -i -e 's/^make dist$/make dist-macos-arm64/' "${DIST_PATH}/README.md"
fi
cp -a scripts/run-beacon-node.sh "${DIST_PATH}/scripts"


@ -1,44 +0,0 @@
# beacon_chain
# Copyright (c) 2018-2020 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.
# Import/export the validator slashing protection database
import
std/[os, strutils],
confutils,
eth/db/[kvstore, kvstore_sqlite3],
../beacon_chain/validators/slashing_protection,
../beacon_chain/spec/digest
type
SlashProtCmd = enum
dump = "Dump the validator slashing protection DB to json"
restore = "Restore the validator slashing protection DB from json"
SlashProtConf = object
case cmd {.
command,
desc: "Dump database or restore" .}: SlashProtCmd
of dump, restore:
infile {.argument.}: string
outfile {.argument.}: string
proc doDump(conf: SlashProtConf) =
let (dir, file) = splitPath(conf.infile)
# TODO: Make it read-only https://github.com/status-im/nim-eth/issues/312
# TODO: why is sqlite3 always appending .sqlite3 ?
let filetrunc = file.changeFileExt("")
let db = SlashingProtectionDB.loadUnchecked(dir, filetrunc, readOnly = false)
db.exportSlashingInterchange(conf.outfile)
when isMainModule:
let conf = SlashProtConf.load()
case conf.cmd:
of dump: conf.doDump()
of restore: doAssert false, "unimplemented"


@ -26,3 +26,7 @@ build/generate_makefile "nimcache/release/${BINARY}/${PROJECT_NAME}.json" "nimca
[[ "$V" == "0" ]] && exec &>/dev/null
"${MAKE}" -f "nimcache/release/${BINARY}/${BINARY}.makefile" --no-print-directory build
if uname | grep -qi darwin || [[ -n "${FORCE_DSYMUTIL}" ]]; then
[[ -z "${DSYMUTIL}" ]] && DSYMUTIL="dsymutil"
"${DSYMUTIL}" build/${BINARY}
fi


@ -30,7 +30,8 @@ DOCKER_BUILDKIT=1 \
--build-arg GROUP_ID=$(id -g) \
-f Dockerfile.${ARCH} .
docker run --rm --name ${DOCKER_TAG} -v ${REPO_DIR}:/home/user/nimbus-eth2 ${DOCKER_TAG}
# seccomp can have some serious overhead, so we disable it with "--privileged" - https://pythonspeed.com/articles/docker-performance-overhead/
docker run --privileged --rm --name ${DOCKER_TAG} -v ${REPO_DIR}:/home/user/nimbus-eth2 ${DOCKER_TAG}
cd - &>/dev/null

@ -1 +1 @@
Subproject commit f091a70a5bf95ec772c8b4d9978e81b8ae89af0c
Subproject commit 6036a47000e342ba3b31eb74dac995e3cf922b56

@ -1 +1 @@
Subproject commit ce966b1c469dda179b54346feaaf1a62202c984f
Subproject commit 63196b0628fd6742a9467842f771d3b1ed1fb803