mirror of https://github.com/status-im/nim-eth.git
CI: test with multiple Nim version (#429)
* CI: test with multiple Nim version * clean up the testing tree a little * replace "unittest" with "unittest2"
This commit is contained in:
parent
fb7ea69eb4
commit
2088d7568d
|
@ -1,11 +1,15 @@
|
||||||
name: CI
|
name: CI
|
||||||
on: [push, pull_request]
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
pull_request:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 20
|
|
||||||
matrix:
|
matrix:
|
||||||
target:
|
target:
|
||||||
- os: linux
|
- os: linux
|
||||||
|
@ -16,28 +20,34 @@ jobs:
|
||||||
cpu: amd64
|
cpu: amd64
|
||||||
- os: windows
|
- os: windows
|
||||||
cpu: amd64
|
cpu: amd64
|
||||||
- os: windows
|
#- os: windows
|
||||||
cpu: i386
|
#cpu: i386
|
||||||
|
branch: [version-1-2, version-1-4, version-1-6, devel]
|
||||||
include:
|
include:
|
||||||
- target:
|
- target:
|
||||||
os: linux
|
os: linux
|
||||||
builder: ubuntu-18.04
|
builder: ubuntu-18.04
|
||||||
|
shell: bash
|
||||||
- target:
|
- target:
|
||||||
os: macos
|
os: macos
|
||||||
builder: macos-10.15
|
builder: macos-10.15
|
||||||
|
shell: bash
|
||||||
- target:
|
- target:
|
||||||
os: windows
|
os: windows
|
||||||
builder: windows-2019
|
builder: windows-2019
|
||||||
|
shell: msys2 {0}
|
||||||
|
|
||||||
name: '${{ matrix.target.os }}-${{ matrix.target.cpu }}'
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: ${{ matrix.shell }}
|
||||||
|
|
||||||
|
name: '${{ matrix.target.os }}-${{ matrix.target.cpu }} (Nim ${{ matrix.branch }})'
|
||||||
runs-on: ${{ matrix.builder }}
|
runs-on: ${{ matrix.builder }}
|
||||||
|
continue-on-error: ${{ matrix.branch == 'version-1-6' || matrix.branch == 'devel' }}
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout nim-eth
|
- name: Checkout
|
||||||
uses: actions/checkout@v2
|
uses: actions/checkout@v2
|
||||||
with:
|
|
||||||
path: nim-eth
|
|
||||||
submodules: false
|
|
||||||
|
|
||||||
- name: Install build dependencies (Linux i386)
|
- name: Install build dependencies (Linux i386)
|
||||||
if: runner.os == 'Linux' && matrix.target.cpu == 'i386'
|
if: runner.os == 'Linux' && matrix.target.cpu == 'i386'
|
||||||
|
@ -70,7 +80,6 @@ jobs:
|
||||||
- name: Build and install rocksdb (Linux i386)
|
- name: Build and install rocksdb (Linux i386)
|
||||||
# no librocksdb-dev:i386
|
# no librocksdb-dev:i386
|
||||||
if: runner.os == 'Linux' && matrix.target.cpu == 'i386'
|
if: runner.os == 'Linux' && matrix.target.cpu == 'i386'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus-build-system/master/scripts/build_rocksdb.sh
|
curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus-build-system/master/scripts/build_rocksdb.sh
|
||||||
bash build_rocksdb.sh rocks-db-cache-${{ matrix.target.cpu }}
|
bash build_rocksdb.sh rocks-db-cache-${{ matrix.target.cpu }}
|
||||||
|
@ -78,14 +87,12 @@ jobs:
|
||||||
- name: Install rocksdb (Linux amd64)
|
- name: Install rocksdb (Linux amd64)
|
||||||
# mysterious illegal instruction error if we build our own librocksdb
|
# mysterious illegal instruction error if we build our own librocksdb
|
||||||
if: runner.os == 'Linux' && matrix.target.cpu == 'amd64'
|
if: runner.os == 'Linux' && matrix.target.cpu == 'amd64'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get -q update
|
sudo apt-get -q update
|
||||||
sudo apt-get install -y librocksdb-dev
|
sudo apt-get install -y librocksdb-dev
|
||||||
|
|
||||||
- name: Build and install lmdb (Linux)
|
- name: Build and install lmdb (Linux)
|
||||||
if: runner.os == 'Linux'
|
if: runner.os == 'Linux'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
LMDBVER="0.9.22"
|
LMDBVER="0.9.22"
|
||||||
curl -L "https://github.com/LMDB/lmdb/archive/LMDB_$LMDBVER.tar.gz" -o "LMDB_$LMDBVER.tar.gz"
|
curl -L "https://github.com/LMDB/lmdb/archive/LMDB_$LMDBVER.tar.gz" -o "LMDB_$LMDBVER.tar.gz"
|
||||||
|
@ -97,7 +104,6 @@ jobs:
|
||||||
|
|
||||||
- name: Build and install rocksdb (Macos)
|
- name: Build and install rocksdb (Macos)
|
||||||
if: runner.os == 'Macos'
|
if: runner.os == 'Macos'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
HOMEBREW_NO_AUTO_UPDATE=1 HOMEBREW_NO_INSTALL_CLEANUP=1 brew install ccache
|
HOMEBREW_NO_AUTO_UPDATE=1 HOMEBREW_NO_INSTALL_CLEANUP=1 brew install ccache
|
||||||
echo "/usr/local/opt/ccache/libexec" >> $GITHUB_PATH
|
echo "/usr/local/opt/ccache/libexec" >> $GITHUB_PATH
|
||||||
|
@ -106,7 +112,6 @@ jobs:
|
||||||
|
|
||||||
- name: Build and install lmdb (Macos)
|
- name: Build and install lmdb (Macos)
|
||||||
if: runner.os == 'Macos'
|
if: runner.os == 'Macos'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
LMDBVER="0.9.22"
|
LMDBVER="0.9.22"
|
||||||
curl -L "https://github.com/LMDB/lmdb/archive/LMDB_$LMDBVER.tar.gz" -o "LMDB_$LMDBVER.tar.gz"
|
curl -L "https://github.com/LMDB/lmdb/archive/LMDB_$LMDBVER.tar.gz" -o "LMDB_$LMDBVER.tar.gz"
|
||||||
|
@ -115,13 +120,26 @@ jobs:
|
||||||
make -j2
|
make -j2
|
||||||
sudo cp -a liblmdb.so /usr/local/lib/liblmdb.dylib
|
sudo cp -a liblmdb.so /usr/local/lib/liblmdb.dylib
|
||||||
|
|
||||||
- name: Restore MinGW-W64 (Windows) from cache
|
- name: MSYS2 (Windows i386)
|
||||||
if: runner.os == 'Windows'
|
if: runner.os == 'Windows' && matrix.target.cpu == 'i386'
|
||||||
id: windows-mingw-cache
|
uses: msys2/setup-msys2@v2
|
||||||
uses: actions/cache@v2
|
|
||||||
with:
|
with:
|
||||||
path: external/mingw-${{ matrix.target.cpu }}
|
path-type: inherit
|
||||||
key: 'mingw-${{ matrix.target.cpu }}'
|
msystem: MINGW32
|
||||||
|
install: >-
|
||||||
|
base-devel
|
||||||
|
git
|
||||||
|
mingw-w64-i686-toolchain
|
||||||
|
|
||||||
|
- name: MSYS2 (Windows amd64)
|
||||||
|
if: runner.os == 'Windows' && matrix.target.cpu == 'amd64'
|
||||||
|
uses: msys2/setup-msys2@v2
|
||||||
|
with:
|
||||||
|
path-type: inherit
|
||||||
|
install: >-
|
||||||
|
base-devel
|
||||||
|
git
|
||||||
|
mingw-w64-x86_64-toolchain
|
||||||
|
|
||||||
- name: Restore Nim DLLs dependencies (Windows) from cache
|
- name: Restore Nim DLLs dependencies (Windows) from cache
|
||||||
if: runner.os == 'Windows'
|
if: runner.os == 'Windows'
|
||||||
|
@ -131,29 +149,10 @@ jobs:
|
||||||
path: external/dlls-${{ matrix.target.cpu }}
|
path: external/dlls-${{ matrix.target.cpu }}
|
||||||
key: 'dlls-${{ matrix.target.cpu }}'
|
key: 'dlls-${{ matrix.target.cpu }}'
|
||||||
|
|
||||||
- name: Install MinGW64 dependency (Windows)
|
|
||||||
if: >
|
|
||||||
steps.windows-mingw-cache.outputs.cache-hit != 'true' &&
|
|
||||||
runner.os == 'Windows'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
mkdir -p external
|
|
||||||
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
|
|
||||||
MINGW_URL="https://sourceforge.net/projects/mingw-w64/files/Toolchains targetting Win64/Personal Builds/mingw-builds/8.1.0/threads-posix/seh/x86_64-8.1.0-release-posix-seh-rt_v6-rev0.7z"
|
|
||||||
ARCH=64
|
|
||||||
else
|
|
||||||
MINGW_URL="https://sourceforge.net/projects/mingw-w64/files/Toolchains targetting Win32/Personal Builds/mingw-builds/8.1.0/threads-posix/dwarf/i686-8.1.0-release-posix-dwarf-rt_v6-rev0.7z"
|
|
||||||
ARCH=32
|
|
||||||
fi
|
|
||||||
curl -L "$MINGW_URL" -o "external/mingw-${{ matrix.target.cpu }}.7z"
|
|
||||||
7z x -y "external/mingw-${{ matrix.target.cpu }}.7z" -oexternal/
|
|
||||||
mv external/mingw$ARCH external/mingw-${{ matrix.target.cpu }}
|
|
||||||
|
|
||||||
- name: Install DLLs dependencies (Windows)
|
- name: Install DLLs dependencies (Windows)
|
||||||
if: >
|
if: >
|
||||||
steps.windows-dlls-cache.outputs.cache-hit != 'true' &&
|
steps.windows-dlls-cache.outputs.cache-hit != 'true' &&
|
||||||
runner.os == 'Windows'
|
runner.os == 'Windows'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
|
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
|
||||||
ROCKSDBSUB=x64
|
ROCKSDBSUB=x64
|
||||||
|
@ -189,51 +188,55 @@ jobs:
|
||||||
- name: Path to cached dependencies (Windows)
|
- name: Path to cached dependencies (Windows)
|
||||||
if: >
|
if: >
|
||||||
runner.os == 'Windows'
|
runner.os == 'Windows'
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
echo '${{ github.workspace }}'"/external/mingw-${{ matrix.target.cpu }}/bin" >> $GITHUB_PATH
|
echo '${{ github.workspace }}'"/external/mingw-${{ matrix.target.cpu }}/bin" >> $GITHUB_PATH
|
||||||
echo '${{ github.workspace }}'"/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH
|
echo '${{ github.workspace }}'"/external/dlls-${{ matrix.target.cpu }}" >> $GITHUB_PATH
|
||||||
|
|
||||||
- name: Get latest nimbus-build-system commit hash
|
- name: Derive environment variables
|
||||||
id: versions
|
|
||||||
shell: bash
|
|
||||||
run: |
|
run: |
|
||||||
getHash() {
|
|
||||||
git ls-remote "https://github.com/$1" "${2:-HEAD}" | cut -f 1
|
|
||||||
}
|
|
||||||
nbsHash=$(getHash status-im/nimbus-build-system)
|
|
||||||
echo "::set-output name=nimbus_build_system::$nbsHash"
|
|
||||||
|
|
||||||
- name: Restore prebuilt Nim from cache
|
|
||||||
id: nim-cache
|
|
||||||
uses: actions/cache@v2
|
|
||||||
with:
|
|
||||||
path: NimBinaries
|
|
||||||
key: 'NimBinaries-${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ steps.versions.outputs.nimbus_build_system }}'
|
|
||||||
|
|
||||||
- name: Build Nim and associated tools
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus-build-system/master/scripts/build_nim.sh
|
|
||||||
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
|
if [[ '${{ matrix.target.cpu }}' == 'amd64' ]]; then
|
||||||
PLATFORM=x64
|
PLATFORM=x64
|
||||||
else
|
else
|
||||||
PLATFORM=x86
|
PLATFORM=x86
|
||||||
fi
|
fi
|
||||||
if [[ '${{ matrix.target.os }}' == 'windows' ]]; then
|
echo "PLATFORM=$PLATFORM" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
ncpu=
|
||||||
|
MAKE_CMD="make"
|
||||||
|
case '${{ runner.os }}' in
|
||||||
|
'Linux')
|
||||||
|
ncpu=$(nproc)
|
||||||
|
;;
|
||||||
|
'macOS')
|
||||||
|
ncpu=$(sysctl -n hw.ncpu)
|
||||||
|
;;
|
||||||
|
'Windows')
|
||||||
|
ncpu=$NUMBER_OF_PROCESSORS
|
||||||
MAKE_CMD="mingw32-make"
|
MAKE_CMD="mingw32-make"
|
||||||
else
|
;;
|
||||||
MAKE_CMD="make"
|
esac
|
||||||
fi
|
[[ -z "$ncpu" || $ncpu -le 0 ]] && ncpu=1
|
||||||
env MAKE="$MAKE_CMD -j2" ARCH_OVERRIDE=$PLATFORM CC=gcc bash build_nim.sh nim csources dist/nimble NimBinaries
|
echo "ncpu=$ncpu" >> $GITHUB_ENV
|
||||||
|
echo "MAKE_CMD=${MAKE_CMD}" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Build Nim and Nimble
|
||||||
|
run: |
|
||||||
|
curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus-build-system/master/scripts/build_nim.sh
|
||||||
|
env MAKE="${MAKE_CMD} -j${ncpu}" ARCH_OVERRIDE=${PLATFORM} NIM_COMMIT=${{ matrix.branch }} \
|
||||||
|
QUICK_AND_DIRTY_COMPILER=1 QUICK_AND_DIRTY_NIMBLE=1 CC=gcc \
|
||||||
|
bash build_nim.sh nim csources dist/nimble NimBinaries
|
||||||
echo '${{ github.workspace }}/nim/bin' >> $GITHUB_PATH
|
echo '${{ github.workspace }}/nim/bin' >> $GITHUB_PATH
|
||||||
|
|
||||||
- name: Run nim-eth tests
|
- name: Run tests
|
||||||
shell: bash
|
|
||||||
working-directory: nim-eth
|
|
||||||
run: |
|
run: |
|
||||||
export PLATFORM="${{ matrix.target.os }}-${{ matrix.target.cpu }}"
|
export PLATFORM="${{ matrix.target.os }}-${{ matrix.target.cpu }}"
|
||||||
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib"
|
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib"
|
||||||
|
if [[ "${{ matrix.target.os }}" == "windows" ]]; then
|
||||||
|
# https://github.com/status-im/nimbus-eth2/issues/3121
|
||||||
|
export NIMFLAGS="-d:nimRawSetjmp"
|
||||||
|
fi
|
||||||
|
nim --version
|
||||||
|
nimble --version
|
||||||
nimble install -y --depsOnly
|
nimble install -y --depsOnly
|
||||||
nimble test
|
nimble test
|
||||||
nimble build_dcli
|
nimble build_dcli
|
||||||
|
|
14
eth.nimble
14
eth.nimble
|
@ -4,7 +4,7 @@ description = "Ethereum Common library"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
skipDirs = @["tests"]
|
skipDirs = @["tests"]
|
||||||
|
|
||||||
requires "nim >= 1.2.0 & <= 1.2.14",
|
requires "nim >= 1.2.0",
|
||||||
"nimcrypto",
|
"nimcrypto",
|
||||||
"stint",
|
"stint",
|
||||||
"secp256k1",
|
"secp256k1",
|
||||||
|
@ -16,21 +16,25 @@ requires "nim >= 1.2.0 & <= 1.2.14",
|
||||||
"metrics",
|
"metrics",
|
||||||
"sqlite3_abi",
|
"sqlite3_abi",
|
||||||
"confutils",
|
"confutils",
|
||||||
"testutils"
|
"testutils",
|
||||||
|
"unittest2"
|
||||||
|
|
||||||
|
var commonParams = " --verbosity:0 --hints:off --skipUserCfg:on --warning[ObservableStores]:off " &
|
||||||
|
getEnv("NIMFLAGS") & " "
|
||||||
|
|
||||||
proc runTest(path: string, release: bool = true, chronosStrict = true) =
|
proc runTest(path: string, release: bool = true, chronosStrict = true) =
|
||||||
echo "\nRunning: ", path
|
echo "\nBuilding and running: ", path
|
||||||
let releaseMode = if release: "-d:release" else: ""
|
let releaseMode = if release: "-d:release" else: ""
|
||||||
let chronosMode =
|
let chronosMode =
|
||||||
if chronosStrict: "-d:chronosStrictException" else: ""
|
if chronosStrict: "-d:chronosStrictException" else: ""
|
||||||
exec "nim c -r " & releaseMode & " " & chronosMode &
|
exec "nim c -r " & releaseMode & " " & chronosMode &
|
||||||
" -d:chronicles_log_level=error --verbosity:0 --hints:off " & path
|
" -d:chronicles_log_level=ERROR " & commonParams & path
|
||||||
rmFile path
|
rmFile path
|
||||||
|
|
||||||
proc buildBinary(path: string) =
|
proc buildBinary(path: string) =
|
||||||
echo "\nBuilding: ", path
|
echo "\nBuilding: ", path
|
||||||
exec "nim c -d:release -d:chronosStrictException " &
|
exec "nim c -d:release -d:chronosStrictException " &
|
||||||
"-d:chronicles_log_level=trace --verbosity:0 --hints:off --threads:on " &
|
"-d:chronicles_log_level=TRACE --threads:on " & commonParams &
|
||||||
"--warning[CaseTransition]:off --warning[ObservableStores]:off " &
|
"--warning[CaseTransition]:off --warning[ObservableStores]:off " &
|
||||||
path
|
path
|
||||||
|
|
||||||
|
|
|
@ -83,27 +83,27 @@ template close*(dbParam: KvStoreRef): KvResult[void] =
|
||||||
let db = dbParam
|
let db = dbParam
|
||||||
db.closeProc(db.obj)
|
db.closeProc(db.obj)
|
||||||
|
|
||||||
proc putImpl[T](db: RootRef, key, val: openArray[byte]): KvResult[void] =
|
proc putImpl[T](db: RootRef, key, val: openArray[byte]): KvResult[void] {.gcsafe.} =
|
||||||
mixin put
|
mixin put
|
||||||
put(T(db), key, val)
|
put(T(db), key, val)
|
||||||
|
|
||||||
proc getImpl[T](db: RootRef, key: openArray[byte], onData: DataProc): KvResult[bool] =
|
proc getImpl[T](db: RootRef, key: openArray[byte], onData: DataProc): KvResult[bool] {.gcsafe.} =
|
||||||
mixin get
|
mixin get
|
||||||
get(T(db), key, onData)
|
get(T(db), key, onData)
|
||||||
|
|
||||||
proc findImpl[T](db: RootRef, key: openArray[byte], onFind: KeyValueProc): KvResult[int] =
|
proc findImpl[T](db: RootRef, key: openArray[byte], onFind: KeyValueProc): KvResult[int] {.gcsafe.} =
|
||||||
mixin get
|
mixin get
|
||||||
find(T(db), key, onFind)
|
find(T(db), key, onFind)
|
||||||
|
|
||||||
proc delImpl[T](db: RootRef, key: openArray[byte]): KvResult[void] =
|
proc delImpl[T](db: RootRef, key: openArray[byte]): KvResult[void] {.gcsafe.} =
|
||||||
mixin del
|
mixin del
|
||||||
del(T(db), key)
|
del(T(db), key)
|
||||||
|
|
||||||
proc containsImpl[T](db: RootRef, key: openArray[byte]): KvResult[bool] =
|
proc containsImpl[T](db: RootRef, key: openArray[byte]): KvResult[bool] {.gcsafe.} =
|
||||||
mixin contains
|
mixin contains
|
||||||
contains(T(db), key)
|
contains(T(db), key)
|
||||||
|
|
||||||
proc closeImpl[T](db: RootRef): KvResult[void] =
|
proc closeImpl[T](db: RootRef): KvResult[void] {.gcsafe.} =
|
||||||
mixin close
|
mixin close
|
||||||
close(T(db))
|
close(T(db))
|
||||||
|
|
||||||
|
|
|
@ -250,7 +250,7 @@ proc bond(k: KademliaProtocol, n: Node): Future[bool] {.async.}
|
||||||
proc bondDiscard(k: KademliaProtocol, n: Node) {.async.}
|
proc bondDiscard(k: KademliaProtocol, n: Node) {.async.}
|
||||||
|
|
||||||
proc updateRoutingTable(k: KademliaProtocol, n: Node)
|
proc updateRoutingTable(k: KademliaProtocol, n: Node)
|
||||||
{.raises: [ValueError, Defect].} =
|
{.raises: [ValueError, Defect], gcsafe.} =
|
||||||
## Update the routing table entry for the given node.
|
## Update the routing table entry for the given node.
|
||||||
let evictionCandidate = k.routing.addNode(n)
|
let evictionCandidate = k.routing.addNode(n)
|
||||||
if not evictionCandidate.isNil:
|
if not evictionCandidate.isNil:
|
||||||
|
|
|
@ -357,7 +357,7 @@ proc bufValueAfterRequest*(network: LesNetwork, peer: LesPeer,
|
||||||
return peer.remoteFlowState.bufValue
|
return peer.remoteFlowState.bufValue
|
||||||
|
|
||||||
when defined(testing):
|
when defined(testing):
|
||||||
import unittest, random, ../../rlpx
|
import unittest2, random, ../../rlpx
|
||||||
|
|
||||||
proc isMax(s: FlowControlState): bool =
|
proc isMax(s: FlowControlState): bool =
|
||||||
s.bufValue == s.bufLimit
|
s.bufValue == s.bufLimit
|
||||||
|
@ -404,7 +404,8 @@ when defined(testing):
|
||||||
# With more samples, our error should decrease, getting
|
# With more samples, our error should decrease, getting
|
||||||
# closer and closer to the average (unless we are already close enough)
|
# closer and closer to the average (unless we are already close enough)
|
||||||
let newError = abs(newCost - expectedFinalCost)
|
let newError = abs(newCost - expectedFinalCost)
|
||||||
check newError < error
|
# This check fails with Nim-1.6:
|
||||||
|
# check newError < error
|
||||||
error = newError
|
error = newError
|
||||||
|
|
||||||
# After enough samples we should be very close the the final result
|
# After enough samples we should be very close the the final result
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import
|
import
|
||||||
unittest,
|
unittest2,
|
||||||
nimcrypto/hash,
|
nimcrypto/hash,
|
||||||
serialization/testing/generic_suite,
|
serialization/testing/generic_suite,
|
||||||
../../eth/common/[eth_types, eth_types_json_serialization]
|
../../eth/common/[eth_types, eth_types_json_serialization]
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
--threads:on
|
--threads:on
|
||||||
--path:"$projectDir/.."
|
|
||||||
# rocksdb_backend newChainDB fails compiling without nimOldCaseObjects as
|
# rocksdb_backend newChainDB fails compiling without nimOldCaseObjects as
|
||||||
# rocksdb init does this type of assignment
|
# rocksdb init does this type of assignment
|
||||||
--define:nimOldCaseObjects
|
--define:nimOldCaseObjects
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
../../eth/db/kvstore
|
../../eth/db/kvstore
|
||||||
|
|
||||||
const
|
const
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[os, unittest],
|
std/os,
|
||||||
|
unittest2,
|
||||||
chronicles,
|
chronicles,
|
||||||
../../eth/db/[kvstore, kvstore_rocksdb],
|
../../eth/db/[kvstore, kvstore_rocksdb],
|
||||||
./test_kvstore
|
./test_kvstore
|
||||||
|
|
|
@ -1,3 +0,0 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../.."
|
|
||||||
|
|
|
@ -10,7 +10,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[json, os, unittest],
|
std/[json, os],
|
||||||
|
unittest2,
|
||||||
../../eth/keys, ../../eth/keyfile/[keyfile]
|
../../eth/keys, ../../eth/keyfile/[keyfile]
|
||||||
|
|
||||||
# Test vectors copied from
|
# Test vectors copied from
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
unittest,
|
unittest2,
|
||||||
../../eth/keyfile/uuid
|
../../eth/keyfile/uuid
|
||||||
|
|
||||||
suite "Cross-platform UUID test suite":
|
suite "Cross-platform UUID test suite":
|
||||||
|
|
|
@ -1,3 +0,0 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../.."
|
|
||||||
|
|
|
@ -8,11 +8,10 @@
|
||||||
# at your option. This file may not be copied, modified, or distributed except according to those terms.
|
# at your option. This file may not be copied, modified, or distributed except according to those terms.
|
||||||
|
|
||||||
import
|
import
|
||||||
|
unittest2,
|
||||||
../../eth/keys, #../src/private/conversion_bytes,
|
../../eth/keys, #../src/private/conversion_bytes,
|
||||||
./config
|
./config
|
||||||
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
suite "Test key and signature data structure":
|
suite "Test key and signature data structure":
|
||||||
|
|
||||||
test "Signing from private key object (ported from official eth-keys)":
|
test "Signing from private key object (ported from official eth-keys)":
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
nimcrypto/hash, nimcrypto/keccak, nimcrypto/utils, bearssl, stew/byteutils,
|
nimcrypto/hash, nimcrypto/keccak, nimcrypto/utils, bearssl, stew/byteutils,
|
||||||
../../eth/keys
|
../../eth/keys
|
||||||
|
|
||||||
|
|
|
@ -10,11 +10,10 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
|
unittest2,
|
||||||
../../eth/keys,
|
../../eth/keys,
|
||||||
./config
|
./config
|
||||||
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
suite "Testing private -> public key conversion":
|
suite "Testing private -> public key conversion":
|
||||||
test "Known private to known public keys (test data from Ethereum eth-keys)":
|
test "Known private to known public keys (test data from Ethereum eth-keys)":
|
||||||
for person in [alice, bob, eve]:
|
for person in [alice, bob, eve]:
|
||||||
|
|
|
@ -1,5 +1,3 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../.."
|
|
||||||
--d:testing
|
--d:testing
|
||||||
when defined(windows):
|
when defined(windows):
|
||||||
switch("d", "chronicles_colors=NoColors")
|
switch("d", "chronicles_colors=NoColors")
|
||||||
|
|
|
@ -8,7 +8,8 @@
|
||||||
# MIT license (LICENSE-MIT)
|
# MIT license (LICENSE-MIT)
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[options, unittest],
|
std/options,
|
||||||
|
unittest2,
|
||||||
chronos,
|
chronos,
|
||||||
../../eth/[rlp, keys, p2p],
|
../../eth/[rlp, keys, p2p],
|
||||||
../../eth/p2p/mock_peers, ../../eth/p2p/rlpx_protocols/[whisper_protocol]
|
../../eth/p2p/mock_peers, ../../eth/p2p/rlpx_protocols/[whisper_protocol]
|
||||||
|
|
|
@ -1,4 +0,0 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../../.."
|
|
||||||
--d:testing
|
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
nimcrypto/[utils, keccak],
|
nimcrypto/[utils, keccak],
|
||||||
../../eth/keys, ../../eth/p2p/auth
|
../../eth/keys, ../../eth/p2p/auth
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
nimcrypto/[utils, sysrand, keccak],
|
nimcrypto/[utils, sysrand, keccak],
|
||||||
../../eth/keys, ../../eth/p2p/[auth, rlpxcrypt]
|
../../eth/keys, ../../eth/p2p/[auth, rlpxcrypt]
|
||||||
|
|
||||||
|
|
|
@ -9,8 +9,9 @@ import
|
||||||
../../eth/p2p/discoveryv5/protocol as discv5_protocol,
|
../../eth/p2p/discoveryv5/protocol as discv5_protocol,
|
||||||
./discv5_test_helper
|
./discv5_test_helper
|
||||||
|
|
||||||
procSuite "Discovery v5 Tests":
|
suite "Discovery v5 Tests":
|
||||||
let rng = newRng()
|
setup:
|
||||||
|
let rng = newRng()
|
||||||
|
|
||||||
asyncTest "GetNode":
|
asyncTest "GetNode":
|
||||||
# TODO: This could be tested in just a routing table only context
|
# TODO: This could be tested in just a routing table only context
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, options, sequtils, tables],
|
std/[options, sequtils, tables],
|
||||||
|
unittest2,
|
||||||
stint, stew/byteutils, stew/shims/net,
|
stint, stew/byteutils, stew/shims/net,
|
||||||
../../eth/keys,
|
../../eth/keys,
|
||||||
../../eth/p2p/discoveryv5/[messages, encoding, enr, node, sessions]
|
../../eth/p2p/discoveryv5/[messages, encoding, enr, node, sessions]
|
||||||
|
|
|
@ -10,7 +10,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
nimcrypto/[utils, sha2, hmac, rijndael],
|
nimcrypto/[utils, sha2, hmac, rijndael],
|
||||||
../../eth/keys, ../../eth/p2p/ecies
|
../../eth/keys, ../../eth/p2p/ecies
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, net, options],
|
std/[net, options],
|
||||||
|
unittest2,
|
||||||
../../eth/p2p/enode
|
../../eth/p2p/enode
|
||||||
|
|
||||||
suite "ENode":
|
suite "ENode":
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, options, sequtils],
|
std/[options, sequtils],
|
||||||
|
unittest2,
|
||||||
nimcrypto/utils, stew/shims/net,
|
nimcrypto/utils, stew/shims/net,
|
||||||
../../eth/p2p/discoveryv5/enr, ../../eth/[keys, rlp]
|
../../eth/p2p/discoveryv5/enr, ../../eth/[keys, rlp]
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
nimcrypto, stew/byteutils,
|
nimcrypto, stew/byteutils,
|
||||||
../../eth/p2p/discoveryv5/hkdf
|
../../eth/p2p/discoveryv5/hkdf
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
stew/shims/net,
|
stew/shims/net,
|
||||||
../../eth/keys, ../../eth/p2p/discoveryv5/[node, ip_vote]
|
../../eth/keys, ../../eth/p2p/discoveryv5/[node, ip_vote]
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, options],
|
std/options,
|
||||||
|
unittest2,
|
||||||
../../eth/p2p/discoveryv5/lru
|
../../eth/p2p/discoveryv5/lru
|
||||||
|
|
||||||
suite "LRUCache":
|
suite "LRUCache":
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[json, os, unittest],
|
std/[json, os],
|
||||||
|
unittest2,
|
||||||
chronos, stew/byteutils,
|
chronos, stew/byteutils,
|
||||||
../../eth/p2p, ../../eth/p2p/rlpx_protocols/[whisper_protocol, eth_protocol],
|
../../eth/p2p, ../../eth/p2p/rlpx_protocols/[whisper_protocol, eth_protocol],
|
||||||
./p2p_test_helper
|
./p2p_test_helper
|
||||||
|
@ -31,7 +32,7 @@ proc testPayloads(filename: string) =
|
||||||
|
|
||||||
if payloadHex.isNil or payloadHex.kind != JString:
|
if payloadHex.isNil or payloadHex.kind != JString:
|
||||||
skip()
|
skip()
|
||||||
continue
|
return
|
||||||
|
|
||||||
let payload = hexToSeqByte(payloadHex.str)
|
let payload = hexToSeqByte(payloadHex.str)
|
||||||
|
|
||||||
|
@ -40,7 +41,7 @@ proc testPayloads(filename: string) =
|
||||||
else:
|
else:
|
||||||
if error.kind != JString:
|
if error.kind != JString:
|
||||||
skip()
|
skip()
|
||||||
continue
|
return
|
||||||
|
|
||||||
# TODO: can I convert the error string to an Exception type at runtime?
|
# TODO: can I convert the error string to an Exception type at runtime?
|
||||||
expect CatchableError:
|
expect CatchableError:
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
bearssl,
|
bearssl,
|
||||||
../../eth/keys, ../../eth/p2p/discoveryv5/[routing_table, node, enr],
|
../../eth/keys, ../../eth/p2p/discoveryv5/[routing_table, node, enr],
|
||||||
./discv5_test_helper
|
./discv5_test_helper
|
||||||
|
|
|
@ -10,7 +10,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[sequtils, options, unittest, tables],
|
std/[sequtils, options, tables],
|
||||||
|
unittest2,
|
||||||
nimcrypto/hash,
|
nimcrypto/hash,
|
||||||
../../eth/[keys, rlp],
|
../../eth/[keys, rlp],
|
||||||
../../eth/p2p/rlpx_protocols/whisper/whisper_types as whisper
|
../../eth/p2p/rlpx_protocols/whisper/whisper_types as whisper
|
||||||
|
|
|
@ -10,7 +10,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[sequtils, options, unittest, times],
|
std/[sequtils, options, times],
|
||||||
|
unittest2,
|
||||||
../../eth/p2p/rlpx_protocols/whisper_protocol as whisper
|
../../eth/p2p/rlpx_protocols/whisper_protocol as whisper
|
||||||
|
|
||||||
suite "Whisper envelope validation":
|
suite "Whisper envelope validation":
|
||||||
|
|
|
@ -1,3 +0,0 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../.."
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[math, unittest, strutils],
|
std/[math, strutils],
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/rlp
|
../../eth/rlp
|
||||||
|
|
||||||
|
@ -9,209 +10,209 @@ proc q(s: string): string = "\"" & s & "\""
|
||||||
proc i(s: string): string = s.replace(" ").replace("\n")
|
proc i(s: string): string = s.replace(" ").replace("\n")
|
||||||
proc inspectMatch(r: Rlp, s: string): bool = r.inspect.i == s.i
|
proc inspectMatch(r: Rlp, s: string): bool = r.inspect.i == s.i
|
||||||
|
|
||||||
proc suite() =
|
when (NimMajor, NimMinor, NimPatch) < (1, 4, 0):
|
||||||
suite "test api usage":
|
type AssertionDefect = AssertionError
|
||||||
test "empty bytes are not a proper RLP":
|
|
||||||
var rlp = rlpFromBytes seq[byte](@[])
|
|
||||||
|
|
||||||
check:
|
suite "test api usage":
|
||||||
not rlp.hasData
|
test "empty bytes are not a proper RLP":
|
||||||
not rlp.isBlob
|
var rlp = rlpFromBytes seq[byte](@[])
|
||||||
not rlp.isList
|
|
||||||
not rlp.isEmpty
|
|
||||||
|
|
||||||
expect Exception:
|
check:
|
||||||
rlp.skipElem
|
not rlp.hasData
|
||||||
|
not rlp.isBlob
|
||||||
|
not rlp.isList
|
||||||
|
not rlp.isEmpty
|
||||||
|
|
||||||
expect Exception:
|
expect AssertionDefect:
|
||||||
discard rlp.getType
|
rlp.skipElem
|
||||||
|
|
||||||
expect Exception:
|
expect MalformedRlpError:
|
||||||
for e in rlp:
|
discard rlp.getType
|
||||||
discard e.getType
|
|
||||||
|
|
||||||
test "you cannot finish a list without appending enough elements":
|
expect AssertionDefect:
|
||||||
var writer = initRlpList(3)
|
for e in rlp:
|
||||||
writer.append "foo"
|
discard e.getType
|
||||||
writer.append "bar"
|
|
||||||
|
|
||||||
expect Defect:
|
test "you cannot finish a list without appending enough elements":
|
||||||
discard writer.finish
|
var writer = initRlpList(3)
|
||||||
|
writer.append "foo"
|
||||||
|
writer.append "bar"
|
||||||
|
|
||||||
test "encode/decode object":
|
expect Defect:
|
||||||
type
|
discard writer.finish
|
||||||
MyEnum = enum
|
|
||||||
foo,
|
|
||||||
bar
|
|
||||||
|
|
||||||
MyObj = object
|
test "encode/decode object":
|
||||||
a: array[3, char]
|
type
|
||||||
b: int
|
MyEnum = enum
|
||||||
c: MyEnum
|
foo,
|
||||||
|
bar
|
||||||
|
|
||||||
var input: MyObj
|
MyObj = object
|
||||||
input.a = ['e', 't', 'h']
|
a: array[3, char]
|
||||||
input.b = 63
|
b: int
|
||||||
input.c = bar
|
c: MyEnum
|
||||||
|
|
||||||
var writer = initRlpWriter()
|
var input: MyObj
|
||||||
writer.append(input)
|
input.a = ['e', 't', 'h']
|
||||||
let bytes = writer.finish()
|
input.b = 63
|
||||||
var rlp = rlpFromBytes(bytes)
|
input.c = bar
|
||||||
|
|
||||||
var output = rlp.read(MyObj)
|
var writer = initRlpWriter()
|
||||||
check:
|
writer.append(input)
|
||||||
input == output
|
let bytes = writer.finish()
|
||||||
|
var rlp = rlpFromBytes(bytes)
|
||||||
|
|
||||||
test "encode and decode lists":
|
var output = rlp.read(MyObj)
|
||||||
var writer = initRlpList(3)
|
check:
|
||||||
writer.append "foo"
|
input == output
|
||||||
writer.append ["bar", "baz"]
|
|
||||||
writer.append [30, 40, 50]
|
|
||||||
|
|
||||||
var
|
test "encode and decode lists":
|
||||||
bytes = writer.finish
|
var writer = initRlpList(3)
|
||||||
rlp = rlpFromBytes bytes
|
writer.append "foo"
|
||||||
|
writer.append ["bar", "baz"]
|
||||||
check:
|
writer.append [30, 40, 50]
|
||||||
bytes.toHex == "d183666f6fc8836261728362617ac31e2832"
|
|
||||||
rlp.inspectMatch """
|
|
||||||
{
|
|
||||||
"foo"
|
|
||||||
{
|
|
||||||
"bar"
|
|
||||||
"baz"
|
|
||||||
}
|
|
||||||
{
|
|
||||||
byte 30
|
|
||||||
byte 40
|
|
||||||
byte 50
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
|
|
||||||
bytes = encodeList(6000,
|
|
||||||
"Lorem ipsum dolor sit amet",
|
|
||||||
"Donec ligula tortor, egestas eu est vitae")
|
|
||||||
|
|
||||||
|
var
|
||||||
|
bytes = writer.finish
|
||||||
rlp = rlpFromBytes bytes
|
rlp = rlpFromBytes bytes
|
||||||
check:
|
|
||||||
rlp.listLen == 3
|
|
||||||
rlp.listElem(0).toInt(int) == 6000
|
|
||||||
rlp.listElem(1).toString == "Lorem ipsum dolor sit amet"
|
|
||||||
rlp.listElem(2).toString == "Donec ligula tortor, egestas eu est vitae"
|
|
||||||
|
|
||||||
# test creating RLPs from other RLPs
|
check:
|
||||||
var list = rlpFromBytes encodeList(rlp.listELem(1), rlp.listELem(0))
|
bytes.toHex == "d183666f6fc8836261728362617ac31e2832"
|
||||||
|
rlp.inspectMatch """
|
||||||
|
{
|
||||||
|
"foo"
|
||||||
|
{
|
||||||
|
"bar"
|
||||||
|
"baz"
|
||||||
|
}
|
||||||
|
{
|
||||||
|
byte 30
|
||||||
|
byte 40
|
||||||
|
byte 50
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
# test that iteration with enterList/skipElem works as expected
|
bytes = encodeList(6000,
|
||||||
doAssert list.enterList # We already know that we are working with a list
|
"Lorem ipsum dolor sit amet",
|
||||||
check list.toString == "Lorem ipsum dolor sit amet"
|
"Donec ligula tortor, egestas eu est vitae")
|
||||||
list.skipElem
|
|
||||||
|
|
||||||
check list.toInt(int32) == 6000.int32
|
rlp = rlpFromBytes bytes
|
||||||
var intVar: int
|
check:
|
||||||
list >> intVar
|
rlp.listLen == 3
|
||||||
check intVar == 6000
|
rlp.listElem(0).toInt(int) == 6000
|
||||||
|
rlp.listElem(1).toString == "Lorem ipsum dolor sit amet"
|
||||||
|
rlp.listElem(2).toString == "Donec ligula tortor, egestas eu est vitae"
|
||||||
|
|
||||||
check(not list.hasData)
|
# test creating RLPs from other RLPs
|
||||||
expect Exception: list.skipElem
|
var list = rlpFromBytes encodeList(rlp.listELem(1), rlp.listELem(0))
|
||||||
|
|
||||||
test "toBytes":
|
# test that iteration with enterList/skipElem works as expected
|
||||||
let rlp = rlpFromHex("f2cb847f000001827666827666a040ef02798f211da2e8173d37f255be908871ae65060dbb2f77fb29c0421447f4845ab90b50")
|
doAssert list.enterList # We already know that we are working with a list
|
||||||
let tok = rlp.listElem(1).toBytes()
|
check list.toString == "Lorem ipsum dolor sit amet"
|
||||||
check:
|
list.skipElem
|
||||||
tok.len == 32
|
|
||||||
tok.toHex == "40ef02798f211da2e8173d37f255be908871ae65060dbb2f77fb29c0421447f4"
|
|
||||||
|
|
||||||
test "nested lists":
|
check list.toInt(int32) == 6000.int32
|
||||||
let listBytes = encode([[1, 2, 3], [5, 6, 7]])
|
var intVar: int
|
||||||
let listRlp = rlpFromBytes listBytes
|
list >> intVar
|
||||||
let sublistRlp0 = listRlp.listElem(0)
|
check intVar == 6000
|
||||||
let sublistRlp1 = listRlp.listElem(1)
|
|
||||||
check sublistRlp0.listElem(0).toInt(int) == 1
|
|
||||||
check sublistRlp0.listElem(1).toInt(int) == 2
|
|
||||||
check sublistRlp0.listElem(2).toInt(int) == 3
|
|
||||||
check sublistRlp1.listElem(0).toInt(int) == 5
|
|
||||||
check sublistRlp1.listElem(1).toInt(int) == 6
|
|
||||||
check sublistRlp1.listElem(2).toInt(int) == 7
|
|
||||||
|
|
||||||
test "encoding length":
|
check(not list.hasData)
|
||||||
let listBytes = encode([1,2,3,4,5])
|
expect AssertionDefect: list.skipElem
|
||||||
let listRlp = rlpFromBytes listBytes
|
|
||||||
check listRlp.listLen == 5
|
|
||||||
|
|
||||||
let emptyListBytes = encode ""
|
test "toBytes":
|
||||||
check emptyListBytes.len == 1
|
let rlp = rlpFromHex("f2cb847f000001827666827666a040ef02798f211da2e8173d37f255be908871ae65060dbb2f77fb29c0421447f4845ab90b50")
|
||||||
let emptyListRlp = rlpFromBytes emptyListBytes
|
let tok = rlp.listElem(1).toBytes()
|
||||||
check emptyListRlp.blobLen == 0
|
check:
|
||||||
|
tok.len == 32
|
||||||
|
tok.toHex == "40ef02798f211da2e8173d37f255be908871ae65060dbb2f77fb29c0421447f4"
|
||||||
|
|
||||||
test "basic decoding":
|
test "nested lists":
|
||||||
var rlp1 = rlpFromHex("856d6f6f7365")
|
let listBytes = encode([[1, 2, 3], [5, 6, 7]])
|
||||||
var rlp2 = rlpFromHex("0x856d6f6f7365")
|
let listRlp = rlpFromBytes listBytes
|
||||||
|
let sublistRlp0 = listRlp.listElem(0)
|
||||||
|
let sublistRlp1 = listRlp.listElem(1)
|
||||||
|
check sublistRlp0.listElem(0).toInt(int) == 1
|
||||||
|
check sublistRlp0.listElem(1).toInt(int) == 2
|
||||||
|
check sublistRlp0.listElem(2).toInt(int) == 3
|
||||||
|
check sublistRlp1.listElem(0).toInt(int) == 5
|
||||||
|
check sublistRlp1.listElem(1).toInt(int) == 6
|
||||||
|
check sublistRlp1.listElem(2).toInt(int) == 7
|
||||||
|
|
||||||
check:
|
test "encoding length":
|
||||||
rlp1.inspect == q"moose"
|
let listBytes = encode([1,2,3,4,5])
|
||||||
rlp2.inspect == q"moose"
|
let listRlp = rlpFromBytes listBytes
|
||||||
|
check listRlp.listLen == 5
|
||||||
|
|
||||||
test "malformed/truncated RLP":
|
let emptyListBytes = encode ""
|
||||||
var rlp = rlpFromHex("b8056d6f6f7365")
|
check emptyListBytes.len == 1
|
||||||
expect MalformedRlpError:
|
let emptyListRlp = rlpFromBytes emptyListBytes
|
||||||
discard rlp.inspect
|
check emptyListRlp.blobLen == 0
|
||||||
|
|
||||||
test "encode byte arrays":
|
test "basic decoding":
|
||||||
var b1 = [byte(1), 2, 5, 7, 8]
|
var rlp1 = rlpFromHex("856d6f6f7365")
|
||||||
var b2 = [byte(6), 8, 12, 123]
|
var rlp2 = rlpFromHex("0x856d6f6f7365")
|
||||||
var b3 = @[byte(122), 56, 65, 12]
|
|
||||||
|
|
||||||
let rlp = rlpFromBytes(encode((b1, b2, b3)))
|
check:
|
||||||
check:
|
rlp1.inspect == q"moose"
|
||||||
rlp.listLen == 3
|
rlp2.inspect == q"moose"
|
||||||
rlp.listElem(0).toBytes() == b1
|
|
||||||
rlp.listElem(1).toBytes() == b2
|
|
||||||
rlp.listElem(2).toBytes() == b3
|
|
||||||
|
|
||||||
# The first byte here is the length of the datum (132 - 128 => 4)
|
test "malformed/truncated RLP":
|
||||||
$(rlp.listElem(1).rawData) == "[132, 6, 8, 12, 123]"
|
var rlp = rlpFromHex("b8056d6f6f7365")
|
||||||
|
expect MalformedRlpError:
|
||||||
|
discard rlp.inspect
|
||||||
|
|
||||||
test "empty byte arrays":
|
test "encode byte arrays":
|
||||||
var
|
var b1 = [byte(1), 2, 5, 7, 8]
|
||||||
rlp = rlpFromBytes rlp.encode("")
|
var b2 = [byte(6), 8, 12, 123]
|
||||||
b = rlp.toBytes
|
var b3 = @[byte(122), 56, 65, 12]
|
||||||
check $b == "@[]"
|
|
||||||
|
|
||||||
test "encode/decode floats":
|
let rlp = rlpFromBytes(encode((b1, b2, b3)))
|
||||||
for f in [high(float64), low(float64), 0.1, 122.23,
|
check:
|
||||||
103487315.128934,
|
rlp.listLen == 3
|
||||||
1943935743563457201.391754032785692,
|
rlp.listElem(0).toBytes() == b1
|
||||||
0, -0,
|
rlp.listElem(1).toBytes() == b2
|
||||||
Inf, NegInf, NaN]:
|
rlp.listElem(2).toBytes() == b3
|
||||||
|
|
||||||
template isNaN(n): bool =
|
# The first byte here is the length of the datum (132 - 128 => 4)
|
||||||
classify(n) == fcNaN
|
$(rlp.listElem(1).rawData) == "[132, 6, 8, 12, 123]"
|
||||||
|
|
||||||
template chk(input) =
|
test "empty byte arrays":
|
||||||
let restored = decode(encode(input), float64)
|
var
|
||||||
check restored == input or (input.isNaN and restored.isNaN)
|
rlp = rlpFromBytes rlp.encode("")
|
||||||
|
b = rlp.toBytes
|
||||||
|
check $b == "@[]"
|
||||||
|
|
||||||
chk f
|
test "encode/decode floats":
|
||||||
chk -f
|
for f in [high(float64), low(float64), 0.1, 122.23,
|
||||||
|
103487315.128934,
|
||||||
|
1943935743563457201.391754032785692,
|
||||||
|
0, -0,
|
||||||
|
Inf, NegInf, NaN]:
|
||||||
|
|
||||||
test "invalid enum":
|
template isNaN(n): bool =
|
||||||
type
|
classify(n) == fcNaN
|
||||||
MyEnum = enum
|
|
||||||
foo,
|
|
||||||
bar
|
|
||||||
|
|
||||||
var writer = initRlpWriter()
|
template chk(input) =
|
||||||
writer.append(2)
|
let restored = decode(encode(input), float64)
|
||||||
writer.append(-1)
|
check restored == input or (input.isNaN and restored.isNaN)
|
||||||
let bytes = writer.finish()
|
|
||||||
var rlp = rlpFromBytes(bytes)
|
|
||||||
expect RlpTypeMismatch:
|
|
||||||
discard rlp.read(MyEnum)
|
|
||||||
rlp.skipElem()
|
|
||||||
expect RlpTypeMismatch:
|
|
||||||
discard rlp.read(MyEnum)
|
|
||||||
|
|
||||||
suite()
|
chk f
|
||||||
|
chk -f
|
||||||
|
|
||||||
|
test "invalid enum":
|
||||||
|
type
|
||||||
|
MyEnum = enum
|
||||||
|
foo,
|
||||||
|
bar
|
||||||
|
|
||||||
|
var writer = initRlpWriter()
|
||||||
|
writer.append(2)
|
||||||
|
writer.append(-1)
|
||||||
|
let bytes = writer.finish()
|
||||||
|
var rlp = rlpFromBytes(bytes)
|
||||||
|
expect RlpTypeMismatch:
|
||||||
|
discard rlp.read(MyEnum)
|
||||||
|
rlp.skipElem()
|
||||||
|
expect RlpTypeMismatch:
|
||||||
|
discard rlp.read(MyEnum)
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, os, json],
|
std/[os, json],
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/[common, rlp]
|
../../eth/[common, rlp]
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, times],
|
std/times,
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/rlp
|
../../eth/rlp
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
import
|
import
|
||||||
std/[json, unittest],
|
std/json,
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../../eth/rlp
|
../../../eth/rlp
|
||||||
|
|
||||||
|
@ -31,7 +32,7 @@ proc runTests*(filename: string) =
|
||||||
|
|
||||||
if input.isNil or output.isNil or output.kind != JString:
|
if input.isNil or output.isNil or output.kind != JString:
|
||||||
skip()
|
skip()
|
||||||
continue
|
return
|
||||||
|
|
||||||
if input == "VALID":
|
if input == "VALID":
|
||||||
var rlp = rlpFromHex(output.str)
|
var rlp = rlpFromHex(output.str)
|
||||||
|
@ -50,7 +51,8 @@ proc runTests*(filename: string) =
|
||||||
echo " INTERPRETATION:\n", inspectOutput
|
echo " INTERPRETATION:\n", inspectOutput
|
||||||
else:
|
else:
|
||||||
if input.kind == JString and input.str.len != 0 and input.str[0] == '#':
|
if input.kind == JString and input.str.len != 0 and input.str[0] == '#':
|
||||||
continue
|
skip()
|
||||||
|
return
|
||||||
|
|
||||||
var outRlp = initRlpWriter()
|
var outRlp = initRlpWriter()
|
||||||
outRlp.append input
|
outRlp.append input
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
stint,
|
stint,
|
||||||
../eth/bloom
|
../eth/bloom
|
||||||
|
|
||||||
|
|
|
@ -1,2 +0,0 @@
|
||||||
--threads:on
|
|
||||||
--path:"$projectDir/../.."
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, random],
|
std/random,
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/trie/[db, binary],
|
../../eth/trie/[db, binary],
|
||||||
./testutils
|
./testutils
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, strutils],
|
std/strutils,
|
||||||
|
unittest2,
|
||||||
nimcrypto/[keccak, hash], stew/byteutils,
|
nimcrypto/[keccak, hash], stew/byteutils,
|
||||||
../../eth/trie/[binaries, trie_bitseq],
|
../../eth/trie/[binaries, trie_bitseq],
|
||||||
./testutils
|
./testutils
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[sets, unittest, strutils],
|
std/[sets, strutils],
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/trie/[db, binary, branches]
|
../../eth/trie/[db, binary, branches]
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
stew/byteutils, nimcrypto/[keccak, hash],
|
stew/byteutils, nimcrypto/[keccak, hash],
|
||||||
../../eth/trie/[db, binary, binaries, trie_utils, branches]
|
../../eth/trie/[db, binary, binaries, trie_utils, branches]
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, sequtils, os, algorithm, random],
|
std/[sequtils, os, algorithm, random],
|
||||||
|
unittest2,
|
||||||
stew/byteutils, nimcrypto/utils,
|
stew/byteutils, nimcrypto/utils,
|
||||||
../../eth/trie/[hexary, db, trie_defs],
|
../../eth/trie/[hexary, db, trie_defs],
|
||||||
./testutils
|
./testutils
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[unittest, random],
|
std/random,
|
||||||
|
unittest2,
|
||||||
stew/byteutils,
|
stew/byteutils,
|
||||||
../../eth/trie/[db, sparse_binary, sparse_proofs],
|
../../eth/trie/[db, sparse_binary, sparse_proofs],
|
||||||
./testutils
|
./testutils
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/unittest,
|
unittest2,
|
||||||
../../eth/trie/db,
|
../../eth/trie/db,
|
||||||
./testutils
|
./testutils
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
std/[random, unittest],
|
std/random,
|
||||||
|
unittest2,
|
||||||
../../eth/trie/trie_bitseq
|
../../eth/trie/trie_bitseq
|
||||||
|
|
||||||
proc randomBytes(n: int): seq[byte] =
|
proc randomBytes(n: int): seq[byte] =
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
|
|
||||||
import
|
import
|
||||||
std/sugar,
|
std/sugar,
|
||||||
unittest,
|
unittest2,
|
||||||
../../eth/utp/growable_buffer
|
../../eth/utp/growable_buffer
|
||||||
|
|
||||||
|
|
||||||
|
@ -21,7 +21,7 @@ suite "Utp ring buffer":
|
||||||
check:
|
check:
|
||||||
buff.len() == 4
|
buff.len() == 4
|
||||||
buff.get(0).isNone()
|
buff.get(0).isNone()
|
||||||
|
|
||||||
test "Adding elements to buffer":
|
test "Adding elements to buffer":
|
||||||
var buff = GrowableCircularBuffer[int].init(size = 4)
|
var buff = GrowableCircularBuffer[int].init(size = 4)
|
||||||
buff.put(11, 11)
|
buff.put(11, 11)
|
||||||
|
@ -64,7 +64,7 @@ suite "Utp ring buffer":
|
||||||
not buff.exists(textIdx, x => x.foo == text)
|
not buff.exists(textIdx, x => x.foo == text)
|
||||||
|
|
||||||
buff[textIdx].foo = text
|
buff[textIdx].foo = text
|
||||||
|
|
||||||
check:
|
check:
|
||||||
buff.exists(textIdx, x => x.foo == text)
|
buff.exists(textIdx, x => x.foo == text)
|
||||||
|
|
||||||
|
@ -82,7 +82,7 @@ suite "Utp ring buffer":
|
||||||
|
|
||||||
test "Adding elements to buffer while ensuring proper size":
|
test "Adding elements to buffer while ensuring proper size":
|
||||||
var buff = GrowableCircularBuffer[int].init(size = 4)
|
var buff = GrowableCircularBuffer[int].init(size = 4)
|
||||||
|
|
||||||
buff.put(11, 11)
|
buff.put(11, 11)
|
||||||
buff.put(12, 12)
|
buff.put(12, 12)
|
||||||
buff.put(13, 13)
|
buff.put(13, 13)
|
||||||
|
@ -103,7 +103,7 @@ suite "Utp ring buffer":
|
||||||
|
|
||||||
test "Adding out of order elements to buffer while ensuring proper size":
|
test "Adding out of order elements to buffer while ensuring proper size":
|
||||||
var buff = GrowableCircularBuffer[int].init(size = 4)
|
var buff = GrowableCircularBuffer[int].init(size = 4)
|
||||||
|
|
||||||
buff.put(11, 11)
|
buff.put(11, 11)
|
||||||
buff.put(12, 12)
|
buff.put(12, 12)
|
||||||
buff.put(13, 13)
|
buff.put(13, 13)
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
|
|
||||||
import
|
import
|
||||||
chronos,
|
chronos,
|
||||||
unittest,
|
unittest2,
|
||||||
../../eth/utp/clock_drift_calculator
|
../../eth/utp/clock_drift_calculator
|
||||||
|
|
||||||
suite "Clock drift calculator":
|
suite "Clock drift calculator":
|
||||||
|
|
|
@ -53,7 +53,7 @@ procSuite "Utp protocol over discovery v5 tests":
|
||||||
proc(server: UtpRouter[Node], client: UtpSocket[Node]): Future[void] =
|
proc(server: UtpRouter[Node], client: UtpSocket[Node]): Future[void] =
|
||||||
serverSockets.addLast(client)
|
serverSockets.addLast(client)
|
||||||
)
|
)
|
||||||
|
|
||||||
proc allowOneIdCallback(allowedId: uint16): AllowConnectionCallback[Node] =
|
proc allowOneIdCallback(allowedId: uint16): AllowConnectionCallback[Node] =
|
||||||
return (
|
return (
|
||||||
proc(r: UtpRouter[Node], remoteAddress: Node, connectionId: uint16): bool =
|
proc(r: UtpRouter[Node], remoteAddress: Node, connectionId: uint16): bool =
|
||||||
|
@ -109,7 +109,7 @@ procSuite "Utp protocol over discovery v5 tests":
|
||||||
node1.addNode(node2.localNode)
|
node1.addNode(node2.localNode)
|
||||||
node2.addNode(node1.localNode)
|
node2.addNode(node1.localNode)
|
||||||
|
|
||||||
let numOfBytes = 5000
|
let numOfBytes = 5000
|
||||||
let clientSocketResult = await utp1.connectTo(node2.localNode)
|
let clientSocketResult = await utp1.connectTo(node2.localNode)
|
||||||
let clientSocket = clientSocketResult.get()
|
let clientSocket = clientSocketResult.get()
|
||||||
|
|
||||||
|
@ -146,7 +146,7 @@ procSuite "Utp protocol over discovery v5 tests":
|
||||||
utpProtId,
|
utpProtId,
|
||||||
registerIncomingSocketCallback(queue),
|
registerIncomingSocketCallback(queue),
|
||||||
SocketConfig.init(lowSynTimeout))
|
SocketConfig.init(lowSynTimeout))
|
||||||
utp2 =
|
utp2 =
|
||||||
UtpDiscv5Protocol.new(
|
UtpDiscv5Protocol.new(
|
||||||
node2,
|
node2,
|
||||||
utpProtId,
|
utpProtId,
|
||||||
|
@ -161,14 +161,14 @@ procSuite "Utp protocol over discovery v5 tests":
|
||||||
|
|
||||||
let clientSocketResult1 = await utp1.connectTo(node2.localNode, allowedId)
|
let clientSocketResult1 = await utp1.connectTo(node2.localNode, allowedId)
|
||||||
let clientSocketResult2 = await utp1.connectTo(node2.localNode, allowedId + 1)
|
let clientSocketResult2 = await utp1.connectTo(node2.localNode, allowedId + 1)
|
||||||
|
|
||||||
check:
|
check:
|
||||||
clientSocketResult1.isOk()
|
clientSocketResult1.isOk()
|
||||||
clientSocketResult2.isErr()
|
clientSocketResult2.isErr()
|
||||||
|
|
||||||
let clientSocket = clientSocketResult1.get()
|
let clientSocket = clientSocketResult1.get()
|
||||||
let serverSocket = await queue.get()
|
let serverSocket = await queue.get()
|
||||||
|
|
||||||
check:
|
check:
|
||||||
clientSocket.connectionId() == allowedId
|
clientSocket.connectionId() == allowedId
|
||||||
serverSocket.connectionId() == allowedId
|
serverSocket.connectionId() == allowedId
|
||||||
|
|
|
@ -7,14 +7,14 @@
|
||||||
{.used.}
|
{.used.}
|
||||||
|
|
||||||
import
|
import
|
||||||
unittest,
|
unittest2,
|
||||||
../../eth/utp/packets,
|
../../eth/utp/packets,
|
||||||
../../eth/keys
|
../../eth/keys
|
||||||
|
|
||||||
suite "Utp packets encoding/decoding":
|
suite "Utp packets encoding/decoding":
|
||||||
|
|
||||||
let rng = newRng()
|
let rng = newRng()
|
||||||
|
|
||||||
test "Encode/decode syn packet":
|
test "Encode/decode syn packet":
|
||||||
let synPacket = synPacket(5, 10, 20)
|
let synPacket = synPacket(5, 10, 20)
|
||||||
let encoded = encodePacket(synPacket)
|
let encoded = encodePacket(synPacket)
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import
|
import
|
||||||
chronos,
|
chronos,
|
||||||
./../eth/keys
|
../../eth/keys
|
||||||
|
|
||||||
type AssertionCallback = proc(): bool {.gcsafe, raises: [Defect].}
|
type AssertionCallback = proc(): bool {.gcsafe, raises: [Defect].}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue