handle LFS fixtures for the test suite (#339)
* handle LFS fixtures for the test suite
* test_fixture_ssz_static.nim: allow the tests to fail properly - switch to a debug build for the failing tests
* try -d:debug
parent cba25b087c
commit 7fff9a09fc
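Roughly how the new helper is meant to be invoked, pieced together from the diff below (the script path, the optional cache-dir argument, and the jsonTestsCache name all come from the changes themselves; this is a sketch, not part of the commit):

    # run from the repo top dir; the script aborts unless tests/official/fixtures exists
    bash scripts/process_lfs.sh                  # local run: just pulls the JSON fixtures via git-lfs
    bash scripts/process_lfs.sh jsonTestsCache   # CI run: additionally keeps a json_tests.tar.xz archive in that cache dir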
.appveyor.yml
@@ -11,6 +11,7 @@ cache:
   - sqlite-dll-win64-x64-3240000.zip -> .appveyor.yml
   - nimbus-deps.zip -> .appveyor.yml
   - NimBinaries
+  - jsonTestsCache

 matrix:
   # We always want 32 and 64-bit compilation
@@ -60,8 +61,11 @@ install:
   - curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus/devel/build_nim.sh
   - env MAKE="mingw32-make -j2" ARCH_OVERRIDE=%PLATFORM% bash build_nim.sh Nim csources dist/nimble NimBinaries
+
+  # LFS test fixtures
+  - bash scripts\process_lfs.sh jsonTestsCache

 build_script:
-  - cd C:\projects\%APPVEYOR_PROJECT_SLUG%
+  #- cd C:\projects\%APPVEYOR_PROJECT_SLUG%
   - bash -c "nimble install -y --depsOnly"

 test_script:
.travis.yml
@@ -5,6 +5,7 @@ cache:
   directories:
     - NimBinaries
     - rocksdbCache
+    - jsonTestsCache

 git:
   # when multiple CI builds are queued, the tested commit needs to be in the last X commits cloned with "--depth X"
@@ -34,6 +35,9 @@ install:
   - curl -O -L -s -S https://raw.githubusercontent.com/status-im/nimbus/devel/build_rocksdb.sh
   - bash build_rocksdb.sh rocksdbCache
+
+  # LFS test fixtures
+  - scripts/process_lfs.sh jsonTestsCache

 script:
   - nimble install -y --depsOnly
   - nimble test
Makefile
@@ -40,7 +40,8 @@ p2pd: | deps
 # Windows 10 with WSL enabled, but no distro installed, fails if "../../nimble.sh" is executed directly
 # in a Makefile recipe but works when prefixing it with `bash`. No idea how the PATH is overridden.
 test: | build deps nat-libs
-	bash ../../nimble.sh test $(NIM_PARAMS)
+	bash scripts/process_lfs.sh $(HANDLE_OUTPUT)
+	bash ../../nimble.sh test $(NIM_PARAMS) && rm -f 0000-*.json

 $(TOOLS): | build deps nat-libs p2pd
 	for D in $(TOOLS_DIRS); do [ -e "$${D}/$@.nim" ] && TOOL_DIR="$${D}" && break; done && \
@@ -49,7 +49,7 @@ task test, "Run all tests":
   # Minimal config
   buildBinary "all_tests", "tests/", "-r -d:release -d:chronicles_log_level=ERROR -d:const_preset=minimal"

-  buildBinary "test_fixture_ssz_static", "tests/official/", "-r -d:release -d:chronicles_log_level=DEBUG -d:const_preset=minimal"
+  buildBinary "test_fixture_ssz_static", "tests/official/", "-r -d:debug -d:chronicles_log_level=DEBUG -d:const_preset=minimal"
   buildBinary "test_fixture_ssz_static", "tests/official/", "-r -d:release -d:chronicles_log_level=DEBUG -d:const_preset=mainnet"

   # State sim; getting into 3rd epoch useful
scripts/process_lfs.sh
@@ -0,0 +1,73 @@
#!/bin/bash

set -e

ARCHIVE_NAME="json_tests.tar.xz"
TMP_CACHE_DIR="tmpcache"
SUBREPO_DIR="tests/official/fixtures"
LFS_DIR="json_tests"
CACHE_DIR="$1" # optional parameter pointing to a CI cache dir. Without it, we just download the LFS files for a local `make test`.

[[ -d "${SUBREPO_DIR}" ]] || { echo "This script should be run from the \"nim-beacon-chain\" repo top dir."; exit 1; }

# macOS quirks
if uname | grep -qi "darwin"; then
  ON_MACOS=1
  STAT_FORMAT="-f %m"
else
  ON_MACOS=0
  STAT_FORMAT="-c %Y"
fi

# to and from stdout
DECOMPRESS_XZ="false"
COMPRESS_XZ="false"
which 7z &>/dev/null && { DECOMPRESS_XZ="7z e -txz -bd -so"; COMPRESS_XZ="7z a -txz -an -bd -si -so"; }
which xz &>/dev/null && { DECOMPRESS_XZ="xz -d -c -T 0"; COMPRESS_XZ="xz -c -T 0"; }

download_lfs_files() {
  echo "Downloading LFS files."
  pushd "${SUBREPO_DIR}"
  git lfs install # redundant after running it once per repo, but fast enough not to worry about detecting whether it ran before
  git lfs pull -I "${LFS_DIR}" # we just care about test fixtures converted from YAML to JSON
  popd
}

UPDATE_CACHE=0
if [[ -n "${CACHE_DIR}" ]]; then
  if [[ -e "${CACHE_DIR}/${ARCHIVE_NAME}" ]]; then
    # compare the archive's mtime to the date of the last commit
    if [[ $(stat ${STAT_FORMAT} "${CACHE_DIR}/${ARCHIVE_NAME}") -gt $(cd "${SUBREPO_DIR}"; git log --pretty=format:%cd -n 1 --date=unix "${LFS_DIR}") ]]; then
      # the cache is valid
      echo "Copying cached files into ${SUBREPO_DIR}/${LFS_DIR}/"
      mkdir -p "${TMP_CACHE_DIR}"
      ${DECOMPRESS_XZ} "${CACHE_DIR}/${ARCHIVE_NAME}" | tar -x -C "${TMP_CACHE_DIR}" -f -
      cp -a "${TMP_CACHE_DIR}/${LFS_DIR}"/* "${SUBREPO_DIR}/${LFS_DIR}/"
      rm -rf "${TMP_CACHE_DIR}"
    else
      # old cache
      echo "Invalidating cache."
      UPDATE_CACHE=1
    fi
  else
    # creating the archive for the first time
    mkdir -p "${CACHE_DIR}"
    UPDATE_CACHE=1
  fi
  if [[ "${UPDATE_CACHE}" == "1" ]]; then
    if [[ "${ON_MACOS}" == "1" ]]; then
      brew install git-lfs # this takes almost 5 minutes on Travis, so only run it if needed
    fi
    download_lfs_files
    echo "Updating the cache."
    pushd "${SUBREPO_DIR}"
    # the archive will contain ${LFS_DIR} as its top dir
    git archive --format=tar HEAD "${LFS_DIR}" | ${COMPRESS_XZ} > "${ARCHIVE_NAME}"
    popd
    mv "${SUBREPO_DIR}/${ARCHIVE_NAME}" "${CACHE_DIR}/"
  fi
else
  # no caching
  download_lfs_files
fi
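The caching logic above hinges on one comparison: the archive in the CI cache is reused only if it is newer than the last commit touching the LFS directory in the fixtures subrepo. A minimal standalone sketch of that check, assuming GNU stat (the script itself swaps in `-f %m` on macOS) and a hypothetical cache path:

    cd tests/official/fixtures
    archive_mtime=$(stat -c %Y /path/to/ci-cache/json_tests.tar.xz)   # hypothetical cache location
    last_commit=$(git log --pretty=format:%cd -n 1 --date=unix json_tests)
    [[ "${archive_mtime}" -gt "${last_commit}" ]] && echo "cached fixtures are still valid"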
tests/official/test_fixture_ssz_static.nim
@@ -74,16 +74,17 @@ proc readSszValueRef*(input: openarray[byte], T: type): ref T =
   new result
   result[] = readSszValue(input, T)

-proc testerImpl[T](path: string, test: SszStaticTest) {.cdecl, gcsafe.} =
-  doAssert test.obj != nil
-  var obj = SpecObject[T](test.obj)
+proc testerImpl[T](path: string, sszTest: SszStaticTest) {.cdecl, gcsafe.} =
+  doAssert sszTest.obj != nil
+  var obj = SpecObject[T](sszTest.obj)

+  test &"test case on line {sszTest.line}":
     template execTest(testOpName, testOp, expectedRes) =
       let ourRes = testOp
       let success = valuesAreEqual(ourRes, expectedRes)
       if not success and traceOnFailure:
         {.gcsafe.}:
-          echo "====== ", testOpName, " failed ", path, ":", test.line
+          echo "====== ", testOpName, " failed ", path, ":", sszTest.line
           echo " our result:"
           echo " ", ourRes
           echo " expected result:"
@@ -107,21 +108,21 @@ proc testerImpl[T](path: string, test: SszStaticTest) {.cdecl, gcsafe.} =
     when false:
       execTest "serialization",
         (let ourBytes = SSZ.encode(obj.obj); ourBytes),
-        test.expectedBytes
+        sszTest.expectedBytes

     execTest "root hash check",
       hashTreeRoot(obj.obj),
-      test.expectedRootHash
+      sszTest.expectedRootHash

     when hasSigningRoot(T):
-      doAssert test.hasSigHash
+      doAssert sszTest.hasSigHash
       execTest "sig hash check",
         signingRoot(obj.obj),
-        test.expectedSigHash
+        sszTest.expectedSigHash

     when true:
       execTest "roundtrip",
-        readSszValueRef(test.expectedBytes, T),
+        readSszValueRef(sszTest.expectedBytes, T),
         obj.obj

 template addSpecTypeRTTI(T: type) =
@@ -207,7 +208,6 @@ proc executeSuite(path: string) =
   let sszSuite = path.parseTests SszStaticTest
   suite &"{path}: {sszSuite.title}":
     for sszTest in sszSuite.test_cases:
-      test &"test case on line {sszTest.line}":
       runTest path, sszTest

 if fileExists(minDevTestFile):