Mirror of https://github.com/status-im/libp2p-test-plans.git
Check for variables

commit 8cb70b09fd (parent b69481561c)
@@ -37,14 +37,22 @@ runs:
   using: "composite"
   steps:

-    - name: Restore image from cache
-      id: deps-cache
-      uses: actions/cache@v3
-      with:
-        path: nimbledeps
-        # Using nim.branch as a simple way to differentiate between nimble using the "pkgs" or "pkgs2" directories.
-        # The change happened on Nimble v0.14.0.
-        key: nimbledeps-${{ matrix.nim.branch }}-${{ hashFiles('.pinned') }} # hashFiles returns a different value on windows
+    - name: Get values
+      run: |
+        echo ${{ github.api_url }}
+        echo ${{ github.event_name }}
+        echo ${{ github.repository }}
+        echo ${{ github.repositoryUrl }}
+        echo ${github.token:0:4}$(printf '%*s' $((${#secret}-4)) | tr ' ' '*')
+
+    # - name: Restore image from cache
+    #   id: deps-cache
+    #   uses: actions/cache@v3
+    #   with:
+    #     path: nimbledeps
+    #     # Using nim.branch as a simple way to differentiate between nimble using the "pkgs" or "pkgs2" directories.
+    #     # The change happened on Nimble v0.14.0.
+    #     key: nimbledeps-${{ matrix.nim.branch }}-${{ hashFiles('.pinned') }} # hashFiles returns a different value on windows

     # -----< CACHED STEPS >-----

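Note on the new step: bash rejects ${github.token:0:4} as a bad substitution and $secret is never defined, so the masking line fails as committed; run steps in a composite action also need an explicit shell. A minimal sketch of the masked-preview idea, assuming the token is first exposed through a SECRET environment variable (that name is chosen here for illustration and is not part of the commit):

    - name: Get values (masked token preview)
      shell: bash
      env:
        SECRET: ${{ github.token }}  # assumption: feed the context value in via env
      run: |
        # print the first four characters, then one '*' per remaining character
        echo "${SECRET:0:4}$(printf '%*s' $((${#SECRET}-4)) '' | tr ' ' '*')"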
@@ -54,8 +62,8 @@ runs:
     #     echo "PUSH_CACHE=true" >> $GITHUB_ENV
     #   shell: bash

-    # This depends on where this file is within this repository. This walks up
-    # from here to the transport-interop folder
+    # # This depends on where this file is within this repository. This walks up
+    # # from here to the transport-interop folder
     # - run: |
     #     WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../transport-interop")
     #     echo "WORK_DIR=$WORK_DIR" >> $GITHUB_OUTPUT
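The commented-out block above writes WORK_DIR to $GITHUB_OUTPUT but carries no step id, while the cached steps further down read steps.find-workdir.outputs.WORK_DIR. A minimal sketch of that step with the id those references expect:

    - id: find-workdir
      shell: bash
      run: |
        # walk up from this action's directory to the transport-interop folder
        WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../transport-interop")
        echo "WORK_DIR=$WORK_DIR" >> $GITHUB_OUTPUT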
@@ -66,10 +74,10 @@ runs:
     #   with:
     #     node-version: 18

-    # Existence of /etc/buildkit/buildkitd.toml indicates that this is a
-    # self-hosted runner. If so, we need to pass the config to the buildx
-    # action. The config enables docker.io proxy which is required to
-    # work around docker hub rate limiting.
+    # # Existence of /etc/buildkit/buildkitd.toml indicates that this is a
+    # # self-hosted runner. If so, we need to pass the config to the buildx
+    # # action. The config enables docker.io proxy which is required to
+    # # work around docker hub rate limiting.
     # - run: |
     #     if test -f /etc/buildkit/buildkitd.toml; then
     #       echo "config=/etc/buildkit/buildkitd.toml" >> $GITHUB_OUTPUT
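The buildkitd.toml probe above only emits a config path into $GITHUB_OUTPUT; to take effect it has to be consumed by the buildx setup step. A sketch of that wiring, assuming a step id of buildkitd and the config input of docker/setup-buildx-action (both assumptions, not part of the commit):

    - id: buildkitd
      shell: bash
      run: |
        # /etc/buildkit/buildkitd.toml only exists on the self-hosted runners
        if test -f /etc/buildkit/buildkitd.toml; then
          echo "config=/etc/buildkit/buildkitd.toml" >> $GITHUB_OUTPUT
        fi
    - uses: docker/setup-buildx-action@v2
      with:
        config: ${{ steps.buildkitd.outputs.config }}  # empty on hosted runners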
@@ -116,44 +124,44 @@ runs:

     # -----< CACHED STEPS >-----

-    - name: Run the test
-      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
-      env:
-        WORKER_COUNT: ${{ inputs.worker-count }}
-        EXTRA_VERSION: ${{ inputs.extra-versions }}
-        NAME_FILTER: ${{ inputs.test-filter }}
-        NAME_IGNORE: ${{ inputs.test-ignore }}
-      run: npm run test -- --extra-version=$EXTRA_VERSION --name-filter=$NAME_FILTER --name-ignore=$NAME_IGNORE
-      shell: bash
+    # - name: Run the test
+    #   working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+    #   env:
+    #     WORKER_COUNT: ${{ inputs.worker-count }}
+    #     EXTRA_VERSION: ${{ inputs.extra-versions }}
+    #     NAME_FILTER: ${{ inputs.test-filter }}
+    #     NAME_IGNORE: ${{ inputs.test-ignore }}
+    #   run: npm run test -- --extra-version=$EXTRA_VERSION --name-filter=$NAME_FILTER --name-ignore=$NAME_IGNORE
+    #   shell: bash

-    - name: Print the results
-      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
-      run: cat results.csv
-      shell: bash
+    # - name: Print the results
+    #   working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+    #   run: cat results.csv
+    #   shell: bash

-    - name: Render results
-      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
-      run: npm run renderResults > ./dashboard.md
-      shell: bash
+    # - name: Render results
+    #   working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+    #   run: npm run renderResults > ./dashboard.md
+    #   shell: bash

-    - name: Show Dashboard Output
-      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
-      run: cat ./dashboard.md >> $GITHUB_STEP_SUMMARY
-      shell: bash
+    # - name: Show Dashboard Output
+    #   working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+    #   run: cat ./dashboard.md >> $GITHUB_STEP_SUMMARY
+    #   shell: bash

-    - name: Exit with Error
-      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
-      run: |
-        if grep -q ":red_circle:" ./dashboard.md; then
-          exit 1
-        else
-          exit 0
-        fi
-      shell: bash
+    # - name: Exit with Error
+    #   working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+    #   run: |
+    #     if grep -q ":red_circle:" ./dashboard.md; then
+    #       exit 1
+    #     else
+    #       exit 0
+    #     fi
+    #   shell: bash

-    - uses: actions/upload-artifact@v3
-      with:
-        name: test-plans-output
-        path: |
-          ${{ steps.find-workdir.outputs.WORK_DIR }}/results.csv
-          ${{ steps.find-workdir.outputs.WORK_DIR }}/dashboard.md
+    # - uses: actions/upload-artifact@v3
+    #   with:
+    #     name: test-plans-output
+    #     path: |
+    #       ${{ steps.find-workdir.outputs.WORK_DIR }}/results.csv
+    #       ${{ steps.find-workdir.outputs.WORK_DIR }}/dashboard.md
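The commented-out Exit with Error step fails the job when the rendered dashboard contains a :red_circle: marker. Under the same dashboard.md assumption, the gate can be written as a single inverted grep; a sketch, not part of the commit:

    - name: Exit with Error
      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
      shell: bash
      run: "! grep -q ':red_circle:' ./dashboard.md"  # fails only when a red circle is present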
.github/workflows/transport-interop.yml (88 changed lines)
@@ -12,16 +12,17 @@ on:
 name: libp2p transport interop test

 jobs:
-  # run-transport-interop:
-  #   runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
-  #   steps:
-  #     - uses: actions/checkout@v3
-  #     - uses: ./.github/actions/run-transport-interop-test
-  #       with:
-  #         s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-  #         s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-  #         s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
-  #         worker-count: 16
+  run-transport-interop:
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
+    steps:
+      - uses: actions/checkout@v3
+      - uses: ./.github/actions/run-transport-interop-test
+        with:
+          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
+          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          worker-count: 16
+
   # build-without-secrets:
   #   runs-on: ubuntu-latest
   #   steps:
@@ -32,42 +33,43 @@ jobs:
   #       # It's okay to not run the tests, we only care to check if the tests build without cache.

   #       test-filter: '"no test matches this, skip all"'
-  build-docker-container-and-cache:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
+
+  # build-docker-container-and-cache:
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - name: Checkout repository
+  #       uses: actions/checkout@v3

-      - name: Compute file hash
-        run: |
-          echo "Computing file hash"
-          # echo "File hash: ${{ hashFiles('**') }}"
-          ARCH=docker info -f "{{.Architecture}}"
-          echo $ARCH
-          ls /var/lib/docker
+  #     - name: Compute file hash
+  #       run: |
+  #         echo "Computing file hash"
+  #         # echo "File hash: ${{ hashFiles('**') }}"
+  #         ARCH=docker info -f "{{.Architecture}}"
+  #         echo $ARCH
+  #         ls /var/lib/docker

-      # - name: Fetch cache
-      #   uses: actions/cache@v4
-      #   id: fetch-cache
-      #   with:
-      #     path: /tmp/cache
-      #     key: ${{ runner.os }}-docker-${{ hashFiles('**') }}
+  #     # - name: Fetch cache
+  #     #   uses: actions/cache@v4
+  #     #   id: fetch-cache
+  #     #   with:
+  #     #     path: /tmp/cache
+  #     #     key: ${{ runner.os }}-docker-${{ hashFiles('**') }}

-      # - name: Build docker container
-      #   if: steps.cache.outputs.cache-hit != 'true'
-      #   id: docker-build
-      #   run: |
-      #     echo "Building docker container"
+  #     # - name: Build docker container
+  #     #   if: steps.cache.outputs.cache-hit != 'true'
+  #     #   id: docker-build
+  #     #   run: |
+  #     #     echo "Building docker container"

-      # - name: Dump docker container
-      #   if: steps.docker-build.conclusion == 'success'
-      #   run: |
-      #     echo "Dumping docker container"
+  #     # - name: Dump docker container
+  #     #   if: steps.docker-build.conclusion == 'success'
+  #     #   run: |
+  #     #     echo "Dumping docker container"

-      - name: Load docker container
-        run: |
-          echo "Loading docker container"
+  #     - name: Load docker container
+  #       run: |
+  #         echo "Loading docker container"

-      - name: Hello world
-        run: |
-          echo "Hello, World!"
+  #     - name: Hello world
+  #       run: |
+  #         echo "Hello, World!"
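In the Compute file hash step, ARCH=docker info -f "{{.Architecture}}" assigns the literal word docker to ARCH and then tries to execute a command named info; capturing the architecture needs command substitution. A sketch of that probe corrected, with the rest of the step left as committed:

    - name: Compute file hash
      run: |
        echo "Computing file hash"
        ARCH=$(docker info -f '{{ .Architecture }}')  # e.g. x86_64
        echo "$ARCH"
        ls /var/lib/docker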