Rename `multidim-interop` to `transport-interop` (#308)

Thomas Eizinger 2023-10-05 14:12:39 +11:00 committed by GitHub
parent fbe8eb620f
commit 6ab15df0ec
51 changed files with 165 additions and 20 deletions

View File

@@ -43,9 +43,9 @@ runs:
shell: bash
# This depends on where this file is within this repository. This walks up
# from here to the multidim-interop folder
# from here to the transport-interop folder
- run: |
WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../multidim-interop")
WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../transport-interop")
echo "WORK_DIR=$WORK_DIR" >> $GITHUB_OUTPUT
shell: bash
id: find-workdir
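
For context, the step above derives the suite's working directory from the location of the action file itself. Assuming the action lives at `.github/actions/run-transport-interop-test/action.yml` (the path the workflow later in this commit references with `uses: ./.github/actions/run-transport-interop-test`), the three `..` segments climb from the action folder back to the repository root, so `WORK_DIR` resolves to `<repo>/transport-interop`. A minimal sketch of that resolution:

```yaml
# Sketch only; assumes the composite action file sits at
# .github/actions/run-transport-interop-test/action.yml, so that
# $GITHUB_ACTION_PATH points at <repo>/.github/actions/run-transport-interop-test.
- run: |
    # ../../../ walks up: run-transport-interop-test -> actions -> .github -> <repo>
    WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../transport-interop")
    echo "WORK_DIR=$WORK_DIR" >> $GITHUB_OUTPUT   # expose the path to later steps
  shell: bash
  id: find-workdir
```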

View File

@@ -0,0 +1,145 @@
name: "libp2p transport interop test"
description: "Run the libp2p transport interoperability test suite"
inputs:
test-filter:
description: "Filter which tests to run out of the created matrix"
required: false
default: ""
test-ignore:
description: "Exclude tests from the created matrix that include this string in their name"
required: false
default: ""
extra-versions:
description: "Space-separated paths to JSON files describing additional images"
required: false
default: ""
s3-cache-bucket:
description: "Which S3 bucket to use for container layer caching"
required: false
default: ""
s3-access-key-id:
description: "S3 Access key id for the cache"
required: false
default: ""
s3-secret-access-key:
description: "S3 secret key id for the cache"
required: false
default: ""
aws-region:
description: "Which AWS region to use"
required: false
default: "us-east-1"
worker-count:
description: "How many workers to use for the test"
required: false
default: "2"
runs:
using: "composite"
steps:
- name: Configure AWS credentials for S3 build cache
if: inputs.s3-access-key-id != '' && inputs.s3-secret-access-key != ''
run: |
echo "PUSH_CACHE=true" >> $GITHUB_ENV
shell: bash
# This depends on where this file is within this repository. This walks up
# from here to the transport-interop folder
- run: |
WORK_DIR=$(realpath "$GITHUB_ACTION_PATH/../../../transport-interop")
echo "WORK_DIR=$WORK_DIR" >> $GITHUB_OUTPUT
shell: bash
id: find-workdir
- uses: actions/setup-node@v3
with:
node-version: 18
# Existence of /etc/buildkit/buildkitd.toml indicates that this is a
# self-hosted runner. If so, we need to pass the config to the buildx
# action. The config enables docker.io proxy which is required to
# work around docker hub rate limiting.
- run: |
if test -f /etc/buildkit/buildkitd.toml; then
echo "config=/etc/buildkit/buildkitd.toml" >> $GITHUB_OUTPUT
fi
shell: bash
id: buildkit
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
with:
config: ${{ steps.buildkit.outputs.config }}
- name: Install deps
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: npm ci
shell: bash
- name: Load cache and build
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: npm run cache -- load
shell: bash
- name: Assert Git tree is clean.
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
shell: bash
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Git tree is dirty. This means that building an impl generated something that should probably be .gitignore'd"
git status
exit 1
fi
- name: Push the image cache
if: env.PUSH_CACHE == 'true'
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
env:
AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
AWS_REGION: ${{ inputs.aws-region }}
AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
run: npm run cache -- push
shell: bash
- name: Run the test
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
env:
WORKER_COUNT: ${{ inputs.worker-count }}
EXTRA_VERSION: ${{ inputs.extra-versions }}
NAME_FILTER: ${{ inputs.test-filter }}
NAME_IGNORE: ${{ inputs.test-ignore }}
run: npm run test -- --extra-version=$EXTRA_VERSION --name-filter=$NAME_FILTER --name-ignore=$NAME_IGNORE
shell: bash
- name: Print the results
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: cat results.csv
shell: bash
- name: Render results
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: npm run renderResults > ./dashboard.md
shell: bash
- name: Show Dashboard Output
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: cat ./dashboard.md >> $GITHUB_STEP_SUMMARY
shell: bash
- name: Exit with Error
working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
run: |
if grep -q ":red_circle:" ./dashboard.md; then
exit 1
else
exit 0
fi
shell: bash
- uses: actions/upload-artifact@v3
with:
name: test-plans-output
path: |
${{ steps.find-workdir.outputs.WORK_DIR }}/results.csv
${{ steps.find-workdir.outputs.WORK_DIR }}/dashboard.md
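
For reference, this composite action is consumed the way the interop workflow further down in this commit does. A hedged sketch of a caller job follows; the runner label, worker count, bucket name, and secret name are illustrative placeholders rather than values taken from this commit:

```yaml
# Sketch of a consuming workflow job; credentials and bucket are placeholders.
jobs:
  run-transport-interop:
    runs-on: ubuntu-latest          # the real workflow uses a self-hosted 4xlarge runner
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/run-transport-interop-test
        with:
          worker-count: "4"                              # parallelism for the test runner
          test-ignore: ""                                # e.g. exclude known-flaky combinations by name
          extra-versions: ""                             # space-separated JSON files describing extra images
          s3-cache-bucket: my-container-layer-cache      # placeholder bucket
          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
```

If the S3 inputs are left empty, the action still runs the tests but skips pushing the image cache, since the credentials step above only sets `PUSH_CACHE=true` when both key inputs are provided.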

View File

@@ -2,21 +2,21 @@ on:
workflow_dispatch:
pull_request:
paths:
- 'multidim-interop/**'
- 'transport-interop/**'
push:
branches:
- "master"
paths:
- 'multidim-interop/**'
- 'transport-interop/**'
name: libp2p multidimensional interop test
name: libp2p transport interop test
jobs:
run-multidim-interop:
run-transport-interop:
runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
steps:
- uses: actions/checkout@v3
- uses: ./.github/actions/run-interop-ping-test
- uses: ./.github/actions/run-transport-interop-test
with:
s3-cache-bucket: libp2p-by-tf-aws-bootstrap
s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
@@ -27,7 +27,7 @@ jobs:
steps:
- uses: actions/checkout@v3
# Purposely not using secrets to replicate how forks will behave.
- uses: ./.github/actions/run-interop-ping-test
- uses: ./.github/actions/run-transport-interop-test
with:
# It's okay to not run the tests, we only care to check if the tests build without cache.
test-filter: '"no test matches this, skip all"'

View File

@@ -3,7 +3,7 @@ name: Update Badge
on:
workflow_run:
workflows:
- libp2p multidimensional interop test
- libp2p transport interop test
types:
- completed
branches:
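
`workflow_run` triggers match the upstream workflow by its display `name`, not by file path, so the badge workflow only keeps firing if this list carries the new name verbatim. The relevant trigger, reconstructed from the hunk above (the truncated `branches` filter is omitted here):

```yaml
# Reconstruction of the trigger shown in the hunk above; branches filter omitted.
on:
  workflow_run:
    workflows:
      - libp2p transport interop test   # must equal the renamed workflow's `name:` exactly
    types:
      - completed
```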

View File

@@ -1,17 +1,17 @@
# Interoperability/end to end test-plans & performance benchmarking for libp2p
[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20multidimensional%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/6190795509/attempts/1#summary-16807793452)
[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20transport%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/6190795509/attempts/1#summary-16807793452)
[![Made by Protocol Labs](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)
This repository contains:
* interoperability and end to end tests for libp2p modules across different implementations and versions
* interoperability tests for libp2p's transport-layer modules across different implementations and versions
* components to run performance benchmarks for different libp2p implementations
## Multidimensional Interop
## Transport Interop
### Specs
Please see our first specification for interoperability tests between transports, multiplexers, and secure channels here: [Interoperability Test Specs](./multidim-interop/README.md)
Please see our first specification for interoperability tests between transports, multiplexers, and secure channels here: [Transport Interoperability Test Specs](transport-interop/README.md)
More specs to come soon!

View File

@@ -1,7 +1,7 @@
# Interoperability test
# Transport Interoperability tests
This tests that different libp2p implementations can communicate with each other
on each of their supported capabilities.
on each of their supported (transport) capabilities.
Each version of libp2p is defined in `versions.ts`, where it declares
its capabilities along with the ID of its container image.

View File

@@ -7,7 +7,7 @@ import * as path from 'path';
import * as child_process from 'child_process';
import ignore, { Ignore } from 'ignore'
const multidimInteropDir = path.join(scriptDir, '..')
const root = path.join(scriptDir, '..')
const arch = child_process.execSync('docker info -f "{{.Architecture}}"').toString().trim();
enum Mode {
@@ -28,13 +28,13 @@ switch (modeStr) {
}
(async () => {
for (const implFamily of fs.readdirSync(path.join(multidimInteropDir, 'impl'))) {
for (const implFamily of fs.readdirSync(path.join(root, 'impl'))) {
const ig = ignore()
addGitignoreIfPresent(ig, path.join(multidimInteropDir, ".gitignore"))
addGitignoreIfPresent(ig, path.join(multidimInteropDir, "..", ".gitignore"))
addGitignoreIfPresent(ig, path.join(root, ".gitignore"))
addGitignoreIfPresent(ig, path.join(root, "..", ".gitignore"))
const implFamilyDir = path.join(multidimInteropDir, 'impl', implFamily)
const implFamilyDir = path.join(root, 'impl', implFamily)
addGitignoreIfPresent(ig, path.join(implFamilyDir, ".gitignore"))
for (const impl of fs.readdirSync(implFamilyDir)) {