Mirror of https://github.com/status-im/libp2p-test-plans.git, synced 2025-02-20 02:28:11 +00:00

Merge remote-tracking branch 'upstream/master'

* upstream/master:
  chore: update the link to the interop dashboard [skip ci]
  chore: parameterise s3 build cache setup (#465)
  chore: Create funding.json

commit e7d9b15158
@@ -86,6 +86,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
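The composite actions now receive the S3 settings as explicit inputs and forward them to the cache script through env:, instead of the script falling back to a hardcoded bucket. A minimal sketch of how a consumer on the script side can read these variables (readCacheConfig is a hypothetical helper for illustration; the real consumer is the cache.ts change further down):

// Sketch only: consume the variables injected by the env: block above.
// readCacheConfig is a hypothetical name, not code from this repository.
function readCacheConfig(): { bucket: string; region?: string } {
    const bucket = process.env.AWS_BUCKET;
    if (!bucket) {
        // Fail loudly rather than silently using a default bucket,
        // matching the behaviour introduced in cache.ts below.
        throw new Error("AWS_BUCKET not set");
    }
    return { bucket, region: process.env.AWS_REGION };
}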
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
@@ -78,6 +78,11 @@ runs:
 
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
 
.github/workflows/hole-punch-interop.yml (vendored, 6 changed lines)
@@ -28,7 +28,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-interop-hole-punch-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
.github/workflows/transport-interop.yml (vendored, 8 changed lines)
@@ -18,12 +18,12 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-transport-interop-test
        with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
   build-without-secrets:
-    runs-on: ubuntu-latest
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
     steps:
       - uses: actions/checkout@v3
       # Purposely not using secrets to replicate how forks will behave.
funding.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "opRetro": {
+    "projectId": "0x966804cb492e1a4bde5d781a676a44a23d69aa5dd2562fa7a4f95bb606021c8b"
+  }
+}
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${dir}: ${imageID}`)
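This hunk swaps the unauthenticated HTTP HEAD probe for an aws s3 ls call, which signs the request with the credentials the actions now inject, and whose non-zero exit on a missing key makes the catch branch double as the cache-miss path. A standalone sketch of the same pattern (cacheExists is a hypothetical helper for illustration, not code from this repository):

import * as child_process from 'child_process';

// Hypothetical helper mirroring the existence check above: `aws s3 ls`
// exits non-zero when the object is missing, so the thrown error is
// the cache-miss signal.
function cacheExists(bucket: string, key: string): boolean {
    try {
        child_process.execSync(`aws s3 ls s3://${bucket}/${key}`, { stdio: 'ignore' });
        return true;
    } catch {
        return false;
    }
}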
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     console.log("Loading cache")
     let cacheHit = false
     try {
-        // Check if the cache exists
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-        if (res.ok) {
-            const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-            if (loadedImageId) {
-                console.log(`Cache hit for ${loadedImageId}`);
-                fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
-                cacheHit = true
-            }
-        } else {
-            console.log("Cache not found")
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+        const archivePath = path.join(cachePath, 'archive.tar.gz')
+        const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+        if (loadedImageId) {
+            console.log(`Cache hit for ${loadedImageId}`);
+            fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
+            cacheHit = true
         }
     } catch (e) {
         console.log("Cache not found:", e)
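The load path likewise moves from piping curl into docker image load to downloading the archive into a temp directory with the AWS CLI and loading it from disk. A self-contained sketch of that flow under the same assumptions (loadImageFromCache is a hypothetical name; error handling stays with the caller, as in the script's try/catch above):

import * as child_process from 'child_process';
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Hypothetical standalone version of the load path above: fetch the
// archive with a signed `aws s3 cp` (so private buckets work), load it
// into Docker, then parse the image ID out of Docker's output.
function loadImageFromCache(bucket: string, key: string): string | undefined {
    const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'));
    const archivePath = path.join(cachePath, 'archive.tar.gz');
    child_process.execSync(`aws s3 cp s3://${bucket}/${key} ${archivePath}`);
    const msg = child_process.execSync(`docker image load -i ${archivePath}`).toString();
    // Docker prints "Loaded image: <tag>" or "Loaded image ID: <sha256>".
    return msg.match(/Loaded image( ID)?: (.*)/)?.[2]?.trim();
}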
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -65,10 +66,14 @@ switch (modeStr) {
     if (mode == Mode.PushCache) {
        console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(implFolder, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${impl}: ${imageID}`)
@@ -85,18 +90,17 @@ switch (modeStr) {
     console.log("Loading cache")
     let cacheHit = false
     try {
-        // Check if the cache exists
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
-            const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-            if (loadedImageId) {
-                console.log(`Cache hit for ${loadedImageId}`);
-                fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
-                cacheHit = true
-            }
-        } else {
-            console.log("Cache not found")
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+        const archivePath = path.join(cachePath, 'archive.tar.gz')
+        const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+        if (loadedImageId) {
+            console.log(`Cache hit for ${loadedImageId}`);
+            fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
+            cacheHit = true
         }
     } catch (e) {
         console.log("Cache not found:", e)