Add blockscout

Alejandro Cabeza Romero, 2025-04-24 13:36:57 +02:00 (committed by Giacomo Pasini)
parent 4c348f89b8
commit 9ee3c0dd52
2972 changed files with 462546 additions and 0 deletions


@@ -0,0 +1,626 @@
version: 2
jobs:
build:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3-node-browsers
environment:
MIX_ENV: test
# match POSTGRES_PASSWORD for postgres image below
PGPASSWORD: postgres
# match POSTGRES_USER for postgres image below
PGUSER: postgres
working_directory: ~/app
steps:
- run: sudo apt-get update; sudo apt-get -y install autoconf build-essential libgmp3-dev libtool
- checkout
- run:
command: ./bin/install_chrome_headless.sh
no_output_timeout: 2400
- run: mix local.hex --force
- run: mix local.rebar --force
- run:
name: "ELIXIR_VERSION.lock"
command: echo "${ELIXIR_VERSION}" > ELIXIR_VERSION.lock
- run:
name: "OTP_VERSION.lock"
command: echo "${OTP_VERSION}" > OTP_VERSION.lock
- restore_cache:
keys:
- v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
- v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
- v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
- run: mix deps.get
- restore_cache:
keys:
- v8-npm-install-{{ .Branch }}-{{ checksum "apps/block_scout_web/assets/package-lock.json" }}
- v8-npm-install-{{ .Branch }}
- v8-npm-install
- run:
command: npm install
working_directory: "apps/explorer"
- save_cache:
key: v3-npm-install-{{ .Branch }}-{{ checksum "apps/explorer/package-lock.json" }}
paths: "apps/explorer/node_modules"
- save_cache:
key: v3-npm-install-{{ .Branch }}
paths: "apps/explorer/node_modules"
- save_cache:
key: v3-npm-install
paths: "apps/explorer/node_modules"
- run:
command: npm install
working_directory: "apps/block_scout_web/assets"
- save_cache:
key: v8-npm-install-{{ .Branch }}-{{ checksum "apps/block_scout_web/assets/package-lock.json" }}
paths: "apps/block_scout_web/assets/node_modules"
- save_cache:
key: v8-npm-install-{{ .Branch }}
paths: "apps/block_scout_web/assets/node_modules"
- save_cache:
key: v8-npm-install
paths: "apps/block_scout_web/assets/node_modules"
- run: mix compile
- run:
command: make
working_directory: "deps/ex_secp256k1"
# `deps` needs to be cached with `_build` because `_build` will symlink into `deps`
- save_cache:
key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
paths:
- deps
- _build
- save_cache:
key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
paths:
- deps
- _build
- save_cache:
key: v8-mix-compile-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
paths:
- deps
- _build
- run:
name: Build assets
command: node node_modules/webpack/bin/webpack.js --mode development
working_directory: "apps/block_scout_web/assets"
- persist_to_workspace:
root: .
paths:
- .circleci
- .credo.exs
- .dialyzer_ignore.exs
- .formatter.exs
- .git
- .gitignore
- ELIXIR_VERSION.lock
- Gemfile
- Gemfile.lock
- OTP_VERSION.lock
- _build
- apps
- bin
- config
- deps
- doc
- mix.exs
- mix.lock
- appspec.yml
- rel
check_formatted:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix format --check-formatted
credo:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- run: mix credo
deploy_aws:
docker:
# Ensure .tool-versions matches
- image: circleci/python:2.7-stretch
working_directory: ~/app
steps:
- attach_workspace:
at: .
- add_ssh_keys:
fingerprints:
- "c4:fd:a8:f8:48:a8:09:e5:3e:be:30:62:4d:6f:6f:36"
- run:
name: Deploy to AWS
command: bin/deploy
dialyzer:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- restore_cache:
keys:
- v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
- v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
- v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
- run:
name: Unpack PLT cache
command: |
mkdir -p _build/test
cp plts/dialyxir*.plt _build/test/ || true
mkdir -p ~/.mix
cp plts/dialyxir*.plt ~/.mix/ || true
- run: mix dialyzer --plt
- run:
name: Pack PLT cache
command: |
mkdir -p plts
cp _build/test/dialyxir*.plt plts/
cp ~/.mix/dialyxir*.plt plts/
- save_cache:
key: v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.lock" }}
paths:
- plts
- save_cache:
key: v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}-{{ checksum "mix.exs" }}
paths:
- plts
- save_cache:
key: v8-mix-dialyzer-{{ checksum "OTP_VERSION.lock" }}-{{ checksum "ELIXIR_VERSION.lock" }}
paths:
- plts
- run: mix dialyzer --halt-exit-status
eslint:
docker:
# Ensure .tool-versions matches
- image: circleci/node:12.18.2-browsers-legacy
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run:
name: ESLint
command: ./node_modules/.bin/eslint --format=junit --output-file="test/eslint/junit.xml" js/**
working_directory: apps/block_scout_web/assets
- store_test_results:
path: apps/block_scout_web/assets/test
gettext:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- run:
name: Check for missed translations
command: |
mix gettext.extract --merge | tee stdout.txt
! grep "Wrote " stdout.txt
working_directory: "apps/block_scout_web"
- store_artifacts:
path: apps/block_scout_web/priv/gettext
jest:
docker:
# Ensure .tool-versions matches
- image: circleci/node:12.18.2-browsers-legacy
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run:
name: Jest
command: ./node_modules/.bin/jest
working_directory: apps/block_scout_web/assets
release:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: prod
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- run: mix local.rebar --force
- run: MIX_ENV=prod mix release
- run:
name: Collecting artifacts
command: |
find -name 'blockscout.tar.gz' -exec sh -c 'mkdir -p ci_artifact && cp "$@" ci_artifact/ci_artifact_blockscout.tar.gz' _ {} +
when: always
- store_artifacts:
name: Uploading CI artifacts
path: ci_artifact/ci_artifact_blockscout.tar.gz
destination: ci_artifact_blockscout.tar.gz
sobelow:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run: mix local.hex --force
- run:
name: Scan explorer for vulnerabilities
command: mix sobelow --config
working_directory: "apps/explorer"
- run:
name: Scan block_scout_web for vulnerabilities
command: mix sobelow --config
working_directory: "apps/block_scout_web"
# test_geth_http_websocket:
# docker:
# # Ensure .tool-versions matches
# - image: circleci/elixir:1.10.3-node-browsers
# environment:
# MIX_ENV: test
# # match POSTGRES_PASSWORD for postgres image below
# PGPASSWORD: postgres
# # match POSTGRES_USER for postgres image below
# PGUSER: postgres
# ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Geth.HTTPWebSocket"
# ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Geth"
# - image: circleci/postgres:10.10-alpine
# environment:
# # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
# POSTGRES_DB: explorer_test
# # match PGPASSWORD for elixir image above
# POSTGRES_PASSWORD: postgres
# # match PGUSER for elixir image above
# POSTGRES_USER: postgres
# working_directory: ~/app
# steps:
# - attach_workspace:
# at: .
# - run:
# command: ./bin/install_chrome_headless.sh
# no_output_timeout: 2400
# - run: mix local.hex --force
# - run: mix local.rebar --force
# - run:
# name: Wait for DB
# command: dockerize -wait tcp://localhost:5432 -timeout 1m
# - run:
# name: mix test --exclude no_geth
# command: |
# # Don't submit coverage report for forks, but let the build succeed
# if [[ -z "$COVERALLS_REPO_TOKEN" ]]; then
# mix coveralls.html --exclude no_geth --parallel --umbrella
# else
# mix coveralls.circle --exclude no_geth --parallel --umbrella ||
# # if mix failed, then coveralls_merge won't run, so signal done here and return original exit status
# (retval=$? && curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d "payload[build_num]=$CIRCLE_WORKFLOW_WORKSPACE_ID&payload[status]=done" && return $retval)
# fi
# - store_artifacts:
# path: cover/excoveralls.html
# - store_test_results:
# path: _build/test/junit
# test_geth_mox:
# docker:
# # Ensure .tool-versions matches
# - image: circleci/elixir:1.10.3-node-browsers
# environment:
# MIX_ENV: test
# # match POSTGRES_PASSWORD for postgres image below
# PGPASSWORD: postgres
# # match POSTGRES_USER for postgres image below
# PGUSER: postgres
# ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Geth.Mox"
# ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
# - image: circleci/postgres:10.10-alpine
# environment:
# # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
# POSTGRES_DB: explorer_test
# # match PGPASSWORD for elixir image above
# POSTGRES_PASSWORD: postgres
# # match PGUSER for elixir image above
# POSTGRES_USER: postgres
# working_directory: ~/app
# steps:
# - attach_workspace:
# at: .
# - run:
# command: ./bin/install_chrome_headless.sh
# no_output_timeout: 2400
# - run: mix local.hex --force
# - run: mix local.rebar --force
# - run:
# name: Wait for DB
# command: dockerize -wait tcp://localhost:5432 -timeout 1m
# - run:
# name: mix test --exclude no_geth
# command: |
# # Don't submit coverage report for forks, but let the build succeed
# if [[ -z "$COVERALLS_REPO_TOKEN" ]]; then
# mix coveralls.html --exclude no_geth --parallel --umbrella
# else
# mix coveralls.circle --exclude no_geth --parallel --umbrella ||
# # if mix failed, then coveralls_merge won't run, so signal done here and return original exit status
# (retval=$? && curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d "payload[build_num]=$CIRCLE_WORKFLOW_WORKSPACE_ID&payload[status]=done" && return $retval)
# fi
# - store_artifacts:
# path: cover/excoveralls.html
# - store_test_results:
# path: _build/test/junit
# test_nethermind_http_websocket:
# docker:
# # Ensure .tool-versions matches
# - image: circleci/elixir:1.10.3-node-browsers
# environment:
# MIX_ENV: test
# # match POSTGRES_PASSWORD for postgres image below
# PGPASSWORD: postgres
# # match POSTGRES_USER for postgres image below
# PGUSER: postgres
# ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.HTTPWebSocket"
# ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Nethermind"
# - image: circleci/postgres:10.10-alpine
# environment:
# # Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
# POSTGRES_DB: explorer_test
# # match PGPASSWORD for elixir image above
# POSTGRES_PASSWORD: postgres
# # match PGUSER for elixir image above
# POSTGRES_USER: postgres
# working_directory: ~/app
# steps:
# - attach_workspace:
# at: .
# - run:
# command: ./bin/install_chrome_headless.sh
# no_output_timeout: 2400
# - run: mix local.hex --force
# - run: mix local.rebar --force
# - run:
# name: Wait for DB
# command: dockerize -wait tcp://localhost:5432 -timeout 1m
# - run:
# name: mix test --exclude no_nethermind
# command: |
# # Don't submit coverage report for forks, but let the build succeed
# if [[ -z "$COVERALLS_REPO_TOKEN" ]]; then
# mix coveralls.html --exclude no_nethermind --parallel --umbrella
# else
# mix coveralls.circle --exclude no_nethermind --parallel --umbrella ||
# # if mix failed, then coveralls_merge won't run, so signal done here and return original exit status
# (retval=$? && curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d "payload[build_num]=$CIRCLE_WORKFLOW_WORKSPACE_ID&payload[status]=done" && return $retval)
# fi
# - store_artifacts:
# path: cover/excoveralls.html
# - store_test_results:
# path: _build/test/junit
test_nethermind_mox:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3-node-browsers
environment:
MIX_ENV: test
# match POSTGRES_PASSWORD for postgres image below
PGPASSWORD: postgres
# match POSTGRES_USER for postgres image below
PGUSER: postgres
ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox"
ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
- image: circleci/postgres:10.10-alpine
environment:
# Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
POSTGRES_DB: explorer_test
# match PGPASSWORD for elixir image above
POSTGRES_PASSWORD: postgres
# match PGUSER for elixir image above
POSTGRES_USER: postgres
working_directory: ~/app
steps:
- attach_workspace:
at: .
- run:
command: ./bin/install_chrome_headless.sh
no_output_timeout: 2400
- run: mix local.hex --force
- run: mix local.rebar --force
- run:
name: Wait for DB
command: dockerize -wait tcp://localhost:5432 -timeout 1m
- run:
name: mix test --exclude no_nethermind
command: |
# Don't submit coverage report for forks, but let the build succeed
if [[ -z "$COVERALLS_REPO_TOKEN" ]]; then
mix coveralls.html --exclude no_nethermind --parallel --umbrella
else
mix coveralls.circle --exclude no_nethermind --parallel --umbrella ||
# if mix failed, then coveralls_merge won't run, so signal done here and return original exit status
(retval=$? && curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d "payload[build_num]=$CIRCLE_WORKFLOW_WORKSPACE_ID&payload[status]=done" && return $retval)
fi
- store_artifacts:
path: cover/excoveralls.html
- store_test_results:
path: _build/test/junit
coveralls_merge:
docker:
# Ensure .tool-versions matches
- image: circleci/elixir:1.10.3
environment:
MIX_ENV: test
steps:
- run:
name: Tell coveralls.io build is done
command: curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d "payload[build_num]=$CIRCLE_WORKFLOW_WORKSPACE_ID&payload[status]=done"
workflows:
version: 2
primary:
jobs:
- build
- check_formatted:
requires:
- build
# This unfortunately will only fire if all the tests pass because of how `requires` works
- coveralls_merge:
requires:
# - test_nethermind_http_websocket
- test_nethermind_mox
# - test_geth_http_websocket
# - test_geth_mox
- credo:
requires:
- build
- deploy_aws:
filters:
branches:
only:
- production
- staging
- /deploy-[A-Za-z0-9]+$/
requires:
- check_formatted
- credo
- eslint
- jest
- sobelow
# - test_nethermind_http_websocket
- test_nethermind_mox
# - test_geth_http_websocket
# - test_geth_mox
- dialyzer:
requires:
- build
- eslint:
requires:
- build
- gettext:
requires:
- build
- jest:
requires:
- build
- release:
requires:
- build
- sobelow:
requires:
- build
# - test_nethermind_http_websocket:
# requires:
# - build
- test_nethermind_mox:
requires:
- build
# - test_geth_http_websocket:
# requires:
# - build
# - test_geth_mox:
# requires:
# - build


@@ -0,0 +1,151 @@
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any exec using `mix credo -C <name>`. If no exec name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: ["lib/", "src/", "web/", "apps/*/lib/**/*.{ex,exs}"],
excluded: [
~r"/_build/",
~r"/deps/",
~r"/node_modules/",
~r"/apps/block_scout_web/lib/block_scout_web.ex"
]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: ["apps/utils/lib/credo/**/*.ex"],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: true,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
# outdated by formatter in Elixir 1.6. See https://github.com/rrrene/credo/issues/505
{Credo.Check.Consistency.LineEndings, false},
{Credo.Check.Consistency.SpaceAroundOperators, false},
{Credo.Check.Consistency.SpaceInParentheses, false},
{Credo.Check.Consistency.TabsOrSpaces, false},
{Credo.Check.Readability.LargeNumbers, false},
{Credo.Check.Readability.MaxLineLength, false},
{Credo.Check.Readability.ParenthesesInCondition, false},
{Credo.Check.Readability.RedundantBlankLines, false},
{Credo.Check.Readability.Semicolons, false},
{Credo.Check.Readability.SpaceAfterCommas, false},
{Credo.Check.Readability.TrailingBlankLine, false},
{Credo.Check.Readability.TrailingWhiteSpace, false},
# outdated by lazy Logger in Elixir 1.7. See https://elixir-lang.org/blog/2018/07/25/elixir-v1-7-0-released/
{Credo.Check.Warning.LazyLogging, false},
# not handled by formatter
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.ParameterPatternMatching},
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
excluded_namespaces: ~w(Block Blocks Import Runner Socket SpandexDatadog Task),
excluded_lastnames:
~w(Address DateTime Exporter Fetcher Full Instrumenter Logger Monitor Name Number Repo Spec Time Unit),
priority: :low},
# For some checks, you can also set other parameters
#
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
#
{Credo.Check.Design.DuplicatedCode, excluded_macros: [], mass_threshold: 80},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, exit_status: 0},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.LongQuoteBlocks},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
{Credo.Check.Warning.RaiseInsideRescue},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
# TODO reenable before merging optimized-indexer branch
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem},
{Credo.Check.Refactor.VariableRebinding},
{Credo.Check.Warning.MapGetUnsafePass},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
# Custom checks can be created using `mix credo.gen.check`.
{Utils.Credo.Checks.CompileEnvUsage}
#
]
}
]
}


@@ -0,0 +1,50 @@
# This is a copy of https://github.com/blockscout/devcontainer-elixir/blob/main/Dockerfile,
# so after this file has been tested successfully, the original one must be updated as well.
ARG VARIANT="1.17.3-erlang-27.1-debian-bullseye-20240926"
FROM hexpm/elixir:${VARIANT}
# ARGs declared before FROM are not persisted beyond the FROM instruction.
# They must be redeclared here to be available in the rest of the Dockerfile.
ARG PHOENIX_VERSION="1.7.10"
ARG NODE_VERSION="20"
# This Dockerfile adds a non-root user with sudo access. Update the “remoteUser” property in
# devcontainer.json to use it. More info: https://aka.ms/vscode-remote/containers/non-root-user.
ARG USERNAME=vscode
ARG USER_UID=1000
ARG USER_GID=$USER_UID
# Options for common package install script
ARG INSTALL_ZSH="true"
ARG UPGRADE_PACKAGES="true"
ARG COMMON_SCRIPT_SOURCE="https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/common-debian.sh"
# Options for setup nodejs
ARG NODE_SCRIPT_SOURCE="https://raw.githubusercontent.com/microsoft/vscode-dev-containers/main/script-library/node-debian.sh"
ENV NVM_DIR=/usr/local/share/nvm
ENV NVM_SYMLINK_CURRENT=true
ENV PATH=${NVM_DIR}/current/bin:${PATH}
# Install needed packages and setup non-root user. Use a separate RUN statement to add your own dependencies.
RUN apt-get update \
&& export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends curl ca-certificates 2>&1 \
&& curl -sSL ${COMMON_SCRIPT_SOURCE} -o /tmp/common-setup.sh \
&& /bin/bash /tmp/common-setup.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" \
#
# Install Node.js for use with web applications
&& curl -sSL ${NODE_SCRIPT_SOURCE} -o /tmp/node-setup.sh \
&& /bin/bash /tmp/node-setup.sh "${NVM_DIR}" "${NODE_VERSION}" "${USERNAME}" \
&& npm install -g cspell@latest \
#
# Install dependencies
&& apt-get install -y build-essential inotify-tools \
#
# Clean up
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/* /tmp/common-setup.sh /tmp/node-setup.sh
RUN su ${USERNAME} -c "mix local.hex --force \
&& mix local.rebar --force \
&& mix archive.install --force hex phx_new ${PHOENIX_VERSION}"


@@ -0,0 +1,200 @@
# Blockscout Backend Development with VSCode Devcontainers and GitHub Codespaces
## Table of Contents
1. [Motivation](#motivation)
2. [Setting Up VSCode Devcontainer Locally](#setting-up-vscode-devcontainer-locally)
3. [Using GitHub Codespaces in the Browser](#using-github-codespaces-in-the-browser)
4. [Configuring Postgres DB Access](#configuring-postgres-db-access)
5. [Developing Blockscout Backend](#developing-blockscout-backend)
6. [Upgrading Elixir Version](#upgrading-elixir-version)
7. [Contributing](#contributing)
## Motivation
Setting up a local development environment for Blockscout can be time-consuming and error-prone. This devcontainer setup streamlines the process by providing a pre-configured environment with all necessary dependencies. It ensures consistency across development environments, reduces setup time, and allows developers to focus on coding rather than configuration.
Key benefits include:
- Pre-configured environment with Elixir, Phoenix, and Node.js
- Integrated PostgreSQL database
- Essential VS Code extensions pre-installed
- Simplified database management
- Consistent development environment across team members
## Setting Up VSCode Devcontainer Locally
1. Clone the Blockscout repository:
```
git clone https://github.com/blockscout/blockscout.git
cd blockscout
```
2. Open the project in VS Code:
```
code .
```
3. Before re-opening in the container, you may find it useful to configure SSH authorization. To do this:
a. Ensure you have SSH access to GitHub configured on your local machine.
b. Open `.devcontainer/devcontainer.json`.
c. Uncomment the `mounts` section:
```json
"mounts": [
"source=${localEnv:HOME}/.ssh/config,target=/home/vscode/.ssh/config,type=bind,consistency=cached",
"source=${localEnv:HOME}/.ssh/id_rsa,target=/home/vscode/.ssh/id_rsa,type=bind,consistency=cached"
],
```
d. Adjust the paths if your SSH keys are stored in a different location.
e. Use `git update-index --assume-unchanged .devcontainer/devcontainer.json` to prevent the changes to `devcontainer.json` from appearing in `git status` and VS Code's Source Control. To undo the changes, use `git update-index --no-assume-unchanged .devcontainer/devcontainer.json`.
4. When prompted, click "Reopen in Container". If not prompted, press `F1`, type "Remote-Containers: Reopen in Container", and press Enter.
5. VS Code will build the devcontainer. This process includes:
- Pulling the base Docker image
- Installing specified VS Code extensions
- Setting up the PostgreSQL database
- Installing project dependencies
This may take several minutes the first time.
6. Once the devcontainer is built, you'll be working inside the containerized environment.
7. If you modified the `devcontainer.json` file in step 3, you may want to execute `git update-index --assume-unchanged .devcontainer/devcontainer.json` in a terminal within your devcontainer to prevent the changes to `devcontainer.json` from appearing in `git status` and VS Code's Source Control.
### Additional Setup for Cursor.ai Users
If you're using Cursor.ai instead of VSCode, you may need to perform some additional setup steps. Please note that these changes will not persist after reloading the devcontainer, so you may need to repeat these steps each time you start a new session.
1. **Git Configuration**: You may encounter issues when trying to perform Git operations from the terminal or the "Source Control" tab. To resolve this, set up your Git configuration inside the devcontainer:
a. Open a terminal in your devcontainer.
b. Set your Git username:
```
git config --global user.name "Your Name"
```
c. Set your Git email:
```
git config --global user.email "your.email@example.com"
```
Replace "Your Name" and "your.email@example.com" with your actual name and email associated with your GitHub account.
2. **ElixirLS: Elixir support and debugger** (JakeBecker.elixir-ls): This extension may not be automatically installed in Cursor.ai, even though it's specified in the devcontainer configuration. To install it manually:
a. Open the Extensions tab.
b. Search for "JakeBecker.elixir-ls".
c. Look for the extension "ElixirLS: Elixir support and debugger" by JakeBecker and click "Install".
Remember, you may need to repeat these steps each time you start a new Cursor.ai session with the devcontainer.
### Signing in to GitHub for Pull Request Extension
1. In the devcontainer, click on the GitHub icon in the Primary sidebar.
2. Click on "Sign in to GitHub" and follow the prompts to authenticate.
## Using GitHub Codespaces in the Browser
To open the project in GitHub Codespaces:
1. Navigate to the Blockscout repository on GitHub.
2. Switch to the branch you want to work on.
3. Click the "Code" button.
4. Instead of clicking "Create codespace on [branch]" (which would use the default machine type that may not be sufficient for this Elixir-based project), click on the three dots (...) next to it.
5. Select "New with options".
6. Choose the "4-core/16GB RAM" machine type for optimal performance.
7. Click "Create codespace".
This will create a new Codespace with the specified resources, ensuring adequate performance for the Elixir-based project.
Note: After the container opens, you may see an error about the inability to use "GitHub Copilot Chat". This Copilot functionality will not be accessible in the Codespace environment.
## Configuring Postgres DB Access
To configure access to the PostgreSQL database using the VS Code extension:
1. Click on the PostgreSQL icon in the Primary sidebar.
2. Click "+" (Add Connection) in the PostgreSQL explorer.
3. Use the following details:
- Host: `db`
- User: `postgres`
- Password: `postgres`
- Port: `5432`
- Use an ssl connection: "Standard connection"
- Database: `app`
- The display name: "<some name>"
These credentials are derived from the `DATABASE_URL` in the `bs` script.
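The connection can also be smoke-tested from a terminal inside the devcontainer. This is a minimal sketch and assumes the `psql` client is available in the container image:
```
# Hypothetical connectivity check; the credentials match the DATABASE_URL used by the bs script
psql "postgresql://postgres:postgres@db:5432/app" -c '\conninfo'
```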
## Developing Blockscout Backend
### Configuration
Before running the Blockscout server, you need to set up the configuration:
1. Copy the `.devcontainer/.blockscout_config.example` file to `.devcontainer/.blockscout_config`.
2. Adjust the settings in `.devcontainer/.blockscout_config` as needed for your development environment.
For a comprehensive list of environment variables that can be set in this configuration file, refer to the [Blockscout documentation](https://docs.blockscout.com/setup/env-variables).
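For example, from the repository root:
```
cp .devcontainer/.blockscout_config.example .devcontainer/.blockscout_config
# then edit .devcontainer/.blockscout_config to adjust the variables for your environment
```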
### Using the `bs` Script
The `bs` script in `.devcontainer/bin/` helps orchestrate common development tasks. Here are some key commands:
- Initialize the project: `bs --init`
- Initialize or re-initialize the database: `bs --db-init` (This will remove all data and tables from the DB and re-create the tables)
- Run the server: `bs`
- Run the server without syncing: `bs --no-sync`
- Recompile the project: `bs --recompile` (Use this when new dependencies arrive after a merge or when switching to another `CHAIN_TYPE`)
- Run various checks: `bs --spellcheck`, `bs --dialyzer`, `bs --credo`, `bs --format`
For a full list of options, run `bs --help`.
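As an illustrative first-run sequence combining the commands above:
```
bs --init      # one-time project directory initialization
bs --db-init   # drop and re-create the database tables
bs             # start the backend server (or `bs --no-sync` for API-only mode)
```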
### Interacting with the Blockscout API
For local devcontainer setups (not applicable to GitHub Codespaces), you can use API testing tools like Postman or Insomnia on your host machine to interact with the Blockscout API running in the container:
1. Ensure the Blockscout server is running in the devcontainer.
2. In the API testing tool on your host machine, use `http://127.0.0.1:4000` as the base URL.
3. Example endpoint: `GET http://127.0.0.1:4000/api/v2/blocks`
This allows testing API endpoints directly from your host machine while the server runs in the container.
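For example, the sample endpoint above can also be exercised with curl from the host:
```
curl http://127.0.0.1:4000/api/v2/blocks
```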
### Troubleshooting
If you face issues with dependency compilation or dialyzer after container creation:
1. Check for untracked files: `git ls-files --others`
2. Remove compilation artifacts or generated files if present.
3. For persistent issues, consider cleaning all untracked files (use with caution):
```
git clean -fdX
bs --recompile
```
This ensures a clean compilation environment within the container.
## Upgrading Elixir Version
To upgrade the Elixir version:
1. Open `.devcontainer/Dockerfile`.
2. Update the `VARIANT` argument with the desired Elixir version.
3. Rebuild the devcontainer.
Note: Ensure that the version you choose is compatible with the project dependencies.
After testing the new Elixir version, propagate the corresponding changes in the Dockerfile to the repo https://github.com/blockscout/devcontainer-elixir. Once a new release tag is published there and a new docker image `ghcr.io/blockscout/devcontainer-elixir` appears in the GitHub registry, modify the `docker-compose.yml` file in the `.devcontainer` directory to reflect the proper docker image tag.
## Contributing
When contributing changes that require additional checks for specific blockchain types:
1. Open `.devcontainer/bin/chain-specific-checks`.
2. Add your checks under the appropriate `CHAIN_TYPE` case.
3. Ensure your checks exit with a non-zero code if unsuccessful.
Remember to document any new checks or configuration options in this README.
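As a sketch only (the chain label and the `MYCHAIN_RPC_URL` variable are hypothetical), a new branch in the `case` statement of `chain-specific-checks` might look like:
```
"mychain")
  echo "Mychain specific checks"
  # check_server_availability and check_server_accessibility (from utils) exit
  # with a non-zero code themselves if the endpoint is unreachable
  check_server_availability ${MYCHAIN_RPC_URL}
  check_server_accessibility ${MYCHAIN_RPC_URL}
  ;;
```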


@@ -0,0 +1,320 @@
#!/bin/bash
# Blockscout Development Helper Script
#
# This script provides a unified interface for common development tasks when working
# with the Blockscout backend server. It handles environment configuration, project
# initialization, and various development workflows.
#
# Main usage scenarios:
# 1. Project Setup
# - Initialize project directory: bs --init
# - Setup/reset database: bs --db-init
#
# 2. Development Tasks
# - Run backend server: bs
# - Run server (API only): bs --no-sync
# - Compile/recompile changes: bs --compile
# - Recompile dependencies: bs --recompile
#
# 3. Code Quality
# - Run formatter: bs --format
# - Run static analysis: bs --dialyzer
# - Run code style checks: bs --credo
# - Run spell checker: bs --spellcheck
#
# 4. Documentation
# - Generate project docs: bs --docs
# - Show usage help: bs --help
#
# Environment:
# - Loads configuration from .devcontainer/.blockscout_config if present
# - Uses default DATABASE_URL if not specified
# - Supports chain-specific configurations via CHAIN_TYPE
source $(dirname $0)/utils
# Source and export environment variables related to the backend configuration
BLOCKSCOUT_CONFIG_FILE=".devcontainer/.blockscout_config"
if [ -f "./${BLOCKSCOUT_CONFIG_FILE}" ]; then
set -a # Automatically export all variables
source ./${BLOCKSCOUT_CONFIG_FILE}
set +a # Disable automatic export
else
echo "Warning: ${BLOCKSCOUT_CONFIG_FILE} file not found. Skipping configuration loading."
fi
if [ "${DATABASE_URL}" == "" ]; then
export DATABASE_URL="postgresql://postgres:postgres@db:5432/app"
fi
# Initialize variables
INIT=false
NO_SYNC=false
DB_INIT=false
COMPILE=false
RECOMPILE=false
SPELLCHECK=false
DIALYZER=false
CREDO=false
FORMAT=false
DOCS=false
HELP=false
# Define the help function
show_help() {
echo "Usage: bs [OPTION]"
echo "Orchestrate typical tasks when developing Blockscout backend server"
echo
echo "Options:"
echo " --help Show this help message and exit"
echo " --init Initialize the project directory"
echo " --format Run code formatter"
echo " --spellcheck Run spellcheck"
echo " --dialyzer Run dialyzer"
echo " --credo Run credo"
echo " --docs Generate documentation"
echo " --compile Compile/recompile changes"
echo " --recompile Re-fetch dependencies and recompile"
echo " --db-init (Re)initialize the database"
echo " --no-sync Run the server with disabled indexer, so only the API is available"
echo
echo "If no option is provided, the script will run the backend server."
}
# Define valid arguments
VALID_ARGS=(
"--help"
"--init"
"--no-sync"
"--db-init"
"--compile"
"--recompile"
"--spellcheck"
"--dialyzer"
"--credo"
"--format"
"--docs"
)
# Validate arguments
for arg in "$@"
do
if [[ ! " ${VALID_ARGS[@]} " =~ " ${arg} " ]]; then
echo "Error: Unknown argument '${arg}'"
echo
show_help
exit 1
fi
done
# Parse command line arguments
for arg in "$@"
do
case $arg in
--help)
HELP=true
shift # Remove --help from processing
;;
--init)
INIT=true
shift # Remove --init from processing
;;
--no-sync)
NO_SYNC=true
shift # Remove --no-sync from processing
;;
--db-init)
DB_INIT=true
shift # Remove --db-init from processing
;;
--compile)
COMPILE=true
shift # Remove --compile from processing
;;
--recompile)
RECOMPILE=true
shift # Remove --recompile from processing
;;
--spellcheck)
SPELLCHECK=true
shift # Remove --spellcheck from processing
;;
--dialyzer)
DIALYZER=true
shift # Remove --dialyzer from processing
;;
--credo)
CREDO=true
shift # Remove --credo from processing
;;
--format)
FORMAT=true
shift # Remove --format from processing
;;
--docs)
DOCS=true
shift # Remove --docs from processing
;;
esac
done
# If --help argument is passed, show help and exit
if [ "$HELP" = true ]; then
show_help
exit 0
fi
# Define the project directory initialization subroutine
initialize_project() {
if [ ! -d "apps/block_scout_web/priv/cert" ]; then
mix local.rebar --force
mix deps.compile
mix compile
# cd apps/block_scout_web/assets
# npm install && node_modules/webpack/bin/webpack.js --mode production
# cd -
# cd apps/explorer
# npm install
# cd -
cd apps/block_scout_web
mix phx.gen.cert blockscout blockscout.local
cd -
else
echo "Looks like the project directory is already initialized"
fi
}
# Define the initialization subroutine
initialize_db() {
echo "Initializing database. Step 1 of 2: Dropping database"
if OUTPUT=$(mix ecto.drop 2>&1); then
echo "Initializing database. Step 2 of 2: Creating database"
mix do ecto.create, ecto.migrate | grep Runn
else
echo "Failed to drop database. Initialization aborted."
echo "Error output:"
echo "$OUTPUT"
return 1
fi
}
# Define the compile subroutine
compile() {
mix compile
}
# Define the recompile subroutine
recompile() {
FALLBACK_APPS="block_scout_web ethereum_jsonrpc explorer indexer utils nft_media_handler"
APPS=$($(dirname $0)/extract_apps.exs) || APPS="$FALLBACK_APPS"
[ -z "$APPS" ] && APPS="$FALLBACK_APPS"
mix deps.clean $APPS
mix deps.get
mix deps.compile --force
}
# Define the spellcheck subroutine
spellcheck() {
cspell | less
}
# Define the dialyzer subroutine
dialyzer() {
if ! mix dialyzer; then
echo -e "\nDepending on the error you see, try either:"
echo " rm -rf 'priv/plts'"
echo " MIX_ENV=test bs --recompile"
return 1
fi
}
# Define the credo subroutine
credo() {
mix credo
}
# Define the format subroutine
format() {
mix format
}
# Define the generate_docs subroutine
generate_docs() {
mix docs
}
# If --init argument is passed, run the project dir initialization subroutine and exit
if [ "$INIT" = true ]; then
initialize_project
exit 0
fi
# If --db-init argument is passed, run the database initialization subroutine and exit
if [ "$DB_INIT" = true ]; then
initialize_db
exit 0
fi
# If --compile argument is passed, run the compile subroutine and exit
if [ "$COMPILE" = true ]; then
compile
exit 0
fi
# If --recompile argument is passed, run the recompile subroutine and exit
if [ "$RECOMPILE" = true ]; then
recompile
exit 0
fi
# If --spellcheck argument is passed, run the spellcheck subroutine and exit
if [ "$SPELLCHECK" = true ]; then
spellcheck
exit 0
fi
# If --dialyzer argument is passed, run the dialyzer subroutine and exit
if [ "$DIALYZER" = true ]; then
dialyzer
exit 0
fi
# If --credo argument is passed, run the credo subroutine and exit
if [ "$CREDO" = true ]; then
credo
exit 0
fi
# If --format argument is passed, run the format subroutine and exit
if [ "$FORMAT" = true ]; then
format
exit 0
fi
# If --docs argument is passed, run the generate_docs subroutine and exit
if [ "$DOCS" = true ]; then
generate_docs
exit 0
fi
if [ "${ETHEREUM_JSONRPC_HTTP_URL}" != "" ]; then
check_server_availability ${ETHEREUM_JSONRPC_HTTP_URL}
check_server_accessibility ${ETHEREUM_JSONRPC_HTTP_URL}
fi
if [ "${CHAIN_TYPE}" != "" -o "${CHAIN_TYPE}" != "ethereum" -o "${CHAIN_TYPE}" != "default" ]; then
source $(dirname $0)/chain-specific-checks
fi
if [ ! -d "apps/block_scout_web/priv/cert" ]; then
echo "Project directory is not initialized"
echo "Run 'bs --init' to initialize the project directory"
exit 1
fi
export DISABLE_INDEXER=${NO_SYNC}
mix phx.server


@@ -0,0 +1,17 @@
# This script is sourced from the main script, so an unsuccessful check must exit
# with a non-zero code to terminate the main script.
source $(dirname $0)/utils
# Run the appropriate checks based on CHAIN_TYPE
case "${CHAIN_TYPE}" in
"arbitrum")
echo "Arbitrum sepcific checks"
# if the check is not successful, exit with code 1
check_server_availability ${INDEXER_ARBITRUM_L1_RPC}
check_server_accessibility ${INDEXER_ARBITRUM_L1_RPC}
;;
*)
echo "No special checks for CHAIN_TYPE: $CHAIN_TYPE"
;;
esac


@@ -0,0 +1,45 @@
#!/usr/bin/env elixir
defmodule LocalHelper do
# Helper function to safely get configuration values
def get_config_value(config, key, name) do
case Keyword.get(config, key) do
nil -> {:error, name}
value -> {:ok, value}
end
end
end
# Start Mix application
Mix.start()
# Set the Mix environment to dev (or whatever environment you need)
Mix.env(:dev)
# Read and evaluate the mix.exs file
Code.require_file("mix.exs")
# Get the applications from the project configuration
apps =
try do
project = BlockScout.Mixfile.project()
with {:ok, releases} <- LocalHelper.get_config_value(project, :releases, "releases"),
{:ok, blockscout} <- LocalHelper.get_config_value(releases, :blockscout, "blockscout release"),
{:ok, applications} <- LocalHelper.get_config_value(blockscout, :applications, "applications") do
applications
|> Keyword.keys()
|> Enum.join("\n")
else
{:error, message} ->
IO.puts(:stderr, "Error: #{message} not found in mix.exs configuration")
System.halt(1)
end
rescue
error ->
IO.puts(:stderr, "Error: Failed to read mix.exs configuration - #{Exception.message(error)}")
System.halt(1)
end
# Print the applications to stdout
IO.puts(apps)


@@ -0,0 +1,22 @@
# Function to check server availability
check_server_availability() {
local url=$1
curl --connect-timeout 3 --silent ${url} 1>/dev/null
if [ $? -ne 0 ]; then
echo "VPN must be enabled to connect to ${url}"
exit 1
fi
}
# Function to check server accessibility with a POST request
check_server_accessibility() {
local url=$1
local payload='[{"id":0,"params":["latest",false],"method":"eth_getBlockByNumber","jsonrpc":"2.0"}]'
http_code=$(curl -s -o /dev/null -w "%{http_code}" -X POST ${url} -H "Content-Type: application/json" -d "${payload}")
if [ "$http_code" -ne 200 ]; then
echo "VPN must be enabled to access ${url} (HTTP status code: ${http_code})"
exit 1
fi
}


@@ -0,0 +1,47 @@
{
"name": "Blockscout Elixir",
"dockerComposeFile": "docker-compose.yml",
"service": "elixir",
"workspaceFolder": "/workspace",
"postCreateCommand": {
"safe-directory": "git config --global --add safe.directory ${containerWorkspaceFolder}",
"deps": "mix deps.get",
"known_hosts": "sudo chown vscode:vscode /home/vscode/.ssh && ssh-keyscan github.com > /home/vscode/.ssh/known_hosts"
},
"remoteEnv": {
"PATH": "${containerEnv:PATH}:${containerWorkspaceFolder}/.devcontainer/bin"
},
// Configure tool-specific properties.
"customizations": {
// Configure properties specific to VS Code.
"vscode": {
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"JakeBecker.elixir-ls",
"ckolkman.vscode-postgres",
"GitHub.copilot",
"GitHub.copilot-chat",
"GitHub.vscode-pull-request-github"
]
}
},
"features": {
"ghcr.io/stuartleeks/dev-container-features/shell-history:0": {}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// This can be used to network with other containers or with the host.
"forwardPorts": [
4000,
4001,
5432
],
// Uncomment and adjust the private key path to the one you use to authenticate on GitHub
// if you want to have ability to push to GitHub from the container.
// "mounts": [
// "source=${localEnv:HOME}/.ssh/config,target=/home/vscode/.ssh/config,type=bind,consistency=cached",
// // Make sure that the private key can be used to authenticate on GitHub
// "source=${localEnv:HOME}/.ssh/id_rsa,target=/home/vscode/.ssh/id_rsa,type=bind,consistency=cached"
// ],
// Uncomment to connect as a non-root user. See https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode"
}


@@ -0,0 +1,30 @@
services:
elixir:
image: ghcr.io/blockscout/devcontainer-elixir:1.17.3-erlang-27.1
# Uncomment next lines to use test Dockerfile with new Elixir version
# build:
# context: .
# dockerfile: Dockerfile
volumes:
- ..:/workspace:cached
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:db
# Overrides default command so things don't shut down after the process ends.
command: sleep infinity
db:
image: postgres:17
command: postgres -c 'max_connections=250'
restart: unless-stopped
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: app
volumes:
postgres-data:


@@ -0,0 +1,16 @@
[
{"lib/ethereum_jsonrpc/rolling_window.ex", :improper_list_constr, 171},
{"lib/explorer/smart_contract/solidity/publisher_worker.ex", :pattern_match, 1},
{"lib/explorer/smart_contract/solidity/publisher_worker.ex", :exact_eq, 8},
{"lib/explorer/smart_contract/solidity/publisher_worker.ex", :pattern_match, 8},
{"lib/explorer/smart_contract/vyper/publisher_worker.ex", :pattern_match, 1},
{"lib/explorer/smart_contract/vyper/publisher_worker.ex", :exact_eq, 8},
{"lib/explorer/smart_contract/vyper/publisher_worker.ex", :pattern_match, 8},
{"lib/explorer/smart_contract/stylus/publisher_worker.ex", :pattern_match, 1},
{"lib/explorer/smart_contract/stylus/publisher_worker.ex", :exact_eq, 14},
{"lib/explorer/smart_contract/stylus/publisher_worker.ex", :pattern_match, 14},
~r/lib\/phoenix\/router.ex/,
{"lib/explorer/chain/search.ex", :pattern_match, 80},
{"lib/explorer/chain/search.ex", :pattern_match, 227},
{"lib/explorer/chain/search.ex", :pattern_match, 322}
]


@@ -0,0 +1,13 @@
_build
deps
apps/block_scout_web/assets/node_modules
apps/explorer/node_modules
test
.git
.circleci
.vscode
.elixir_ls
erl_crash.dump
logs
apps/*/test
.devcontainer


@@ -0,0 +1,11 @@
[
inputs: [
".credo.exs",
".formatter.exs",
"apps/*/mix.exs",
"apps/*/{benchmarks,config,lib,priv,test}/**/*.{ex,exs}",
"mix.exs",
"{config}/**/*.{ex,exs}"
],
line_length: 120
]


@@ -0,0 +1,100 @@
name: Bug Report
description: File a bug report
labels: [ "triage" ]
body:
- type: markdown
attributes:
value: |
Thanks for reporting a bug 🐛!
Please search open/closed issues before submitting. Someone might have had a similar problem before 😉!
- type: textarea
id: description
attributes:
label: Description
description: A brief description of the issue.
validations:
required: true
- type: dropdown
id: installation-type
attributes:
label: Type of the installation
description: How the application has been deployed.
options:
- Docker-compose
- Helm charts (k8s)
- Manual from the source code
- Docker
validations:
required: true
- type: input
id: archive-node-type
attributes:
label: Type of the JSON RPC archive node
description: Which type of archive node is used.
placeholder: "Erigon/Geth/Nethermind/Reth/PolygonEdge/Besu/OpenEthereum/..."
validations:
required: true
- type: input
id: chain-type
attributes:
label: Type of the chain
description: Type of the chain.
placeholder: L1/L2/...
- type: input
id: link
attributes:
label: Link to the page
description: The link to the page where the issue occurs.
placeholder: https://eth.blockscout.com
- type: textarea
id: steps
attributes:
label: Steps to reproduce
description: |
Explain how to reproduce the issue in the development environment.
- type: input
id: backend-version
attributes:
label: Backend version
description: The release version of the backend or branch/commit.
placeholder: v6.1.0
validations:
required: true
- type: input
id: frontend-version
attributes:
label: Frontend version
description: The release version of the frontend or branch/commit.
placeholder: v1.11.1
- type: input
id: elixir-version
attributes:
label: Elixir & Erlang/OTP versions
description: Elixir & Erlang/OTP versions.
placeholder: Elixir 1.17.3 (compiled with Erlang/OTP 27)
validations:
required: true
- type: input
id: os-version
attributes:
label: Operating system
description: The operating system this issue occurred with.
placeholder: Linux/macOS/Windows
- type: textarea
id: additional-information
attributes:
label: Additional information
description: |
Use this section to provide any additional information you might have (e.g. screenshots or screencasts).


@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: Feature Request
url: https://blockscout.canny.io/feature-requests
about: Request a feature or enhancement
- name: Ask a question
url: https://github.com/orgs/blockscout/discussions
about: Ask questions and discuss topics with other community members
- name: Join our Discord Server
url: https://discord.gg/blockscout
about: The official Blockscout Discord community


@@ -0,0 +1,86 @@
name: 'Setup repo'
description: 'Setup repo: checkout/login/extract metadata, Set up Docker Buildx'
inputs:
github-token:
description: 'GitHub token for ghcr.io authentication'
required: true
docker-remote-multi-platform:
description: 'Docker remote multi-platform builder'
required: true
default: 'false'
docker-arm-host:
description: 'Docker remote arm builder'
required: false
docker-arm-host-key:
description: 'Docker remote arm builder ssh private key'
required: false
docker-image:
description: 'Docker image'
required: true
default: ghcr.io/blockscout/blockscout
outputs:
docker-builder:
description: 'Docker builder'
value: ${{ steps.builder_local.outputs.name || steps.builder_multi.outputs.name }}
docker-tags:
description: 'Docker metadata tags'
value: ${{ steps.meta.outputs.tags }}
docker-labels:
description: 'Docker metadata labels'
value: ${{ steps.meta.outputs.labels }}
docker-platforms:
description: 'Docker build platforms'
value: ${{ steps.builder_local.outputs.platforms || steps.builder_multi.outputs.platforms }}
runs:
using: "composite"
steps:
- name: Set up SSH key
shell: bash
run: |
mkdir -p ~/.ssh
echo "${{ inputs.docker-arm-host-key }}" > ~/.ssh/id_rsa
chmod 600 ~/.ssh/id_rsa
- name: Find builder
if: ${{ inputs.docker-remote-multi-platform }}
shell: bash
run: echo "BUILDER_IP=$(./.github/scripts/select-builder.sh ${{ inputs.docker-arm-host }} root ~/.ssh/id_rsa)" >> $GITHUB_ENV
- name: Set up SSH
if: ${{ inputs.docker-remote-multi-platform }}
uses: MrSquaare/ssh-setup-action@523473d91581ccbf89565e12b40faba93f2708bd # v1.1.0
with:
host: ${{ env.BUILDER_IP }}
private-key: ${{ inputs.docker-arm-host-key }}
- name: Set up Docker Buildx
if: ${{ !inputs.docker-remote-multi-platform }}
uses: docker/setup-buildx-action@v3
id: builder_local
with:
platforms: linux/amd64
- name: Set up Multi-platform Docker Buildx
if: ${{ inputs.docker-remote-multi-platform }}
uses: docker/setup-buildx-action@v3
id: builder_multi
with:
platforms: linux/amd64
append: |
- endpoint: ssh://root@${{ env.BUILDER_IP }}
platforms: linux/arm64/v8
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ inputs.github-token }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ inputs.docker-image }}
- name: Add SHORT_SHA env property with commit short sha
shell: bash
run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV


@@ -0,0 +1,23 @@
version: 2
updates:
- package-ecosystem: "mix"
directory: "/"
open-pull-requests-limit: 20
schedule:
interval: "weekly"
- package-ecosystem: "npm"
directory: "/apps/block_scout_web/assets"
open-pull-requests-limit: 10
schedule:
interval: "monthly"
ignore:
- dependency-name: "bootstrap"
- dependency-name: "web3"
versions: ["4.x"]
- package-ecosystem: "npm"
directory: "/apps/explorer"
open-pull-requests-limit: 10
schedule:
interval: "monthly"


@@ -0,0 +1,44 @@
#!/bin/bash
# Check if a domain is provided as an argument
if [ -z "$1" ]; then
echo "Usage: $0 <domain>"
exit 1
fi
DOMAIN=$1
SSH_USER=$2
SSH_KEY=$3
# Resolve A records
IP_LIST=$(dig +short A $DOMAIN)
if [ -z "$IP_LIST" ]; then
echo "No IPs found for domain $DOMAIN"
exit 1
fi
MIN_LA=1000000
BEST_BUILDER=""
for IP in $IP_LIST; do
# Check if the host is reachable via SSH
ssh -o StrictHostKeychecking=no -o ConnectTimeout=5 -o BatchMode=yes -i $SSH_KEY $SSH_USER@$IP "exit" 2>/dev/null
if [ $? -eq 0 ]; then
# Get the load average
LA=$(ssh -o StrictHostKeychecking=no -i $SSH_KEY $SSH_USER@$IP "uptime | awk -F'load average:' '{ print \$2 }' | cut -d, -f1" 2>/dev/null)
if [ $? -eq 0 ]; then
# Compare and find the minimum load average
LA=$(echo $LA | xargs) # Trim whitespace
if (( $(echo "$LA < $MIN_LA" | bc -l) )); then
MIN_LA=$LA
BEST_BUILDER=$IP
fi
fi
fi
done
if [ -n "$BEST_BUILDER" ]; then
echo "$BEST_BUILDER" | tr -d '[:space:]'
else
echo "No reachable hosts found."
fi


@@ -0,0 +1,29 @@
name: antiscam
on:
issue_comment:
types:
- created
- edited
discussion_comment:
types:
- created
- edited
permissions:
pull-requests: write
issues: write
jobs:
build:
if: ${{ !github.event.issue.pull_request }}
name: Antiscam
runs-on: ubuntu-latest
steps:
- uses: vbaranov/antiscam-action@main
with:
token: ${{ github.token }}
env:
SCAM_ACTION_WHITELISTED_LOGINS: ${{ vars.SCAM_ACTION_WHITELISTED_LOGINS }}


@@ -0,0 +1,24 @@
name: antispam
on:
issues:
types:
- opened
- edited
- reopened
permissions:
pull-requests: write
issues: write
jobs:
build:
name: Antispam
runs-on: ubuntu-latest
steps:
- uses: vbaranov/antispam-action@main
with:
token: ${{ github.token }}
env:
SCAM_ACTION_WHITELISTED_LOGINS: ${{ vars.SCAM_ACTION_WHITELISTED_LOGINS }}


@@ -0,0 +1,72 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "master" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master" ]
schedule:
- cron: '45 11 * * 5'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: [ 'javascript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2


@@ -0,0 +1,797 @@
name: Blockscout
on:
push:
branches:
- master
- production-arbitrum
- production-core
- production-eth-sepolia
- production-filecoin
- production-fuse
- production-optimism
- production-immutable
- production-iota
- production-lukso
- production-rsk
- production-sokol
- production-suave
- production-xdai
- production-zkevm
- production-zksync
- staging-l2
paths-ignore:
- "CHANGELOG.md"
- "**/README.md"
- "docker/*"
- "docker-compose/*"
pull_request:
types: [opened, synchronize, reopened, labeled]
branches:
- master
env:
MIX_ENV: test
OTP_VERSION: ${{ github.ref_name == '10284/merge' && '27.1' || vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ github.ref_name == '10284/merge' && '1.17.3' || vars.ELIXIR_VERSION }}
ACCOUNT_AUTH0_DOMAIN: "blockscoutcom.us.auth0.com"
jobs:
matrix-builder:
name: Build matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- id: set-matrix
run: |
echo "matrix=$(node -e '
// Add/remove CI matrix chain types here
const defaultChainTypes = ["default"];
const chainTypes = [
"arbitrum",
"berachain",
"blackfort",
"celo",
"ethereum",
"filecoin",
"optimism",
"polygon_zkevm",
"rsk",
"scroll",
"shibarium",
"stability",
"zetachain",
"zilliqa",
"zksync",
"neon"
];
const extraChainTypes = ["suave", "polygon_edge"];
// Chain type matrix we use in master branch
const allChainTypes = [].concat(defaultChainTypes, chainTypes, extraChainTypes);
const labels = ${{ github.event_name == 'pull_request' && toJson(github.event.pull_request.labels.*.name) || '[]' }};
const ciLabels = labels.filter(label => label.startsWith("ci:"));
const labeledChainTypes = [].concat(
defaultChainTypes.filter(chainType => ciLabels.includes("ci:" + chainType)),
chainTypes.filter(chainType => ciLabels.includes("ci:all") || ciLabels.includes("ci:" + chainType)),
extraChainTypes.filter(chainType => ciLabels.includes("ci:" + chainType))
);
// Chain type matrix we use in PRs to master branch
const ciChainTypes = labeledChainTypes.length > 0 ? labeledChainTypes : defaultChainTypes;
const matrix = { "chain-type": ${{ github.event_name == 'pull_request' && 'ciChainTypes' || 'allChainTypes' }} };
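            // Illustrative outputs: a PR labeled "ci:celo" and "ci:zksync" yields
            // {"chain-type":["celo","zksync"]}; a PR with no "ci:" labels falls back to
            // {"chain-type":["default"]}; pushes to master use the full allChainTypes list.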
console.log(JSON.stringify(matrix));
')" >> $GITHUB_OUTPUT
build-and-cache:
name: Build and Cache deps
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: "ELIXIR_VERSION.lock"
run: echo "${ELIXIR_VERSION}" > ELIXIR_VERSION.lock
- name: "OTP_VERSION.lock"
run: echo "${OTP_VERSION}" > OTP_VERSION.lock
- name: Restore Mix Deps Cache
uses: actions/cache@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Conditionally build Mix deps cache
if: steps.deps-cache.outputs.cache-hit != 'true'
run: |
mix local.hex --force
mix local.rebar --force
mix deps.get
mix deps.compile --skip-umbrella-children
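      # On a cache miss the deps compiled here are saved by actions/cache's post-job step
      # under the key above, so the downstream jobs (which use actions/cache/restore with
      # the same key) can reuse them; --skip-umbrella-children compiles only the third-party
      # deps, not the umbrella apps themselves.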
- name: Restore Explorer NPM Cache
uses: actions/cache@v4
id: explorer-npm-cache
with:
path: apps/explorer/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-${{ hashFiles('apps/explorer/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-
- name: Conditionally build Explorer NPM Cache
if: steps.explorer-npm-cache.outputs.cache-hit != 'true'
run: npm install
working-directory: apps/explorer
- name: Restore Blockscout Web NPM Cache
uses: actions/cache@v4
id: blockscoutweb-npm-cache
with:
path: apps/block_scout_web/assets/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-${{ hashFiles('apps/block_scout_web/assets/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-
- name: Conditionally build Blockscout Web NPM Cache
if: steps.blockscoutweb-npm-cache.outputs.cache-hit != 'true'
run: npm install
working-directory: apps/block_scout_web/assets
credo:
name: Credo
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Restore Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- run: mix credo
check_formatted:
name: Code formatting checks
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Restore Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- run: mix format --check-formatted
dialyzer:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.matrix-builder.outputs.matrix) }}
name: Dialyzer static analysis
runs-on: ubuntu-latest
needs:
- build-and-cache
- matrix-builder
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Restore Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Dialyzer Cache
uses: actions/cache@v4
id: dialyzer-cache
with:
path: priv/plts
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-${{ matrix.chain-type }}-dialyzer-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-${{ matrix.chain-type }}-dialyzer-mixlockhash-
- name: Conditionally build Dialyzer Cache
        if: steps.dialyzer-cache.outputs.cache-hit != 'true'
run: |
mkdir -p priv/plts
mix dialyzer --plt
env:
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
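      # CHAIN_TYPE is left empty for the "default" matrix entry so that variant builds
      # without chain-specific extensions; every other entry is passed through verbatim.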
- name: Run Dialyzer
run: mix dialyzer --halt-exit-status
env:
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
gettext:
name: Missing translation keys check
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Restore Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
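      # The grep in the next step is the actual assertion: it only matches when
      # gettext.extract reports "0 new messages, 0 removed", so an out-of-date translation
      # catalog makes grep exit non-zero and fail the job.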
- run: |
mix gettext.extract --merge | tee stdout.txt
grep "Wrote priv/gettext/en/LC_MESSAGES/default.po (0 new messages, 0 removed, " stdout.txt
working-directory: "apps/block_scout_web"
sobelow:
name: Sobelow security analysis
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Scan explorer for vulnerabilities
run: mix sobelow --config
working-directory: "apps/explorer"
- name: Scan block_scout_web for vulnerabilities
run: mix sobelow --config
working-directory: "apps/block_scout_web"
cspell:
name: Check spelling
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Explorer NPM Cache
uses: actions/cache@v4
id: explorer-npm-cache
with:
path: apps/explorer/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-${{ hashFiles('apps/explorer/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-
- name: Restore Blockscout Web NPM Cache
uses: actions/cache@v4
id: blockscoutweb-npm-cache
with:
path: apps/block_scout_web/assets/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-${{ hashFiles('apps/block_scout_web/assets/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-
- name: Run cspell
uses: streetsidesoftware/cspell-action@v6
with:
use_cspell_files: true
incremental_files_only: false
eslint:
name: ESLint
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Explorer NPM Cache
uses: actions/cache@v4
id: explorer-npm-cache
with:
path: apps/explorer/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-${{ hashFiles('apps/explorer/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-
- name: Restore Blockscout Web NPM Cache
uses: actions/cache@v4
id: blockscoutweb-npm-cache
with:
path: apps/block_scout_web/assets/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-${{ hashFiles('apps/block_scout_web/assets/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-
- name: Build assets
run: node node_modules/webpack/bin/webpack.js --mode development
working-directory: "apps/block_scout_web/assets"
- run: ./node_modules/.bin/eslint --format=junit --output-file="test/eslint/junit.xml" js/**
working-directory: apps/block_scout_web/assets
jest:
name: JS Tests
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Blockscout Web NPM Cache
uses: actions/cache@v4
id: blockscoutweb-npm-cache
with:
path: apps/block_scout_web/assets/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-${{ hashFiles('apps/block_scout_web/assets/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-
- name: Build assets
run: node node_modules/webpack/bin/webpack.js --mode development
working-directory: "apps/block_scout_web/assets"
- run: ./node_modules/.bin/jest
working-directory: apps/block_scout_web/assets
test_utils:
name: Utils Tests
runs-on: ubuntu-latest
needs: build-and-cache
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Restore Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- working-directory: apps/utils
run: mix test
test_nethermind_mox_ethereum_jsonrpc:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.matrix-builder.outputs.matrix) }}
name: EthereumJSONRPC Tests
runs-on: ubuntu-latest
needs:
- build-and-cache
- matrix-builder
services:
postgres:
image: postgres:17
env:
# Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
POSTGRES_DB: explorer_test
          # match PGPASSWORD in the test step env below
          POSTGRES_PASSWORD: postgres
          # match PGUSER in the test step env below
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- run: ./bin/install_chrome_headless.sh
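      # The *_CASE variables below select Mox-based test doubles, so no live Nethermind
      # node is needed; tests that do require one are skipped via --exclude no_nethermind.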
- name: mix test --exclude no_nethermind
run: |
cd apps/ethereum_jsonrpc
mix compile
mix test --no-start --exclude no_nethermind
env:
          # match POSTGRES_PASSWORD of the postgres service above
          PGPASSWORD: postgres
          # match POSTGRES_USER of the postgres service above
PGUSER: postgres
ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox"
ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
test_nethermind_mox_explorer:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.matrix-builder.outputs.matrix) }}
name: Explorer Tests
runs-on: ubuntu-latest
needs:
- build-and-cache
- matrix-builder
services:
postgres:
image: postgres:17
env:
# Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
POSTGRES_DB: explorer_test
          # match PGPASSWORD in the test step env below
          POSTGRES_PASSWORD: postgres
          # match PGUSER in the test step env below
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Explorer NPM Cache
uses: actions/cache@v4
id: explorer-npm-cache
with:
path: apps/explorer/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-${{ hashFiles('apps/explorer/package-lock.json') }}
restore-keys: |
            ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-
- run: ./bin/install_chrome_headless.sh
- name: mix test --exclude no_nethermind
run: |
mix ecto.create --quiet
mix ecto.migrate
cd apps/explorer
mix compile
mix test --no-start --exclude no_nethermind
env:
          # match POSTGRES_PASSWORD of the postgres service above
          PGPASSWORD: postgres
          # match POSTGRES_USER of the postgres service above
PGUSER: postgres
ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox"
ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true"
test_nethermind_mox_indexer:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.matrix-builder.outputs.matrix) }}
name: Indexer Tests
runs-on: ubuntu-latest
needs:
- build-and-cache
- matrix-builder
services:
postgres:
image: postgres:17
env:
# Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
POSTGRES_DB: explorer_test
          # match PGPASSWORD in the test step env below
          POSTGRES_PASSWORD: postgres
          # match PGUSER in the test step env below
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- run: ./bin/install_chrome_headless.sh
- name: mix test --exclude no_nethermind
run: |
mix ecto.create --quiet
mix ecto.migrate
cd apps/indexer
mix compile
mix test --no-start --exclude no_nethermind
env:
          # match POSTGRES_PASSWORD of the postgres service above
          PGPASSWORD: postgres
          # match POSTGRES_USER of the postgres service above
PGUSER: postgres
ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox"
ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true"
test_nethermind_mox_block_scout_web:
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.matrix-builder.outputs.matrix) }}
name: Blockscout Web Tests
runs-on: ubuntu-latest
needs:
- build-and-cache
- matrix-builder
services:
redis-db:
image: "redis:alpine"
ports:
- 6379:6379
postgres:
image: postgres:17
env:
# Match apps/explorer/config/test.exs config :explorer, Explorer.Repo, database
POSTGRES_DB: explorer_test
          # match PGPASSWORD in the test step env below
          POSTGRES_PASSWORD: postgres
          # match PGUSER in the test step env below
POSTGRES_USER: postgres
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ env.OTP_VERSION }}
elixir-version: ${{ env.ELIXIR_VERSION }}
hexpm-mirrors: |
https://builds.hex.pm
https://cdn.jsdelivr.net/hex
- name: Mix Deps Cache
uses: actions/cache/restore@v4
id: deps-cache
with:
path: |
deps
_build
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-${{ hashFiles('mix.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-deps-mixlockhash-
- name: Restore Explorer NPM Cache
uses: actions/cache@v4
id: explorer-npm-cache
with:
path: apps/explorer/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-${{ hashFiles('apps/explorer/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-explorer-npm-
- name: Restore Blockscout Web NPM Cache
uses: actions/cache@v4
id: blockscoutweb-npm-cache
with:
path: apps/block_scout_web/assets/node_modules
key: ${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-${{ hashFiles('apps/block_scout_web/assets/package-lock.json') }}
restore-keys: |
${{ runner.os }}-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ env.MIX_ENV }}-blockscoutweb-npm-
- name: Build assets
run: node node_modules/webpack/bin/webpack.js --mode development
working-directory: "apps/block_scout_web/assets"
- run: ./bin/install_chrome_headless.sh
- name: mix test --exclude no_nethermind
run: |
mix ecto.create --quiet
mix ecto.migrate
cd apps/block_scout_web
mix compile
mix test --no-start --exclude no_nethermind
env:
          # match POSTGRES_PASSWORD of the postgres service above
          PGPASSWORD: postgres
          # match POSTGRES_USER of the postgres service above
PGUSER: postgres
ETHEREUM_JSONRPC_CASE: "EthereumJSONRPC.Case.Nethermind.Mox"
ETHEREUM_JSONRPC_WEB_SOCKET_CASE: "EthereumJSONRPC.WebSocket.Case.Mox"
CHAIN_ID: "10200"
API_RATE_LIMIT_DISABLED: "true"
API_GRAPHQL_RATE_LIMIT_DISABLED: "true"
ADMIN_PANEL_ENABLED: "true"
ACCOUNT_ENABLED: "true"
ACCOUNT_REDIS_URL: "redis://localhost:6379"
SOURCIFY_INTEGRATION_ENABLED: "true"
CHAIN_TYPE: ${{ matrix.chain-type != 'default' && matrix.chain-type || '' }}
WETH_TOKEN_TRANSFERS_FILTERING_ENABLED: "true"

View File

@@ -0,0 +1,62 @@
name: Pre-release for Arbitrum
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Arbitrum (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum
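      # Example: dispatching this workflow with number=3 publishes
      # ghcr.io/blockscout/blockscout-arbitrum:8.0.2-alpha.3 plus the -indexer variant below.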
- name: Build and push Docker image for Arbitrum (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum

View File

@@ -0,0 +1,62 @@
name: Pre-release for Berachain
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
      - name: Build and push Docker image for Berachain (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-berachain:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain
      - name: Build and push Docker image for Berachain (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-berachain:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain

View File

@@ -0,0 +1,63 @@
name: Pre-release for CELO
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
API_GRAPHQL_MAX_COMPLEXITY: 10400
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for CELO (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-celo:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=celo
- name: Build and push Docker image for CELO (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-celo:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=celo

View File

@@ -0,0 +1,62 @@
name: Pre-release for Ethereum
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Ethereum (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum
- name: Build and push Docker image for Ethereum (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum

View File

@@ -0,0 +1,62 @@
name: Pre-release for Filecoin
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Filecoin (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-filecoin:latest, ghcr.io/blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=filecoin
- name: Build and push Docker image for Filecoin (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=filecoin

View File

@@ -0,0 +1,62 @@
name: Pre-release for Fuse
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Fuse (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true
- name: Build and push Docker image for Fuse (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true

View File

@@ -0,0 +1,62 @@
name: Pre-release for Optimism
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Optimism (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism
- name: Build and push Docker image for Optimism (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism

View File

@@ -0,0 +1,62 @@
name: Pre-release for Polygon ZkEVM
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Polygon ZkEVM (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm
- name: Build and push Docker image for Polygon ZkEVM (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm

View File

@@ -0,0 +1,62 @@
name: Pre-release for Rootstock
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Rootstock (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=rsk
- name: Build and push Docker image for Rootstock (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=rsk

View File

@@ -0,0 +1,62 @@
name: Pre-release for Scroll
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Scroll (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-scroll:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll
- name: Build and push Docker image for Scroll (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-scroll:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll

View File

@@ -0,0 +1,62 @@
name: Pre-release for Zilliqa
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zilliqa:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zilliqa
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zilliqa:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zilliqa

View File

@@ -0,0 +1,62 @@
name: Pre-release for ZkSync
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for ZkSync (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync
- name: Build and push Docker image for ZkSync (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync

View File

@@ -0,0 +1,74 @@
name: Pre-release
on:
workflow_dispatch:
inputs:
number:
type: number
required: true
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build & Push Core Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:master, ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
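      # cache-from/cache-to point BuildKit at a registry-backed cache (the :buildcache ref);
      # mode=max also exports intermediate layers so subsequent builds can reuse them.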
- name: Build & Push Core Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}-alpha.${{ inputs.number }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@@ -0,0 +1,54 @@
name: Publish Custom Base Docker image (master + some commit(s))
on:
workflow_dispatch:
push:
branches:
- custom-build
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-postrelease-custom-build-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-postrelease-custom-build-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@@ -0,0 +1,116 @@
name: Publish Docker image on every push to master branch
on:
push:
branches:
- master
paths-ignore:
- 'CHANGELOG.md'
- '**/README.md'
- 'docker-compose/*'
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
RELEASE_VERSION: 8.0.2
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:master, ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build and push Docker image for frontend
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
tags: ghcr.io/blockscout/blockscout:frontend-main
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
SESSION_COOKIE_DOMAIN=k8s-dev.blockscout.com
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
deploy_e2e:
needs: push_to_registry
runs-on: ubuntu-latest
permissions: write-all
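    # Flow: authenticate to Vault with the repository's OIDC (JWT) identity, pull a GitHub
    # token for the deployment repo, then trigger deploy_blockscout.yaml in
    # blockscout/deployment-values on main and wait for that run to finish.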
steps:
- name: Get Vault credentials
id: retrieve-vault-secrets
uses: hashicorp/vault-action@v2.4.1
with:
url: https://vault.k8s.blockscout.com
role: ci-dev
path: github-jwt
method: jwt
tlsSkipVerify: false
exportToken: true
secrets: |
ci/data/dev/github token | WORKFLOW_TRIGGER_TOKEN ;
- name: Trigger deploy
uses: convictional/trigger-workflow-and-wait@v1.6.1
with:
owner: blockscout
repo: deployment-values
github_token: ${{env.WORKFLOW_TRIGGER_TOKEN}}
workflow_file_name: deploy_blockscout.yaml
ref: main
wait_interval: 30
client_payload: '{ "instance": "dev", "globalEnv": "e2e"}'

View File

@@ -0,0 +1,57 @@
name: Arbitrum Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-arbitrum
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: arbitrum
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum
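      # SHORT_SHA is assumed to be exported by the setup-repo action above (it is not set in
      # this file), yielding tags like 8.0.2-postrelease-<short sha> and a BLOCKSCOUT_VERSION
      # of v8.0.2.+commit.<short sha>.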
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum

View File

@@ -0,0 +1,57 @@
name: Berachain Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-berachain
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: berachain
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain

View File

@@ -0,0 +1,61 @@
name: Celo Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-celo
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: celo
API_GRAPHQL_MAX_COMPLEXITY: 10400
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for CELO (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
- name: Build and push Docker image for CELO (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}

View File

@@ -0,0 +1,39 @@
name: POA Core Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-core
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: poa
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@@ -0,0 +1,57 @@
name: ETH Sepolia Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-eth-sepolia
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: eth-sepolia
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum

View File

@@ -0,0 +1,57 @@
name: ETH Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-eth
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: ethereum
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum

View File

@ -0,0 +1,56 @@
name: Publish Docker image for specific chain branches
on:
push:
branches:
- production-filecoin
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: filecoin
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Filecoin (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
- name: Build and push Docker image for Filecoin (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}

View File

@ -0,0 +1,40 @@
name: Fuse Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-fuse
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: fuse
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BRIDGED_TOKENS_ENABLED=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@ -0,0 +1,59 @@
name: Gnosis Chain Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-xdai
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: xdai
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BRIDGED_TOKENS_ENABLED=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BRIDGED_TOKENS_ENABLED=true
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum

View File

@ -0,0 +1,39 @@
name: L2 staging Publish Docker image
on:
workflow_dispatch:
push:
branches:
- staging-l2
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: optimism-l2-advanced
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@ -0,0 +1,39 @@
name: LUKSO Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-lukso
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: lukso
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:latest, ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@ -0,0 +1,58 @@
name: Optimism Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-optimism
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: optimism
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism

View File

@ -0,0 +1,40 @@
name: Rootstock Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-rsk
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: rsk
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=rsk

View File

@ -0,0 +1,58 @@
name: Scroll Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-scroll
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: scroll
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll

View File

@ -0,0 +1,43 @@
name: SUAVE Publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-suave
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: suave
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=suave

View File

@ -0,0 +1,40 @@
name: Zetachain publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-zetachain
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: zetachain
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zetachain

View File

@ -0,0 +1,59 @@
name: Zilliqa publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-zilliqa
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: zilliqa
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=${{ env.DOCKER_CHAIN_NAME }}

View File

@ -0,0 +1,58 @@
name: Zkevm publish Docker image
on:
workflow_dispatch:
push:
branches:
- production-zkevm
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: zkevm
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm

View File

@ -0,0 +1,57 @@
name: Zksync publish Docker image
on:
push:
branches:
- production-zksync
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
DOCKER_CHAIN_NAME: zksync
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-${{ env.DOCKER_CHAIN_NAME }}:${{ env.RELEASE_VERSION }}-postrelease-${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync

View File

@ -0,0 +1,52 @@
name: Publish Docker image to staging on demand
on:
workflow_dispatch:
push:
branches:
- staging
paths-ignore:
- 'CHANGELOG.md'
- '**/README.md'
- 'docker-compose/*'
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
RELEASE_VERSION: 8.0.2
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout-staging:latest, ghcr.io/blockscout/blockscout-staging:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@ -0,0 +1,67 @@
name: Publish regular Docker image on demand
on:
workflow_dispatch:
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
RELEASE_VERSION: 8.0.2
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}.commit.${{ env.SHORT_SHA }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}.+commit.${{ env.SHORT_SHA }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}

View File

@ -0,0 +1,60 @@
name: Release for Arbitrum
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Arbitrum (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-arbitrum:latest, ghcr.io/blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum
- name: Build and push Docker image for Arbitrum (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-arbitrum:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=arbitrum

View File

@ -0,0 +1,60 @@
name: Release for Berachain
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Berachain (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-berachain:latest, ghcr.io/blockscout/blockscout-berachain:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain
- name: Build and push Docker image for Berachain (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-berachain:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=berachain

View File

@ -0,0 +1,64 @@
name: Release for Celo
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
API_GRAPHQL_MAX_COMPLEXITY: 10400
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for CELO (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-celo:latest, ghcr.io/blockscout/blockscout-celo:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=celo
- name: Build and push Docker image for CELO (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-celo:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
API_GRAPHQL_MAX_COMPLEXITY=${{ env.API_GRAPHQL_MAX_COMPLEXITY }}
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=celo

View File

@ -0,0 +1,61 @@
name: Release for Ethereum
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Ethereum (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-ethereum:latest, ghcr.io/blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum
- name: Build and push Docker image for Ethereum (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-ethereum:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=ethereum

View File

@ -0,0 +1,60 @@
name: Release for Filecoin
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Filecoin (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-filecoin:latest, ghcr.io/blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=filecoin
- name: Build and push Docker image for Filecoin (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-filecoin:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=filecoin

View File

@ -0,0 +1,60 @@
name: Release for Fuse
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Fuse (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-fuse:latest, ghcr.io/blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true
- name: Build and push Docker image for Fuse (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-fuse:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true

View File

@ -0,0 +1,62 @@
name: Release for Gnosis Chain
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Gnosis chain (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-xdai:latest, ghcr.io/blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true
CHAIN_TYPE=ethereum
- name: Build and push Docker image for Gnosis chain (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-xdai:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
BRIDGED_TOKENS_ENABLED=true
CHAIN_TYPE=ethereum

View File

@ -0,0 +1,60 @@
name: Release for Optimism
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Optimism (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-optimism:latest, ghcr.io/blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism
- name: Build and push Docker image for Optimism (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-optimism:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=optimism

View File

@ -0,0 +1,61 @@
name: Release for Polygon zkEVM
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Polygon zkEVM (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zkevm:latest, ghcr.io/blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm
- name: Build and push Docker image for Polygon zkEVM (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zkevm:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=polygon_zkevm

View File

@ -0,0 +1,61 @@
name: Release for Rootstock
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Rootstock (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-rsk:latest, ghcr.io/blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=rsk
- name: Build and push Docker image for Rootstock (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-rsk:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=rsk

View File

@ -0,0 +1,61 @@
name: Release for Scroll
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Scroll (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-scroll:latest, ghcr.io/blockscout/blockscout-scroll:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll
- name: Build and push Docker image for Scroll (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-scroll:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=scroll

View File

@ -0,0 +1,60 @@
name: Release for SUAVE
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for SUAVE (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-suave:latest, ghcr.io/blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=suave
- name: Build and push Docker image for SUAVE (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-suave:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=suave

View File

@ -0,0 +1,60 @@
name: Release for Zetachain
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for Zetachain (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zetachain:latest, ghcr.io/blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zetachain
- name: Build and push Docker image for Zetachain (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zetachain:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zetachain

View File

@ -0,0 +1,62 @@
name: Release for Zilliqa
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zilliqa:latest, ghcr.io/blockscout/blockscout-zilliqa:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zilliqa
- name: Build and push Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zilliqa:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zilliqa

View File

@ -0,0 +1,60 @@
name: Release for ZkSync
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build and push Docker image for ZkSync (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zksync:latest, ghcr.io/blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync
- name: Build and push Docker image for ZkSync (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
tags: ghcr.io/blockscout/blockscout-zksync:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
CHAIN_TYPE=zksync

View File

@ -0,0 +1,149 @@
name: Release
on:
workflow_dispatch:
release:
types: [published]
env:
OTP_VERSION: ${{ vars.OTP_VERSION }}
ELIXIR_VERSION: ${{ vars.ELIXIR_VERSION }}
jobs:
push_to_registry:
name: Push Docker image to GitHub Container Registry
runs-on: ubuntu-latest
env:
RELEASE_VERSION: 8.0.2
steps:
- uses: actions/checkout@v4
- name: Setup repo
uses: ./.github/actions/setup-repo
id: setup
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
docker-remote-multi-platform: true
docker-arm-host: ${{ secrets.ARM_RUNNER_HOSTNAME }}
docker-arm-host-key: ${{ secrets.ARM_RUNNER_KEY }}
- name: Build & Push Core Docker image (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:latest, ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build & Push Core Docker image (indexer)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-indexer
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DISABLE_API=true
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
- name: Build & Push Docker image with an old UI (indexer + API)
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/oldUI.Dockerfile
push: true
cache-from: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache
cache-to: type=registry,ref=ghcr.io/blockscout/blockscout:buildcache,mode=max
tags: ghcr.io/blockscout/blockscout:${{ env.RELEASE_VERSION }}-with-old-ui
labels: ${{ steps.setup.outputs.docker-labels }}
platforms: |
linux/amd64
linux/arm64/v8
build-args: |
DECODE_NOT_A_CONTRACT_CALLS=false
MIXPANEL_URL=
MIXPANEL_TOKEN=
AMPLITUDE_URL=
AMPLITUDE_API_KEY=
BLOCKSCOUT_VERSION=v${{ env.RELEASE_VERSION }}
RELEASE_VERSION=${{ env.RELEASE_VERSION }}
# - name: Send release announcement to Slack workflow
# id: slack
# uses: slackapi/slack-github-action@v1.24.0
# with:
# payload: |
# {
# "release-version": "${{ env.RELEASE_VERSION }}",
# "release-link": "https://github.com/blockscout/blockscout/releases/tag/v${{ env.RELEASE_VERSION }}"
# }
# env:
# SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
# merge-master-after-release:
# name: Merge 'master' to specific branch after release
# runs-on: ubuntu-latest
# env:
# BRANCHES: |
# production-core
# production-sokol
# production-eth-experimental
# production-eth-goerli
# production-lukso
# production-xdai
# production-polygon-supernets
# production-rsk
# production-immutable
# steps:
# - uses: actions/checkout@v4
# - name: Set Git config
# run: |
# git config --local user.email "actions@github.com"
# git config --local user.name "Github Actions"
# - name: Merge master back after release
# run: |
# git fetch --unshallow
# touch errors.txt
# for branch in $BRANCHES;
# do
# git reset --merge
# git checkout master
# git fetch origin
# echo $branch
# git ls-remote --exit-code --heads origin $branch || { echo $branch >> errors.txt; continue; }
# echo "Merge 'master' to $branch"
# git checkout $branch
# git pull || { echo $branch >> errors.txt; continue; }
# git merge --no-ff master -m "Auto-merge master back to $branch" || { echo $branch >> errors.txt; continue; }
# git push || { echo $branch >> errors.txt; continue; }
# git checkout master;
# done
# [ -s errors.txt ] && echo "There are problems with merging 'master' to branches:" || echo "Errors file is empty"
# cat errors.txt
# [ ! -s errors.txt ]

View File

@ -0,0 +1,77 @@
# App artifacts
/_build
/apps/*/cover
/apps/*/logs
/cover
/db
/deps
/doc
/*.ez
/logs
# mix dialyzer artifacts
/priv/plts
# Generated on crash by the VM
erl_crash.dump
dump.rdb
# Generated on crash by NPM
npm-debug.log
# Static artifacts
/apps/**/node_modules
# Since we are building assets from assets/,
# we ignore priv/static. You may want to comment
# this depending on your deployment strategy.
/apps/*/priv/static/
# Files matching config/*.secret.exs pattern contain sensitive
# data and you should not commit them into version control.
#
# Alternatively, you may comment the line below and commit the
# secrets files as long as you replace their contents by environment
# variables.
/apps/*/config/*.secret.exs
# Wallaby screenshots
screenshots/
# Sobelow
.sobelow
# osx
.DS_Store
dump.rdb
# mix phx.gen.cert self-signed certs for dev
/apps/block_scout_web/priv/cert
/docker-compose/services/blockscout-db-data
/docker-compose/services/stats-db-data
/docker-compose/services/redis-data
/docker-compose/services/logs
/docker-compose/tmp
.idea/
*.iml
.vscode
.cursorignore
.cursorrules
.elixir_ls
**.dec**
*.env.example
*.env.local
*.env.staging
.devcontainer/.blockscout_config*
# dets tables
queue_storage
tasks_in_progress
/dets
/temp

View File

@ -0,0 +1,13 @@
pairs:
cj: CJ Bryan; cj
dr: Doc Ritezel; doc
mo: Matt Olenick; matto
db: Derek Barnes; dgb
rdwb: Desmond Bowe; des
email:
prefix: pair
domain: ministryofvelocity.com
no_solo_prefix: true
global: true

View File

@ -0,0 +1,5 @@
repos:
- repo: https://github.com/gitleaks/gitleaks
rev: v8.17.0
hooks:
- id: gitleaks

View File

@ -0,0 +1,3 @@
elixir 1.17.3-otp-27
erlang 27.1
nodejs 20.17.0

File diff suppressed because it is too large

View File

@ -0,0 +1,73 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race,
religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at andrew@poa.network. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org

View File

@ -0,0 +1,259 @@
# Contribution Guidelines: What We Are Looking For
We welcome contributions that enhance the project and improve the overall quality of our codebase. While we appreciate the effort that goes into making contributions, we kindly ask that contributors focus on the following types of changes:
- Feature Enhancements: Substantial improvements or new features that add significant value to the project.
- Bug Fixes: Fixes for known bugs or issues that impact functionality.
- Documentation Improvements: Comprehensive updates to documentation that clarify usage, installation, or project structure.
- Performance Improvements: Changes that enhance the performance or efficiency of the application.
# Contributing
1. Fork it ( <https://github.com/blockscout/blockscout/fork> )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Write tests that cover your work
4. Commit your changes (`git commit -am 'Add some feature'`)
5. Push to the branch (`git push origin my-new-feature`)
6. Create a new Pull Request. The title of the Pull Request should follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) and start with a `feat:`, `fix:`, `chore:`, `doc:`, `perf:`, or `refactor:` prefix (see the sketch below).
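As a rough illustration of steps 1-6, the shell sketch below walks through the same flow; the fork URL, branch name, and commit message are placeholders, not required values.
```shell
# Minimal sketch of steps 1-6; <your-username>, the branch name, and the
# commit message are placeholders.
git clone https://github.com/<your-username>/blockscout.git
cd blockscout
git checkout -b my-new-feature
# ...write the change and the tests that cover it...
mix test                                  # make sure the suite still passes
git add .                                 # stage new and modified files
git commit -m "feat: add some feature"
git push origin my-new-feature
# Then open a Pull Request against blockscout/blockscout with a
# Conventional Commits title, e.g. "feat: add some feature".
```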
## General
* Each commit should be a single logical change that still allows all tests to pass. Prefer smaller commits when a change could be split into more than one logical grouping. The goal is to allow future contributors (including your own future self) to determine your reasoning for making changes and to cherry-pick, patch or port those changes in isolation to other branches or forks.
* If during your PR you reveal a pre-existing bug:
1. Try to isolate the bug and fix it on an independent branch and PR it first.
2. Try to fix the bug in a separate commit from other changes:
1. Commit the code in the broken state that revealed the bug originally
2. Commit the fix for the bug.
3. Continue original PR work.
## Enhancements
Enhancements cover all changes that make users' lives better:
* [feature requests filed as issues](https://github.com/blockscout/blockscout/labels/enhancement) that impact end-user [contributors](https://github.com/blockscout/blockscout/labels/contributor) and [developers](https://github.com/blockscout/blockscout/labels/developer)
* changes to the [architecture](https://github.com/blockscout/blockscout/labels/architecture) that make it easier for contributors (in the GitHub sense), dev-ops, and deployers to maintain and run Blockscout
## Bug Fixes
For bug fixes, whenever possible, there should be at least 2 commits:
1. A regression test commit that contains tests demonstrating the bug and failing against the current code.
2. The bug fix commit that shows the regression test now passing.
This format ensures that we can run the test to reproduce the original bug without depending on the new code in the fix, which could lead to the test falsely passing.
## Incompatible Changes
Incompatible changes can arise as a side effect of either Enhancements or Bug Fixes. For example, an Enhancement may require changing the database schema and rebuilding the index from scratch in order to show end users new data, while a Bug Fix may require changing the schema or changing how certain internal APIs are called.
* Incompatible changes should be called out explicitly, with any steps the various user roles need to do to upgrade.
* If a schema change occurs that requires a re-index, add the following to the Pull Request description:
```markdown
**NOTE**: A database reset and re-index is required
```
## Pull Request
There is a [PULL_REQUEST_TEMPLATE.md](PULL_REQUEST_TEMPLATE.md) for this repository, but since it can't fill in the title for you, please complete the following steps when opening a Pull Request, before filling in the template:
* [ ] Title
* [ ] Prefix labels if you don't have permissions to set labels in the GitHub interface.
* (bug) for [bug](https://github.com/blockscout/blockscout/labels/bug) fixes
* (enhancement) for [enhancement](https://github.com/blockscout/blockscout/labels/enhancement)s
    * (incompatible changes) for [incompatible changes](https://github.com/blockscout/blockscout/labels/incompatible%20changes), such as a refactor that removes functionality, changes arguments, or makes something required that wasn't previously.
* [ ] Single sentence summary of change
* What was fixed for bugs
* What was added for enhancements
* What was changed for incompatible changes
See [#255](https://github.com/blockscout/blockscout/pull/255) as an example PR that uses GitHub keywords and a Changelog to explain multiple changes.
## Basic Naming Convention
When contributing to the codebase, please adhere to the following naming conventions to ensure clarity and consistency:
- Use full names for entities. Avoid abbreviations or shorthand.
- Instead of "tx" or "txn", use "transaction".
- Instead of "txs", use "transactions".
- Instead of "tx_hash" or "txn_hash", use "transaction_hash".
- Instead of "block_num", use "block_number".
- Ensure that variable names are descriptive and convey the purpose or content clearly.
- Consistent naming helps in maintaining readability and understanding of the code, especially for new contributors.
By following these conventions, we can maintain a clean and understandable codebase.
### API V2 Naming Convention
When contributing to the API v2, please adhere to the following naming conventions for response fields to ensure clarity and consistency:
- The block number should be returned as a number in a property whose name ends with `block_number`.
- All hashes (transaction, block, address, etc.) should be returned as hex strings in properties whose names end with `_hash`.
- Property names for aggregations such as counts and sums should contain the plural form of the entity and a `_count` or `_sum` suffix respectively, e.g. `transactions_count`, `blocks_count`, `withdrawals_sum`.
- All fields that contain the "index" suffix should be returned as numbers.
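For illustration only, a response object that follows these conventions might look like the sketch below (the field set and values are invented, not taken from an actual endpoint):
```elixir
# Hypothetical API v2 response map; field names follow the conventions above,
# values are placeholders.
%{
  # a number, in a property ending with `block_number`
  "block_number" => 19_000_000,
  # a hex string, in a property ending with `_hash` (placeholder hash)
  "transaction_hash" => "0x0000000000000000000000000000000000000000000000000000000000000000",
  # plural entity + `_count` / `_sum` for aggregations
  "transactions_count" => 42,
  "withdrawals_sum" => 1_000_000_000_000_000_000,
  # `index`-suffixed fields are numbers
  "transaction_index" => 7
}
```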
## Environment Configuration Best Practices
### Runtime vs. Compile-time Configuration
We strongly favor **runtime configuration** over compile-time configuration
whenever possible. This approach:
- Reduces the number of Docker images needed
- Increases deployment flexibility
- Simplifies maintenance and testing
When **adding** new configuration options, chain types, or **refactoring**
existing ones, please follow the decision tree below to determine the
appropriate approach:
```mermaid
flowchart TD
A[Add/Modify Configuration Option or Chain Type] --> B{Is it feature-specific behavior of a function?}
B -->|Yes| C[Use RuntimeEnvHelper or Application.get_env/3 and pattern matching]
B -->|No| D{Does it need new database tables?}
D -->|Yes| E[Create new Ecto.Repo and handle it at runtime in config_helper.ex]
D -->|No| F{Is it an API endpoint?}
F -->|Yes| G[Use chain_scope macro or CheckFeature plug]
F -->|No| H{Does it modify existing database schema?}
H -->|Yes| I[Use Compile-time configuration]
H -->|No| J[Contact us to discuss this case further]
I -->|Future Work| O[Refactor toward Runtime configuration]
```
#### Use runtime configuration and pattern matching
Anti-pattern:
```elixir
# AVOID THIS
use Utils.CompileTimeEnvHelper,
chain_type: [:explorer, :chain_type]
if @chain_type == :optimism do
def foo, do: :bar
else
def foo, do: :baz
end
```
Better approach:
```elixir
# DO THIS INSTEAD
use Utils.RuntimeEnvHelper,
chain_type: [:explorer, :chain_type]
def foo, do: chain_type() |> do_foo()
defp do_foo(:optimism), do: :bar
defp do_foo(_), do: :baz
```
#### New database tables
If your feature or chain-specific functionality requires new database tables:
1. Define a new repository module in `apps/explorer/lib/explorer/repo.ex`.
2. Add the repository to `config/config_helper.exs` in the `repos/0` function.
3. Include a runtime check to load this repo conditionally:
```elixir
# In config_helper.ex
ext_repos = [
{parse_bool_env_var("MY_FEATURE_ENABLED"), Explorer.Repo.MyFeature},
# other feature repos...
]
|> Enum.filter(&elem(&1, 0))
|> Enum.map(&elem(&1, 1))
```
This approach ensures migrations are automatically detected and applied at
runtime without requiring recompilation.
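For step 1, a minimal sketch of the feature-specific repository module (the module name is illustrative; adapt it to your feature) could be:
```elixir
# Hypothetical feature-specific repo (step 1); follows the standard Ecto.Repo
# pattern used by the existing Explorer repos.
defmodule Explorer.Repo.MyFeature do
  use Ecto.Repo,
    otp_app: :explorer,
    adapter: Ecto.Adapters.Postgres
end
```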
#### API endpoints
For feature-specific or chain-specific API endpoints, use one of the following
runtime approaches:
1. **For chain-specific routes**, use the `chain_scope` macro in your router:
```elixir
scope "/v2", as: :api_v2 do
chain_scope :polygon_zkevm do
get("/zkevm-batch/:batch_number", V2.TransactionController, :polygon_zkevm_batch)
end
end
```
2. **For feature-toggle endpoints**, use `CheckFeature` plug in pipelines:
```elixir
pipeline :my_feature do
plug(BlockScoutWeb.Plug.CheckFeature, feature_check: &my_feature_enabled?/0)
end
scope "/my-feature" do
pipe_through(:my_feature)
get "/data", MyFeatureController, :index
end
```
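The `feature_check` option expects a zero-arity predicate. A minimal sketch of such a helper, backed by a runtime application environment value (the module, function, and config key below are assumptions for illustration), might be:
```elixir
# Illustrative helper for the feature toggle above; the :my_feature_enabled key
# is assumed to be populated from an ENV var at runtime.
defmodule BlockScoutWeb.MyFeatureHelper do
  def my_feature_enabled? do
    Application.get_env(:block_scout_web, :my_feature_enabled, false)
  end
end
```
In the router, the capture `&my_feature_enabled?/0` would then reference this helper once it is imported into the router module (or be written as `&BlockScoutWeb.MyFeatureHelper.my_feature_enabled?/0`).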
Both approaches return an appropriate 404 response when the feature is disabled or the
chain type doesn't match.
#### Modifying existing database schema
If your functionality requires modifying existing database schema structures
(adding columns to shared tables, changing constraints, etc.), you currently
must use compile-time configuration. This is the **only case** where
compile-time configuration is still recommended.
```elixir
# Current approach for schema modifications
use Utils.CompileTimeEnvHelper,
chain_type: [:explorer, :chain_type]
if @chain_type == :optimism do
# Schema modifications specific to Optimism
end
```
To prepare for future runtime refactoring, isolate these schema-specific changes
as much as possible.
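As a hedged illustration of such isolation (the module, table, and field names below are invented), chain-specific columns can be grouped into a single, clearly marked compile-time block:
```elixir
# Illustrative only: chain-specific columns kept in one compile-time block so
# they are easy to locate and extract in a future runtime refactor.
defmodule Explorer.Chain.MyTransaction do
  use Ecto.Schema

  use Utils.CompileTimeEnvHelper,
    chain_type: [:explorer, :chain_type]

  schema "transactions" do
    field(:hash, :binary)

    if @chain_type == :optimism do
      # Hypothetical Optimism-only columns, grouped together
      field(:l1_fee, :decimal)
      field(:l1_gas_used, :decimal)
    end
  end
end
```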
This limitation stems from Ecto schemas being defined at compile-time. When
different chain types need variations in shared tables (additional fields,
different constraints), these schema differences cannot be modified at runtime.
We're currently researching approaches for dynamic schema adjustment based on
runtime configuration.
For reference on which chain types still require compile-time configuration, see
the [Chain-Specific Environment
Variables](https://docs.blockscout.com/setup/env-variables/backend-envs-chain-specific)
documentation.
### Compile-time Environment Variables
Before using compile-time configuration, ensure you've exhausted all runtime
alternatives by following the decision tree above. If after careful
consideration you still need to work with compile-time environment variables,
follow these guidelines:
- Always use the `Utils.CompileTimeEnvHelper` module instead of direct
`Application.compile_env/2` calls:
```elixir
# DO use this approach
use Utils.CompileTimeEnvHelper,
attribute_name: [:app, :test]
# Access the value using the module attribute
@attribute_name
# DON'T use this approach
Application.compile_env(:app, :test) # avoid direct compile_env calls
```
This approach provides faster compilation time and simplifies development and
maintenance.

View File

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

View File

@ -0,0 +1,32 @@
*[GitHub keywords to close any associated issues](https://blog.github.com/2013-05-14-closing-issues-via-pull-requests/)*
## Motivation
*Why we should merge these changes. If using GitHub keywords to close [issues](https://github.com/poanetwork/blockscout/issues), this is optional as the motivation can be read on the issue page.*
## Changelog
### Enhancements
*Things you added that don't break anything. Regression tests for Bug Fixes count as Enhancements.*
### Bug Fixes
*Things you changed that fix bugs. If it fixes a bug, but in so doing adds a new requirement, removes code, or requires a database reset and reindex, the breaking part of the change should be added to Incompatible Changes below also.*
### Incompatible Changes
*Things you broke while doing Enhancements and Bug Fixes. Breaking changes include (1) adding new requirements and (2) removing code. Renaming counts as (2) because a rename is a removal followed by an add.*
## Upgrading
*If you have any Incompatible Changes in the above Changelog, outline how users of prior versions can upgrade once this PR lands or when reviewers are testing locally. A common upgrading step is "Database reset and re-index required".*
## Checklist for your Pull Request (PR)
- [ ] If I added new functionality, I added tests covering it.
- [ ] If I fixed a bug, I added a regression test to prevent the bug from silently reappearing again.
- [ ] I checked whether I should update the docs and did so by submitting a PR to [docs repository](https://github.com/blockscout/docs).
- [ ] If I added/changed/removed an ENV var, I submitted a PR to the [docs repository](https://github.com/blockscout/docs) to update the list of [env vars](https://github.com/blockscout/docs/blob/master/setup/env-variables/README.md) and updated the version to `master` in the Version column. If I removed a variable, I added it to the [Deprecated ENV Variables](https://github.com/blockscout/docs/blob/master/setup/env-variables/deprecated-env-variables/README.md) page. After the docs PR is merged, the changes will be reflected in these [pages](https://docs.blockscout.com/setup/env-variables).
- [ ] If I added new DB indices, I checked with PGHero or other tools that they are not redundant.
- [ ] If I added/removed a chain type, I modified the GitHub CI matrix and PR labels accordingly.

View File

@ -0,0 +1,51 @@
<h1 align="center">Blockscout</h1>
<p align="center">Blockchain Explorer for inspecting and analyzing EVM Chains.</p>
<div align="center">
[![Blockscout](https://github.com/blockscout/blockscout/actions/workflows/config.yml/badge.svg)](https://github.com/blockscout/blockscout/actions)
[![Discord](https://dcbadge.vercel.app/api/server/blockscout?style=flat)](https://discord.gg/blockscout)
</div>
Blockscout provides a comprehensive, easy-to-use interface for users to view, confirm, and inspect transactions on EVM (Ethereum Virtual Machine) blockchains. This includes Ethereum Mainnet, Ethereum Classic, Optimism, Gnosis Chain and many other **Ethereum testnets, private networks, L2s and sidechains**.
See our [project documentation](https://docs.blockscout.com/) for detailed information and setup instructions.
For questions, comments and feature requests see the [discussions section](https://github.com/blockscout/blockscout/discussions) or via [Discord](https://discord.com/invite/blockscout).
## About Blockscout
Blockscout allows users to search transactions, view accounts and balances, verify and interact with smart contracts and view and interact with applications on the Ethereum network including many forks, sidechains, L2s and testnets.
Blockscout is an open-source alternative to centralized, closed source block explorers such as Etherscan, Etherchain and others. As Ethereum sidechains and L2s continue to proliferate in both private and public settings, transparent, open-source tools are needed to analyze and validate all transactions.
## Supported Projects
Blockscout currently supports several hundred chains and rollups throughout the greater blockchain ecosystem. Ethereum, Cosmos, Polkadot, Avalanche, Near and many others include Blockscout integrations. A comprehensive list is available at [chains.blockscout.com](https://chains.blockscout.com). If your project is not listed, contact the team in [Discord](https://discord.com/invite/blockscout).
## Getting Started
See the [project documentation](https://docs.blockscout.com/) for instructions:
- [Manual deployment](https://docs.blockscout.com/for-developers/deployment/manual-deployment-guide)
- [Docker-compose deployment](https://docs.blockscout.com/for-developers/deployment/docker-compose-deployment)
- [Kubernetes deployment](https://docs.blockscout.com/for-developers/deployment/kubernetes-deployment)
- [Manual deployment (backend + old UI)](https://docs.blockscout.com/for-developers/deployment/manual-old-ui)
- [Ansible deployment](https://docs.blockscout.com/for-developers/ansible-deployment)
- [ENV variables](https://docs.blockscout.com/setup/env-variables)
- [Configuration options](https://docs.blockscout.com/for-developers/configuration-options)
## Acknowledgements
We would like to thank the EthPrize foundation for their funding support.
## Contributing
See [CONTRIBUTING.md](CONTRIBUTING.md) for contribution and pull request protocol. We expect contributors to follow our [code of conduct](CODE_OF_CONDUCT.md) when submitting code or comments.
## License
[![License: GPL v3.0](https://img.shields.io/badge/License-GPL%20v3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
This project is licensed under the GNU General Public License v3.0. See the [LICENSE](LICENSE) file for details.

View File

@ -0,0 +1,15 @@
[
verbose: false,
private: true,
skip: true,
router: "lib/block_scout_web/router.ex",
exit: "low",
format: "compact",
ignore: ["Config.Headers", "Config.CSWH", "XSS.SendResp", "XSS.Raw"],
ignore_files: [
"apps/block_scout_web/lib/block_scout_web/routers/smart_contracts_api_v2_router.ex",
"apps/block_scout_web/lib/block_scout_web/routers/tokens_api_v2_router.ex",
"apps/block_scout_web/lib/block_scout_web/routers/utils_api_v2_router.ex",
"apps/block_scout_web/lib/block_scout_web/routers/address_badges_v2_router.ex"
]
]

File diff suppressed because it is too large
 
File diff suppressed because it is too large

View File

@ -0,0 +1,43 @@
# BlockScout Web
BlockScoutWeb is the API and presentation layer of BlockScout, built on the Phoenix framework. It exposes RESTful and GraphQL APIs for accessing blockchain data, directing HTTP requests through Phoenix routers to controllers that manage resources such as addresses, transactions, blocks, and tokens, and formatting responses as JSON via view modules. It provides real-time updates on new blocks, transactions, and exchange rates using Phoenix Channels, and supports smart contract verification through multiple methods, including integration with Sourcify. Custom plugs add functionality such as rate limiting, API version checks, and logging. Configuration is retrieved from the application environment, and errors are handled through fallback controllers.
## Machine Requirements
* Erlang/OTP 21+
* Elixir 1.9+
* Postgres 10.3
## Required Accounts
* Github for code storage
## Setup Instructions
### Development
To get BlockScout Web interface up and running locally:
* Setup `../explorer`
* Install Node.js dependencies with `$ cd assets && npm install && cd ..`
* Start Phoenix with `$ mix phx.server` (This can be run from this directory or the project root: the project root is recommended.)
Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.
You can also run IEx (Interactive Elixir): `$ iex -S mix phx.server` (This can be run from this directory or the project root: the project root is recommended.)
### Testing
* Build the assets: `cd assets && npm run build`
* Format the Elixir code: `mix format`
* Lint the Elixir code: `mix credo --strict`
* Run the dialyzer: `mix dialyzer --halt-exit-status`
* Check the Elixir code for vulnerabilities: `mix sobelow --config`
* Update translation templates and translations and check there are no uncommitted changes: `mix gettext.extract --merge`
* Lint the JavaScript code: `cd assets && npm run eslint`
## Internationalization
The app is currently internationalized. It is only localized to U.S. English.
To translate new strings, run `$ mix gettext.extract --merge` and edit the new strings in `priv/gettext/en/LC_MESSAGES/default.po`.

View File

@ -0,0 +1,3 @@
{
"presets": [["@babel/preset-env", { "useBuiltIns": "usage", "corejs": { "version": 3 } }]]
}

View File

@ -0,0 +1,9 @@
# Javascript structure files
## lib
* This folder is used to place `component` files, that may span in multiple pages.
## pages
* This folder is used to place `page` specific files, that won't be reusable in other locations.

View File

@ -0,0 +1,4 @@
export default {
primary: "#4786ff",
secondary: "#ced4da"
}

View File

@ -0,0 +1,82 @@
/**
* @jest-environment jsdom
*/
import { asyncReducer, asyncInitialState } from '../../js/lib/async_listing_load'
describe('ELEMENTS_LOAD', () => {
test('sets only nextPagePath and ignores other keys', () => {
const state = Object.assign({}, asyncInitialState)
const action = { type: 'ELEMENTS_LOAD', nextPagePath: 'set', foo: 1 }
const output = asyncReducer(state, action)
expect(output.foo).not.toEqual(1)
expect(output.nextPagePath).toEqual('set')
})
})
describe('ADD_ITEM_KEY', () => {
test('sets itemKey to what was passed in the action', () => {
const expectedItemKey = 'expected.Key'
const state = Object.assign({}, asyncInitialState)
const action = { type: 'ADD_ITEM_KEY', itemKey: expectedItemKey }
const output = asyncReducer(state, action)
expect(output.itemKey).toEqual(expectedItemKey)
})
})
describe('START_REQUEST', () => {
test('sets loading status to true', () => {
const state = Object.assign({}, asyncInitialState, { loading: false })
const action = { type: 'START_REQUEST' }
const output = asyncReducer(state, action)
expect(output.loading).toEqual(true)
})
})
describe('REQUEST_ERROR', () => {
test('sets requestError to true', () => {
const state = Object.assign({}, asyncInitialState, { requestError: false })
const action = { type: 'REQUEST_ERROR' }
const output = asyncReducer(state, action)
expect(output.requestError).toEqual(true)
})
})
describe('FINISH_REQUEST', () => {
test('sets loading status to false', () => {
const state = Object.assign({}, asyncInitialState, {
loading: true
})
const action = { type: 'FINISH_REQUEST' }
const output = asyncReducer(state, action)
expect(output.loading).toEqual(false)
})
})
describe('ITEMS_FETCHED', () => {
test('sets the items to what was passed in the action', () => {
const expectedItems = [1, 2, 3]
const state = Object.assign({}, asyncInitialState)
const action = { type: 'ITEMS_FETCHED', items: expectedItems }
const output = asyncReducer(state, action)
expect(output.items).toEqual(expectedItems)
})
})
describe('NAVIGATE_TO_OLDER', () => {
test('sets beyondPageOne to true', () => {
const state = Object.assign({}, asyncInitialState, { beyondPageOne: false })
const action = { type: 'NAVIGATE_TO_OLDER' }
const output = asyncReducer(state, action)
expect(output.beyondPageOne).toEqual(true)
})
})

View File

@ -0,0 +1,31 @@
/**
* @jest-environment jsdom
*/
import { searchEngine } from '../../js/lib/autocomplete'
test('searchEngine', () => {
expect(searchEngine('qwe', {
'name': 'Test',
'symbol': 'TST',
'address_hash': '0x000',
'tx_hash': '0x000',
'block_hash': '0x000'
})).toEqual(undefined)
expect(searchEngine('tes', {
'name': 'Test',
'symbol': 'TST',
'address_hash': '0x000',
'tx_hash': '0x000',
'block_hash': '0x000'
})).toEqual('<div><div>0x000</div><div><b><mark class=\'autoComplete_highlight\'>Tes</mark>t</b> (TST)</div></div>')
expect(searchEngine('qwe', {
'name': 'qwe1\'"><iframe/onload=console.log(123)>${7*7}{{7*7}}{{\'7\'*\'7\'}}',
'symbol': 'qwe1\'"><iframe/onload=console.log(123)>${7*7}{{7*7}}{{\'7\'*\'7\'}}',
'address_hash': '0x000',
'tx_hash': '0x000',
'block_hash': '0x000'
})).toEqual('<div><div>0x000</div><div><b><mark class=\'autoComplete_highlight\'>qwe</mark>1&#039;&quot;&gt;&lt;iframe/onload=console.log(123)&gt;${7*7}{{7*7}}{{&#039;7&#039;*&#039;7&#039;}}</b> (<mark class=\'autoComplete_highlight\'>qwe</mark>1&#039;&quot;&gt;&lt;iframe/onload=console.log(123)&gt;${7*7}{{7*7}}{{&#039;7&#039;*&#039;7&#039;}})</div></div>')
})

View File

@ -0,0 +1,18 @@
/**
* @jest-environment jsdom
*/
import { formatUsdValue } from '../../js/lib/currency'
test('formatUsdValue', () => {
window.localized = {
'Less than': 'Less than'
}
expect(formatUsdValue(0)).toEqual('$0.00 USD')
expect(formatUsdValue(0.0000001)).toEqual('Less than $0.000001 USD')
expect(formatUsdValue(0.123456789)).toEqual('$0.123457 USD')
expect(formatUsdValue(0.1234)).toEqual('$0.123400 USD')
expect(formatUsdValue(1.23456789)).toEqual('$1.23 USD')
expect(formatUsdValue(1.2)).toEqual('$1.20 USD')
expect(formatUsdValue(123456.789)).toEqual('$123,457 USD')
})

View File

@ -0,0 +1,286 @@
/**
* @jest-environment jsdom
*/
import { prepareMethodArgs } from '../../../js/lib/smart_contract/common_helpers'
import $ from 'jquery'
const oneFieldHTML =
'<form data-function-form>' +
' <input type="hidden" name="function_name" value="convertMultiple">' +
' <input type="hidden" name="method_id" value="">' +
' <div>' +
' <input type="text" name="function_input" id="first">' +
' </div>' +
' <input type="submit" value="Write">' +
'</form>'
const twoFieldHTML =
'<form data-function-form>' +
' <input type="hidden" name="function_name" value="convertMultiple">' +
' <input type="hidden" name="method_id" value="">' +
' <div>' +
' <input type="text" name="function_input" id="first">' +
' </div>' +
' <div>' +
' <input type="text" name="function_input" id="second">' +
' </div>' +
' <input type="submit" value="Write">' +
'</form>'
test('prepare contract args | type: address', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "address",
"name": "arg1",
"internalType": "address"
}
]
document.getElementById('first').value = ' 0x000000000000000000 0000000000000000000000 '
const expectedValue = ['0x0000000000000000000000000000000000000000']
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: address[]*2', () => {
document.body.innerHTML = twoFieldHTML
var inputs = [
{
"type": "address[]",
"name": "arg1",
"internalType": "address[]"
},
{
"type": "address[]",
"name": "arg2",
"internalType": "address[]"
}
]
document.getElementById('first').value = ' 0x0000000000000000000000000000000000000000 , 0x0000000000000000000000000000000000000001 '
document.getElementById('second').value = ' 0x0000000000000000000000000000000000000002 , 0x0000000000000000000000000000000000000003 '
const expectedValue = [
[
'0x0000000000000000000000000000000000000000',
'0x0000000000000000000000000000000000000001'
],
[
'0x0000000000000000000000000000000000000002',
'0x0000000000000000000000000000000000000003'
]
]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: string', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "string",
"name": "arg1",
"internalType": "string"
}
]
document.getElementById('first').value = ' 0x0000000000000000000000000000000000000000 , 0x0000000000000000000000000000000000000001 '
const expectedValue = ['0x0000000000000000000000000000000000000000 , 0x0000000000000000000000000000000000000001']
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: string[]', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "string[]",
"name": "arg1",
"internalType": "string[]"
}
]
document.getElementById('first').value = ' " 0x0000000000000000000000000000000000000000 " , " 0x0000000000000000000000000000000000000001 " '
const expectedValue = [['0x0000000000000000000000000000000000000000', '0x0000000000000000000000000000000000000001']]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: bytes32', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "bytes32",
"name": "arg1",
"internalType": "bytes32"
}
]
document.getElementById('first').value = ' " 0x0000000000000000000000000000000000000000 " '
const expectedValue = ['0x0000000000000000000000000000000000000000']
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: bytes32[]', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "bytes32[]",
"name": "arg1",
"internalType": "bytes32[]"
}
]
document.getElementById('first').value = ' " 0x0000000000000000000000000000000000000000 " , " 0x0000000000000000000000000000000000000001 " '
const expectedValue = [['0x0000000000000000000000000000000000000000', '0x0000000000000000000000000000000000000001']]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: bool', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "bool",
"name": "arg1",
"internalType": "bool"
}
]
// cspell:ignore fals
document.getElementById('first').value = ' fals e '
const expectedValue = [false]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: bool[]', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "bool[]",
"name": "arg1",
"internalType": "bool[]"
}
]
document.getElementById('first').value = ' true , false '
const expectedValue = [[true, false]]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: uint256', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "uint256",
"name": "arg1",
"internalType": "uint256"
}
]
document.getElementById('first').value = ' 9 876 543 210 '
const expectedValue = ['9876543210']
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: uint256[]', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "uint256[]",
"name": "arg1",
"internalType": "uint256[]"
}
]
document.getElementById('first').value = ' 156 000 , 10 690 000 , 59874 '
const expectedValue = [['156000', '10690000', '59874']]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: tuple', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "tuple",
"name": "mintParams",
"internalType": "struct ISynthereumLiquidityPool.MintParams",
"components": [
{
"type": "uint256",
"name": "minNumTokens",
"internalType": "uint256"
},
{
"type": "uint256",
"name": "collateralAmount",
"internalType": "uint256"
},
{
"type": "uint256",
"name": "expiration",
"internalType": "uint256"
},
{
"type": "address",
"name": "recipient",
"internalType": "address"
}
]
}
]
document.getElementById('first').value = '[0, "200000000000000000000","1672938000" ,"0xc31249BA48763dF46388BA5C4E7565d62ed4801C"]'
const expectedValue = [["0","200000000000000000000","1672938000","0xc31249BA48763dF46388BA5C4E7565d62ed4801C"]]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
test('prepare contract args | type: tuple[]', () => {
document.body.innerHTML = oneFieldHTML
var inputs = [
{
"type": "tuple[]",
"name": "mintParams",
"internalType": "struct ISynthereumLiquidityPool.MintParams",
"components": [
{
"type": "uint256",
"name": "minNumTokens",
"internalType": "uint256"
},
{
"type": "address",
"name": "recipient",
"internalType": "address"
}
]
}
]
document.getElementById('first').value = '[["200000000000000000000" ,"0xc31249BA48763dF46388BA5C4E7565d62ed4801C"], ["100500" , "0x9fbaD00ae18FAe064C728E6B535a6cB950c8C40A "]]'
const expectedValue = [[["200000000000000000000","0xc31249BA48763dF46388BA5C4E7565d62ed4801C"], ["100500","0x9fbaD00ae18FAe064C728E6B535a6cB950c8C40A"]]]
const $functionInputs = $('[data-function-form]').find('input[name=function_input]')
expect(prepareMethodArgs($functionInputs, inputs)).toEqual(expectedValue)
})
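
Taken together, these cases pin down the input normalization prepareMethodArgs performs: outer whitespace and inner spaces are stripped from addresses, unsigned integers and bytes values, comma-separated input becomes an array for `[]` types, wrapping quotes are dropped from string[]/bytes32 elements, booleans are parsed after space removal, and tuple/tuple[] input is JSON-parsed. A hypothetical per-element helper consistent with that behavior, for illustration only; the real logic in common_helpers.js is structured differently and covers more ABI types:

function sanitizeScalarSketch (type, raw) {
  // outer whitespace and wrapping quotes are dropped for every type;
  // for []-types this runs on each element after comma-splitting
  const bare = raw.trim().replace(/^"|"$/g, '').trim()
  if (type.startsWith('address') || type.startsWith('uint') || type.startsWith('bytes')) {
    return bare.replace(/\s/g, '')              // ' 9 876 543 210 ' -> '9876543210'
  }
  if (type.startsWith('bool')) {
    return bare.replace(/\s/g, '') === 'true'   // ' fals e ' -> false
  }
  return bare                                    // plain strings keep their inner content
}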

View File

@ -0,0 +1,12 @@
/**
* @jest-environment jsdom
*/
import { escapeHtml } from '../../js/lib/utils'
test('escapeHtml', () => {
expect(escapeHtml('<script>')).toEqual('&lt;script&gt;')
expect(escapeHtml('1&')).toEqual('1&amp;')
expect(escapeHtml('1"')).toEqual('1&quot;')
expect(escapeHtml('1\'')).toEqual('1&#039;')
})
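
The four assertions cover exactly the five characters a conventional HTML escaper rewrites. One such implementation consistent with the expected entities (whether utils.js is written this way is not shown in the diff):

function escapeHtmlSketch (text) {
  const entities = { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#039;' }
  // a single regex pass avoids double-escaping the ampersands produced by the map
  return String(text).replace(/[&<>"']/g, (char) => entities[char])
}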

View File

@ -0,0 +1,66 @@
/**
* @jest-environment jsdom
*/
import { reducer, initialState } from '../../js/pages/address'
describe('RECEIVED_NEW_BLOCK', () => {
test('increases validation count', () => {
const state = Object.assign({}, initialState, { validationCount: 30 })
const action = {
type: 'RECEIVED_NEW_BLOCK',
blockHtml: 'test 2'
}
const output = reducer(state, action)
expect(output.validationCount).toEqual(31)
})
test('does not increase validation count when the channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
validationCount: 30
})
const action = {
type: 'RECEIVED_NEW_BLOCK',
blockHtml: 'test 2'
}
const output = reducer(state, action)
expect(output.validationCount).toEqual(30)
})
})
describe('RECEIVED_NEW_TRANSACTION', () => {
test('increments the transaction count', () => {
const state = Object.assign({}, initialState, {
addressHash: "0x001",
transactionCount: 1
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: { fromAddressHash: "0x001", transactionHash: 2, transactionHtml: 'test 2' }
}
const newState = reducer(state, action)
expect(newState.transactionCount).toEqual(2)
})
test('does not increment the count if the channel is disconnected', () => {
const state = Object.assign({}, initialState, {
addressHash: "0x001",
transactionCount: 1,
channelDisconnected: true
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: { fromAddressHash: "0x001", transactionHash: 2, transactionHtml: 'test 2' }
}
const newState = reducer(state, action)
expect(newState.transactionCount).toEqual(1)
})
})
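
Both describe blocks exercise the same guard: while the channel is disconnected the counters stay put, otherwise they are bumped. A condensed sketch of just those branches; the real js/pages/address reducer also checks how the address is involved in the transaction, which these cases do not cover:

function addressCountersSketch (state, action) {
  // disconnected channel freezes both counters, per the second test in each block
  if (state.channelDisconnected) return state
  switch (action.type) {
    case 'RECEIVED_NEW_BLOCK':
      return Object.assign({}, state, { validationCount: state.validationCount + 1 })
    case 'RECEIVED_NEW_TRANSACTION':
      return Object.assign({}, state, { transactionCount: state.transactionCount + 1 })
    default:
      return state
  }
}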

View File

@ -0,0 +1,151 @@
/**
* @jest-environment jsdom
*/
import { reducer, initialState } from '../../../js/pages/address/internal_transactions'
describe('RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH', () => {
test('with new internal transaction', () => {
const state = Object.assign({}, initialState, {
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [{ internalTransactionHtml: 'test 2' }]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 2', 'test 1'])
})
test('with batch of new internal transactions', () => {
const state = Object.assign({}, initialState, {
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [
{ internalTransactionHtml: 'test 2' },
{ internalTransactionHtml: 'test 3' },
{ internalTransactionHtml: 'test 4' },
{ internalTransactionHtml: 'test 5' },
{ internalTransactionHtml: 'test 6' },
{ internalTransactionHtml: 'test 7' },
{ internalTransactionHtml: 'test 8' },
{ internalTransactionHtml: 'test 9' },
{ internalTransactionHtml: 'test 10' },
{ internalTransactionHtml: 'test 11' },
{ internalTransactionHtml: 'test 12' },
{ internalTransactionHtml: 'test 13' }
]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 1'])
expect(output.internalTransactionsBatch).toEqual([
'test 13',
'test 12',
'test 11',
'test 10',
'test 9',
'test 8',
'test 7',
'test 6',
'test 5',
'test 4',
'test 3',
'test 2'
])
})
test('after batch of new internal transactions', () => {
const state = Object.assign({}, initialState, {
internalTransactionsBatch: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [
{ internalTransactionHtml: 'test 2' }
]
}
const output = reducer(state, action)
expect(output.internalTransactionsBatch).toEqual(['test 2', 'test 1'])
})
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [{ internalTransactionHtml: 'test 2' }]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 1'])
})
test('beyond page one', () => {
const state = Object.assign({}, initialState, {
beyondPageOne: true,
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [{ internalTransactionHtml: 'test 2' }]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 1'])
})
test('with filtered "to" internal transaction', () => {
const state = Object.assign({}, initialState, {
filter: 'to',
addressHash: '0x00',
items: []
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [{
fromAddressHash: '0x00',
toAddressHash: '0x01',
internalTransactionHtml: 'test 2'
},
{
fromAddressHash: '0x01',
toAddressHash: '0x00',
internalTransactionHtml: 'test 3'
}]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 3'])
})
test('with filtered "from" internal transaction', () => {
const state = Object.assign({}, initialState, {
filter: 'from',
addressHash: '0x00',
items: []
})
const action = {
type: 'RECEIVED_NEW_INTERNAL_TRANSACTION_BATCH',
msgs: [{
fromAddressHash: '0x00',
toAddressHash: '0x01',
internalTransactionHtml: 'test 2'
},
{
fromAddressHash: '0x01',
toAddressHash: '0x00',
internalTransactionHtml: 'test 3'
}]
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 2'])
})
})
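
These cases describe a "render inline vs. divert to a batch counter" decision plus the usual disconnect/pagination guards. The sketch below captures the batching part; BATCH_THRESHOLD is an assumption (the tests only show one message rendered inline and twelve diverted), and the to/from filtering from the last two cases is left out:

const BATCH_THRESHOLD = 11 // assumed cutoff; not pinned down by these tests

function internalTransactionsSketch (state, action) {
  if (state.channelDisconnected || state.beyondPageOne) return state
  // newest message ends up first, hence the reverse before prepending
  const incoming = action.msgs.map((msg) => msg.internalTransactionHtml).reverse()
  if (state.internalTransactionsBatch.length === 0 && incoming.length < BATCH_THRESHOLD) {
    return Object.assign({}, state, { items: incoming.concat(state.items) })
  }
  // once batching has started, everything else joins the batch counter
  return Object.assign({}, state, {
    internalTransactionsBatch: incoming.concat(state.internalTransactionsBatch)
  })
}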

View File

@ -0,0 +1,128 @@
/**
* @jest-environment jsdom
*/
import { reducer, initialState } from '../../../js/pages/address/transactions'
describe('RECEIVED_NEW_TRANSACTION', () => {
test('with new transaction', () => {
const state = Object.assign({}, initialState, {
items: ['transaction html']
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: { transactionHtml: 'another transaction html' }
}
const output = reducer(state, action)
expect(output.items).toEqual([ 'another transaction html', 'transaction html' ])
})
test('when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
items: ['transaction html']
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: { transactionHtml: 'another transaction html' }
}
const output = reducer(state, action)
expect(output.items).toEqual(['transaction html'])
})
test('beyond page one', () => {
const state = Object.assign({}, initialState, {
beyondPageOne: true,
items: ['transaction html']
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: { transactionHtml: 'another transaction html' }
}
const output = reducer(state, action)
expect(output.items).toEqual([ 'transaction html' ])
})
test('adds the new transaction to state even when it is filtered by to', () => {
const state = Object.assign({}, initialState, {
addressHash: '0x001',
filter: 'to',
items: []
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: {
fromAddressHash: '0x002',
transactionHtml: 'transaction html',
toAddressHash: '0x001'
}
}
const output = reducer(state, action)
expect(output.items).toEqual(['transaction html'])
})
test(
'does nothing when it is filtered by to but the toAddressHash is different from addressHash',
() => {
const state = Object.assign({}, initialState, {
addressHash: '0x001',
filter: 'to',
items: []
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: {
fromAddressHash: '0x003',
transactionHtml: 'transaction html',
toAddressHash: '0x002'
}
}
const output = reducer(state, action)
expect(output.items).toEqual([])
})
test('adds the new transaction to state even when it is filtered by from', () => {
const state = Object.assign({}, initialState, {
addressHash: '0x001',
filter: 'from',
items: []
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: {
fromAddressHash: '0x001',
transactionHtml: 'transaction html',
toAddressHash: '0x002'
}
}
const output = reducer(state, action)
expect(output.items).toEqual(['transaction html'])
})
test(
'does nothing when it is filtered by from but the fromAddressHash is different from addressHash',
() => {
const state = Object.assign({}, initialState, {
addressHash: '0x001',
filter: 'from',
items: []
})
const action = {
type: 'RECEIVED_NEW_TRANSACTION',
msg: {
fromAddressHash: '0x002',
transactionHtml: 'transaction html',
toAddressHash: '0x001'
}
}
const output = reducer(state, action)
expect(output.items).toEqual([])
})
})
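
The filter cases reduce to one guard: a transaction is only prepended when the configured filter side of the message matches the page's address. A sketch of that guard; the disconnect and pagination branches mirror the other reducers and are omitted:

// returns true when the transaction should be shown under the current filter
function matchesFilterSketch (state, msg) {
  if (state.filter === 'to') return msg.toAddressHash === state.addressHash
  if (state.filter === 'from') return msg.fromAddressHash === state.addressHash
  return true
}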

View File

@ -0,0 +1,50 @@
/**
* @jest-environment jsdom
*/
import { reducer, initialState } from '../../../js/pages/address/validations'
describe('RECEIVED_NEW_BLOCK', () => {
test('adds new block to the top of the list', () => {
const state = Object.assign({}, initialState, {
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_BLOCK',
blockHtml: 'test 2'
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 2', 'test 1'])
})
test('does nothing beyond page one', () => {
const state = Object.assign({}, initialState, {
beyondPageOne: true,
channelDisconnected: false,
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_BLOCK',
blockHtml: 'test 2'
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 1'])
})
test('does nothing when channel has been disconnected', () => {
const state = Object.assign({}, initialState, {
channelDisconnected: true,
items: ['test 1']
})
const action = {
type: 'RECEIVED_NEW_BLOCK',
blockHtml: 'test 2'
}
const output = reducer(state, action)
expect(output.items).toEqual(['test 1'])
})
})

Some files were not shown because too many files have changed in this diff.